On 4/6/22 00:11, Zebediah Figura wrote:
On 4/5/22 07:26, Rémi Bernon wrote:
@@ -70,11 +70,18 @@ static HRESULT try_create_wg_transform(struct h264_decoder *decoder)
     if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN)
         return MF_E_INVALIDMEDIATYPE;
 
-    mf_media_type_to_wg_format(decoder->output_type, &output_format);
-    if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN)
+    mf_media_type_to_wg_format(decoder->output_type, &decoder->output_format);
+    if (decoder->output_format.major_type == WG_MAJOR_TYPE_UNKNOWN)
         return MF_E_INVALIDMEDIATYPE;
 
-    if (!(decoder->wg_transform = wg_transform_create(&input_format, &output_format)))
+    /* Don't force any output frame size, H264 streams already have the
+     * metadata for it and will generate a MF_E_TRANSFORM_STREAM_CHANGE
+     * result later.
+     */
+    decoder->output_format.u.video.width = 0;
+    decoder->output_format.u.video.height = 0;
This seems like it's orthogonal to dynamic format change.
It also leads me to wonder: do we need the output_type field at all if we're going to have decoder->output_format?
Maybe not completely useful, but I think it makes the MF API implementation easier. It will also later hold information which is not present in the wg_format struct and doesn't need to be, such as video frame aperture for proper NV12 plane alignment and frame crop.
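For instance, something like this is what I have in mind for later (just a sketch; get_output_aperture is a hypothetical helper, not part of this patch):

    /* Sketch: read the display aperture from the cached output type, falling
     * back to the coded frame size from the wg_format. Illustrative only. */
    static void get_output_aperture(struct h264_decoder *decoder, MFVideoArea *aperture)
    {
        memset(aperture, 0, sizeof(*aperture));
        if (FAILED(IMFMediaType_GetBlob(decoder->output_type, &MF_MT_MINIMUM_DISPLAY_APERTURE,
                (UINT8 *)aperture, sizeof(*aperture), NULL)))
        {
            aperture->Area.cx = decoder->output_format.u.video.width;
            aperture->Area.cy = decoder->output_format.u.video.height;
        }
    }

The wg_format struct doesn't carry this information, while the media type attributes do.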
+    if (!(decoder->wg_transform = wg_transform_create(&input_format, &decoder->output_format)))
         return E_FAIL;
 
     return S_OK;
@@ -369,7 +376,7 @@ static HRESULT WINAPI transform_GetOutputAvailableType(IMFTransform *iface, DWOR
     if (FAILED(hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, output_type)))
         goto done;
 
-    hr = fill_output_media_type(media_type, NULL);
+    hr = fill_output_media_type(media_type, decoder->output_type);
 
 done:
     if (SUCCEEDED(hr))
This also seems like it's orthogonal to dynamic format change. (Is it possible to write tests for this?)
I don't think it's completely orthogonal, and there are tests already, checking the new output types after a format change.
I can add more tests to see what is enumerated after an output type has been selected, but TBH I don't think it really matters: none of the games I saw using this transform seem to care about what is enumerated there. See the sketch below for the kind of test I mean.
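Something along these lines (a sketch against the existing mf/transform tests; the transform setup and the expected NV12 subtype are assumptions):

    /* Sketch: after SetOutputType succeeds, check what is enumerated first.
     * Assumes a transform and output type set up as in the existing tests. */
    hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &media_type);
    ok(hr == S_OK, "GetOutputAvailableType returned %#lx\n", hr);
    hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &subtype);
    ok(hr == S_OK, "GetGUID returned %#lx\n", hr);
    ok(IsEqualGUID(&subtype, &MFVideoFormat_NV12), "got subtype %s\n", debugstr_guid(&subtype));
    IMFMediaType_Release(media_type);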
@@ -418,6 +425,7 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF
 {
     struct h264_decoder *decoder = impl_from_IMFTransform(iface);
     GUID major, subtype;
+    BOOL identical;
     HRESULT hr;
     ULONG i;
 
@@ -440,7 +448,14 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF
         return MF_E_INVALIDMEDIATYPE;
 
     if (decoder->output_type)
+    {
+        fill_output_media_type(decoder->output_type, NULL);
+        if (SUCCEEDED(hr = IMFMediaType_Compare(decoder->output_type, (IMFAttributes *)type,
+                MF_ATTRIBUTES_MATCH_THEIR_ITEMS, &identical)) && identical)
+            return S_OK;
So we require the user to specify a meaningless 29.97 frame rate (and so on), and reject different real values?
This is awfully surprising behaviour; do we have tests for it?
This is just checking that the new output type is the same as the one we just had from the stream format change, and that the transform doesn't need to be re-created.
AFAIU transform clients are supposed to enumerate types again on a stream format change and set the output type again. Most games simply call GetOutputAvailableType, pick the first type and pass it directly to SetOutputType without even looking at it. They still hold a lot of expectations about that type though, most notably that it's NV12 with the same plane alignment as native.
We don't have that level of detail in the tests. I can probably add more, though I'm not sure it's really interesting to implement that level of checking (as in requiring clients to enumerate types again, etc).
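To illustrate, the client pattern I'm describing looks roughly like this (a sketch, not code from any particular title):

    /* Sketch of the common client behaviour on stream format change: take the
     * first enumerated type and set it back without inspecting it. */
    hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status);
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE)
    {
        IMFMediaType *media_type;

        if (SUCCEEDED(IMFTransform_GetOutputAvailableType(transform, 0, 0, &media_type)))
        {
            IMFTransform_SetOutputType(transform, 0, media_type, 0);
            IMFMediaType_Release(media_type);
        }
    }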
         IMFMediaType_Release(decoder->output_type);
+    }
 
     IMFMediaType_AddRef((decoder->output_type = type));
 
     if (FAILED(hr = try_create_wg_transform(decoder)))
@@ -490,7 +505,23 @@ static HRESULT WINAPI transform_ProcessEvent(IMFTransform *iface, DWORD id, IMFM
 static HRESULT WINAPI transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param)
 {
+    struct h264_decoder *decoder = impl_from_IMFTransform(iface);
+
     FIXME("iface %p, message %#x, param %Ix stub!\n", iface, message, param);
 
+    switch (message)
+    {
+    case MFT_MESSAGE_NOTIFY_BEGIN_STREAMING:
+        /* Call of Duty Black Ops 3 expects to receive an MF_E_TRANSFORM_STREAM_CHANGE result again
+         * after it has called ProcessMessage with BEGIN_STREAMING. We could destroy and re-create
+         * the wg_transform instead but resetting the output format should be enough.
+         */
+        memset(&decoder->output_format, 0, sizeof(decoder->output_format));
+        break;
+    default:
+        break;
+    }
+
     return S_OK;
 }
 
@@ -524,6 +555,8 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags,
     struct h264_decoder *decoder = impl_from_IMFTransform(iface);
     MFT_OUTPUT_STREAM_INFO info;
     struct wg_sample *wg_sample;
+    IMFMediaType *media_type;
+    UINT64 frame_rate;
     HRESULT hr;
 
     TRACE("iface %p, flags %#lx, count %lu, samples %p, status %p.\n", iface, flags, count, samples, status);
 
@@ -537,6 +570,9 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags,
     if (!decoder->wg_transform)
         return MF_E_TRANSFORM_TYPE_NOT_SET;
 
+    if (FAILED(IMFMediaType_GetUINT64(decoder->output_type, &MF_MT_FRAME_RATE, &frame_rate)))
+        frame_rate = (UINT64)30000 << 32 | 1001;
 
     *status = 0;
     samples[0].dwStatus = 0;
     if (!samples[0].pSample)
         return E_INVALIDARG;
 
@@ -544,11 +580,24 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags,
     if (FAILED(hr = mf_create_wg_sample(samples[0].pSample, &wg_sample)))
         return hr;
 
+    wg_sample->format = &decoder->output_format;
     if (wg_sample->max_size < info.cbSize)
         hr = MF_E_BUFFERTOOSMALL;
     else
         hr = wg_transform_read_data(decoder->wg_transform, wg_sample);
 
+    if (hr == MF_E_TRANSFORM_STREAM_CHANGE)
+    {
+        media_type = mf_media_type_from_wg_format(&decoder->output_format);
+        IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_RATE, frame_rate);
So if the user never requests a specific frame rate, then we always force it to 29.97? And we ignore dynamic frame rate changes?
This is also surprising; do we have tests for these?
We have tests that this is the default frame rate, yes.
Then I think the reason this keeps and restores the frame rate is that the GStreamer output caps did not convey the frame rate, so we ended up with a 1/1 frame rate instead.
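For reference, MF_MT_FRAME_RATE packs the ratio into a single UINT64 (numerator in the high word, denominator in the low word), so the fallback above decodes as follows, equivalent to what MFGetAttributeRatio() does:

    UINT64 frame_rate = (UINT64)30000 << 32 | 1001; /* the MF_MT_FRAME_RATE fallback */
    UINT32 numerator = frame_rate >> 32;            /* 30000 */
    UINT32 denominator = (UINT32)frame_rate;        /* 1001 */
    /* 30000 / 1001 = 29.97 fps, the NTSC default */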