The test takes some inspiration from the SDK samples.
From: Bernhard Kölbl <besentv@gmail.com>
Signed-off-by: Bernhard Kölbl <besentv@gmail.com>
---
 dlls/mfmediaengine/tests/mfmediaengine.c | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c
index 3a5b2bf8253..86b2d4b3cf6 100644
--- a/dlls/mfmediaengine/tests/mfmediaengine.c
+++ b/dlls/mfmediaengine/tests/mfmediaengine.c
@@ -124,8 +124,8 @@ static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE
     ok(written == length, "written %lu bytes\n", written);
 }
 
-#define check_rgb32_data(a, b, c, d) check_rgb32_data_(__LINE__, a, b, c, d)
-static void check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, DWORD length, const RECT *rect)
+#define check_rgb32_data(a, b, c, d, e) check_rgb32_data_(__LINE__, a, b, c, d, e)
+static void check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, DWORD length, const RECT *rect, DWORD tolerance)
 {
     WCHAR output_path[MAX_PATH];
     const BYTE *expect_data;
@@ -146,7 +146,7 @@ static void check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data,
     expect_data = LockResource(LoadResource(GetModuleHandleW(NULL), resource));
 
     diff = compare_rgb32(data, &length, rect, expect_data);
-    ok_(__FILE__, line)(diff == 0, "Unexpected %lu%% diff\n", diff);
+    ok_(__FILE__, line)(diff <= tolerance, "Unexpected %lu%% diff\n", diff);
 }
 
 static void init_functions(void)
@@ -1297,7 +1297,7 @@ static void test_TransferVideoFrames(void)
     ok(!!map_desc.pData, "got pData %p\n", map_desc.pData);
     ok(map_desc.DepthPitch == 16384, "got DepthPitch %u\n", map_desc.DepthPitch);
     ok(map_desc.RowPitch == desc.Width * 4, "got RowPitch %u\n", map_desc.RowPitch);
-    check_rgb32_data(L"rgb32frame.bmp", map_desc.pData, map_desc.RowPitch * desc.Height, &dst_rect);
+    check_rgb32_data(L"rgb32frame.bmp", map_desc.pData, map_desc.RowPitch * desc.Height, &dst_rect, 0);
     ID3D11DeviceContext_Unmap(context, (ID3D11Resource *)rb_texture, 0);
ID3D11DeviceContext_Release(context);
From: Bernhard Kölbl <besentv@gmail.com>
Signed-off-by: Bernhard Kölbl <besentv@gmail.com>
---
 dlls/mfmediaengine/tests/mfmediaengine.c | 672 ++++++++++++++++++
 dlls/mfmediaengine/tests/resource.rc     |   4 +
 .../tests/rgb32frame_grayscale.bmp        | Bin 0 -> 16438 bytes
 3 files changed, 676 insertions(+)
 create mode 100644 dlls/mfmediaengine/tests/rgb32frame_grayscale.bmp
diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c
index 86b2d4b3cf6..27c1f017414 100644
--- a/dlls/mfmediaengine/tests/mfmediaengine.c
+++ b/dlls/mfmediaengine/tests/mfmediaengine.c
@@ -1313,6 +1313,677 @@ done:
     CloseHandle(notify.ready_event);
 }
 
+struct grayscale_video_effect
+{
+    IMFTransform IMFTransform_iface;
+    LONG refcount;
+
+    IMFMediaType *media_type_in, *media_type_out;
+    DWORD frame_size, frame_width, frame_height;
+    GUID **advertized_subtypes;
+    IMFSample *sample_buf;
+    LONG processing_delta;
+    CRITICAL_SECTION cs;
+};
+
+const GUID *media_subtypes[] =
+{
+    &MFVideoFormat_I420,
+    &MFVideoFormat_NV12,
+};
+
+static struct grayscale_video_effect *impl_from_IMFTransform(IMFTransform *iface)
+{
+    return CONTAINING_RECORD(iface, struct grayscale_video_effect, IMFTransform_iface);
+}
+
+static HRESULT WINAPI grayscale_video_effect_QueryInterface(IMFTransform *iface, REFIID iid, void **out)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+
+    if (IsEqualGUID(iid, &IID_IUnknown) ||
+            IsEqualGUID(iid, &IID_IMFTransform))
+    {
+        *out = &impl->IMFTransform_iface;
+        IUnknown_AddRef((IUnknown *)*out);
+        return S_OK;
+    }
+
+    *out = NULL;
+    return E_NOINTERFACE;
+}
+
+static ULONG WINAPI grayscale_video_effect_AddRef(IMFTransform *iface)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    ULONG refcount = InterlockedIncrement(&impl->refcount);
+    return refcount;
+}
+
+static ULONG WINAPI grayscale_video_effect_Release(IMFTransform *iface)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    ULONG refcount = InterlockedDecrement(&impl->refcount);
+
+    if (!refcount)
+    {
+        DeleteCriticalSection(&impl->cs);
+        if (impl->media_type_out) IMFMediaType_Release(impl->media_type_out);
+        if (impl->media_type_in) IMFMediaType_Release(impl->media_type_in);
+        free(impl);
+    }
+
+    return refcount;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetStreamLimits(IMFTransform *iface, DWORD *input_minimum,
+        DWORD *input_maximum, DWORD *output_minimum, DWORD *output_maximum)
+{
+    *input_minimum = *input_maximum = *output_minimum = *output_maximum = 1;
+    return S_OK;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD *outputs)
+{
+    *inputs = *outputs = 1;
+    return S_OK;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetStreamIDs(IMFTransform *iface, DWORD input_size, DWORD *inputs,
+        DWORD output_size, DWORD *outputs)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    info->dwFlags =
+            MFT_OUTPUT_STREAM_WHOLE_SAMPLES |
+            MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE |
+            MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER;
+
+    info->cbAlignment = 0;
+    info->cbSize = impl->frame_size;
+
+    return S_OK;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetAttributes(IMFTransform *iface, IMFAttributes **attributes)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_DeleteInputStream(IMFTransform *iface, DWORD id)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_AddInputStreams(IMFTransform *iface, DWORD streams,
+        DWORD *ids)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index,
+        IMFMediaType **type)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    IMFMediaType *mt;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+    if (impl->media_type_in)
+    {
+        if (index > 0)
+            goto no_more_types;
+
+        *type = impl->media_type_in;
+        IMFMediaType_AddRef(*type);
+    }
+    else
+    {
+        if (index >= ARRAY_SIZE(media_subtypes))
+            goto no_more_types;
+
+        MFCreateMediaType(&mt);
+        IMFMediaType_SetGUID(mt, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
+        IMFMediaType_SetGUID(mt, &MF_MT_SUBTYPE, media_subtypes[index]);
+        *type = mt;
+    }
+    LeaveCriticalSection(&impl->cs);
+
+    return S_OK;
+
+no_more_types:
+    LeaveCriticalSection(&impl->cs);
+
+    return MF_E_NO_MORE_TYPES;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetOutputAvailableType(IMFTransform *iface, DWORD id, DWORD index,
+        IMFMediaType **type)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    IMFMediaType *mt;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+    if (impl->media_type_out)
+    {
+        if (index > 0)
+            goto no_more_types;
+
+        *type = impl->media_type_out;
+        IMFMediaType_AddRef(*type);
+    }
+    else
+    {
+        if (index >= ARRAY_SIZE(media_subtypes))
+            goto no_more_types;
+
+        MFCreateMediaType(&mt);
+        IMFMediaType_SetGUID(mt, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
+        IMFMediaType_SetGUID(mt, &MF_MT_SUBTYPE, media_subtypes[index]);
+        *type = mt;
+    }
+    LeaveCriticalSection(&impl->cs);
+
+    return S_OK;
+
+no_more_types:
+    LeaveCriticalSection(&impl->cs);
+
+    return MF_E_NO_MORE_TYPES;
+}
+
+static HRESULT WINAPI grayscale_video_effect_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    UINT32 height, width;
+    PROPVARIANT propvar;
+    GUID value;
+    HRESULT hr;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+
+    if (FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &value)))
+        goto done;
+
+    if (FAILED(hr = IMFMediaType_GetItem(type, &MF_MT_FRAME_SIZE, &propvar)))
+        goto done;
+
+    impl->frame_width = width = propvar.uhVal.HighPart;
+    impl->frame_height = height = propvar.uhVal.LowPart;
+
+    if (impl->media_type_in) IMFMediaType_Release(impl->media_type_in);
+    impl->media_type_in = type;
+    IMFMediaType_AddRef(impl->media_type_in);
+
+    /* Idk why, but Windows apparently expects this function to also set an output type... */
+    if (impl->media_type_out) IMFMediaType_Release(impl->media_type_out);
+    impl->media_type_out = type;
+    IMFMediaType_AddRef(impl->media_type_out);
+
+    if (IsEqualGUID(&value, &MFVideoFormat_I420) ||
+            IsEqualGUID(&value, &MFVideoFormat_NV12))
+    {
+        impl->frame_size = width * (height + (height / 2)); /* 12bpp */
+    }
+    else if (IsEqualGUID(&value, &MFVideoFormat_YUY2) ||
+            IsEqualGUID(&value, &MFVideoFormat_UYVY))
+    {
+        impl->frame_size = width * height * 2; /* 16bpp */
+    }
+    else
+    {
+        impl->frame_size = 0;
+    }
+
+done:
+    LeaveCriticalSection(&impl->cs);
+    return S_OK;
+}
+
+static HRESULT WINAPI grayscale_video_effect_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+    if (impl->media_type_out) IMFMediaType_Release(impl->media_type_out);
+    impl->media_type_out = type;
+    IMFMediaType_AddRef(impl->media_type_out);
+    LeaveCriticalSection(&impl->cs);
+
+    return S_OK;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    HRESULT hr;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+    if (!impl->media_type_in)
+    {
+        hr = MF_E_TRANSFORM_TYPE_NOT_SET;
+    }
+    else
+    {
+        *type = impl->media_type_in;
+        IMFMediaType_AddRef(*type);
+        hr = S_OK;
+    }
+    LeaveCriticalSection(&impl->cs);
+
+    return hr;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    HRESULT hr;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    EnterCriticalSection(&impl->cs);
+    if (!impl->media_type_out)
+    {
+        hr = MF_E_TRANSFORM_TYPE_NOT_SET;
+    }
+    else
+    {
+        *type = impl->media_type_out;
+        IMFMediaType_AddRef(*type);
+        hr = S_OK;
+    }
+    LeaveCriticalSection(&impl->cs);
+
+    return hr;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_GetOutputStatus(IMFTransform *iface, DWORD *flags)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent *event)
+{
+    return E_NOTIMPL;
+}
+
+static HRESULT WINAPI grayscale_video_effect_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    HRESULT hr = S_OK;
+
+    EnterCriticalSection(&impl->cs);
+
+    switch (message)
+    {
+        case MFT_MESSAGE_COMMAND_FLUSH:
+            hr = E_NOTIMPL;
+            break;
+
+        default:
+            break;
+    }
+
+    LeaveCriticalSection(&impl->cs);
+
+    return hr;
+}
+
+static HRESULT WINAPI grayscale_video_effect_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    DWORD buf_count = 0;
+    HRESULT hr = S_OK;
+
+    if (id) return MF_E_INVALIDSTREAMNUMBER;
+
+    if (impl->sample_buf) return MF_E_NOTACCEPTING;
+
+    if (FAILED(hr = IMFSample_GetBufferCount(sample, &buf_count)))
+        goto done;
+
+    if (!buf_count)
+    {
+        hr = E_FAIL;
+        goto done;
+    }
+    else if (buf_count > 1)
+    {
+        hr = MF_E_SAMPLE_HAS_TOO_MANY_BUFFERS;
+    }
+
+    IMFSample_AddRef((impl->sample_buf = sample));
+    impl->processing_delta++;
+
+done:
+    return hr;
+}
+
+static HRESULT grayscale_video_effect_convert_frame(GUID *subtype, IMFMediaBuffer *in, IMFMediaBuffer *out, DWORD height, DWORD width)
+{
+    IMF2DBuffer *src_2d = NULL, *dst_2d = NULL;
+    DWORD len = 0, max = 0, i;
+    BYTE *src, *dst;
+    HRESULT hr;
+
+    if (FAILED(hr = IMFMediaBuffer_Lock(in, &src, NULL, NULL)))
+        goto done;
+
+    if (FAILED(hr = IMFMediaBuffer_Lock(out, &dst, &max, &len)))
+    {
+        IMFMediaBuffer_Unlock(in);
+        goto done;
+    }
+
+    if (IsEqualGUID(subtype, &MFVideoFormat_I420))
+    {
+        for (i = 0; i < height; ++i)
+        {
+            memcpy(dst, src, width);
+            dst += width;
+            src += width;
+        }
+        for (i = 0; i < height / 2; ++i)
+        {
+            memset(dst, 0x80, width / 2);
+            dst += width / 2;
+            src += width / 2;
+        }
+        for (i = 0; i < height / 2; ++i)
+        {
+            memset(dst, 0x80, width / 2);
+            dst += width / 2;
+            src += width / 2;
+        }
+    }
+    else if (IsEqualGUID(subtype, &MFVideoFormat_NV12))
+    {
+        for (i = 0; i < height; ++i)
+        {
+            memcpy(dst, src, width);
+            dst += width;
+            src += width;
+        }
+        for (i = 0; i < height / 2; ++i)
+        {
+            memset(dst, 0x80, width);
+            dst += width;
+            src += width;
+        }
+    }
+    else hr = E_FAIL;
+
+    hr = IMFMediaBuffer_SetCurrentLength(out, max);
+
+    IMFMediaBuffer_Unlock(out);
+    IMFMediaBuffer_Unlock(in);
+
+done:
+    if (dst_2d) IMF2DBuffer_Release(dst_2d);
+    if (src_2d) IMF2DBuffer_Release(src_2d);
+    return hr;
+}
+
+static HRESULT WINAPI grayscale_video_effect_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count,
+        MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status)
+{
+    struct grayscale_video_effect *impl = impl_from_IMFTransform(iface);
+    IMFMediaBuffer *in_buf = NULL, *out_buf = NULL;
+    LONGLONG duration = 0, time = 0;
+    HRESULT hr = S_OK;
+    GUID subtype;
+
+    if (count != 1) return E_INVALIDARG;
+
+    EnterCriticalSection(&impl->cs);
+    if (!impl->sample_buf)
+    {
+        hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
+        goto done;
+    }
+
+    if (FAILED(hr = IMFMediaType_GetGUID(impl->media_type_in, &MF_MT_SUBTYPE, &subtype)))
+        goto done;
+
+    if (SUCCEEDED(hr = IMFSample_GetSampleDuration(impl->sample_buf, &duration)))
+    {
+        if (FAILED(hr = IMFSample_SetSampleDuration(samples[0].pSample, duration)))
+            goto done;
+    }
+
+    if (SUCCEEDED(hr = IMFSample_GetSampleTime(impl->sample_buf, &time)))
+    {
+        if (FAILED(hr = IMFSample_SetSampleTime(samples[0].pSample, time)))
+            goto done;
+    }
+
+    if (FAILED(hr = IMFSample_ConvertToContiguousBuffer(impl->sample_buf, &in_buf)))
+        goto done;
+
+    if (FAILED(hr = IMFSample_ConvertToContiguousBuffer(samples[0].pSample, &out_buf)))
+        goto done;
+
+    if (FAILED(hr = grayscale_video_effect_convert_frame(&subtype, in_buf, out_buf, impl->frame_height, impl->frame_width)))
+        goto done;
+
+    *status = samples[0].dwStatus = 0;
+    impl->processing_delta--;
+done:
+    if (impl->sample_buf) IMFSample_Release(impl->sample_buf);
+    impl->sample_buf = NULL;
+    LeaveCriticalSection(&impl->cs);
+    if (out_buf) IMFMediaBuffer_Release(out_buf);
+    if (in_buf) IMFMediaBuffer_Release(in_buf);
+    return hr;
+}
+
+static const IMFTransformVtbl grayscale_video_effect_vtbl =
+{
+    grayscale_video_effect_QueryInterface,
+    grayscale_video_effect_AddRef,
+    grayscale_video_effect_Release,
+    grayscale_video_effect_GetStreamLimits,
+    grayscale_video_effect_GetStreamCount,
+    grayscale_video_effect_GetStreamIDs,
+    grayscale_video_effect_GetInputStreamInfo,
+    grayscale_video_effect_GetOutputStreamInfo,
+    grayscale_video_effect_GetAttributes,
+    grayscale_video_effect_GetInputStreamAttributes,
+    grayscale_video_effect_GetOutputStreamAttributes,
+    grayscale_video_effect_DeleteInputStream,
+    grayscale_video_effect_AddInputStreams,
+    grayscale_video_effect_GetInputAvailableType,
+    grayscale_video_effect_GetOutputAvailableType,
+    grayscale_video_effect_SetInputType,
+    grayscale_video_effect_SetOutputType,
+    grayscale_video_effect_GetInputCurrentType,
+    grayscale_video_effect_GetOutputCurrentType,
+    grayscale_video_effect_GetInputStatus,
+    grayscale_video_effect_GetOutputStatus,
+    grayscale_video_effect_SetOutputBounds,
+    grayscale_video_effect_ProcessEvent,
+    grayscale_video_effect_ProcessMessage,
+    grayscale_video_effect_ProcessInput,
+    grayscale_video_effect_ProcessOutput,
+};
+
+HRESULT grayscale_video_effect_create(struct grayscale_video_effect **out)
+{
+    struct grayscale_video_effect *impl;
+
+    if (!(impl = calloc(1, sizeof(*impl))))
+        return E_OUTOFMEMORY;
+
+    impl->IMFTransform_iface.lpVtbl = &grayscale_video_effect_vtbl;
+    impl->refcount = 1;
+
+    InitializeCriticalSection(&impl->cs);
+
+    *out = impl;
+    return S_OK;
+}
+
+static void test_video_effect(void)
+{
+    struct test_transfer_notify notify = {{&test_transfer_notify_vtbl}};
+    WCHAR url[] = { L"i420-64x64.avi" };
+    struct grayscale_video_effect *video_effect;
+    ID3D11Texture2D *texture, *rb_texture;
+    D3D11_MAPPED_SUBRESOURCE map_desc;
+    IMFMediaEngineEx *media_engine_ex;
+    IMFDXGIDeviceManager *manager;
+    ID3D11DeviceContext *context;
+    D3D11_TEXTURE2D_DESC desc;
+    IMFByteStream *stream;
+    ID3D11Device *device;
+    RECT dst_rect;
+    UINT token;
+    HRESULT hr;
+    DWORD res;
+
+    stream = load_resource(L"i420-64x64.avi", L"video/avi");
+
+    notify.ready_event = CreateEventW(NULL, FALSE, FALSE, NULL);
+    ok(!!notify.ready_event, "CreateEventW failed, error %lu\n", GetLastError());
+
+    if (!(device = create_d3d11_device()))
+    {
+        skip("Failed to create a D3D11 device, skipping tests.\n");
+        return;
+    }
+
+    hr = pMFCreateDXGIDeviceManager(&token, &manager);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+    hr = IMFDXGIDeviceManager_ResetDevice(manager, (IUnknown *)device, token);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    media_engine_ex = create_media_engine_ex(&notify.IMFMediaEngineNotify_iface,
+            manager, DXGI_FORMAT_B8G8R8X8_UNORM);
+
+    IMFDXGIDeviceManager_Release(manager);
+
+    if (!(notify.media_engine = media_engine_ex))
+        return;
+
+    memset(&desc, 0, sizeof(desc));
+    desc.Width = 64;
+    desc.Height = 64;
+    desc.ArraySize = 1;
+    desc.Format = DXGI_FORMAT_B8G8R8X8_UNORM;
+    desc.BindFlags = D3D11_BIND_RENDER_TARGET;
+    desc.SampleDesc.Count = 1;
+    hr = ID3D11Device_CreateTexture2D(device, &desc, NULL, &texture);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    video_effect = NULL;
+    hr = grayscale_video_effect_create(&video_effect);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    hr = IMFMediaEngineEx_InsertVideoEffect(media_engine_ex, (IUnknown *)&video_effect->IMFTransform_iface, FALSE);
+    todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    if (FAILED(hr))
+        goto done;
+
+    hr = IMFMediaEngineEx_SetSourceFromByteStream(media_engine_ex, stream, url);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+    IMFByteStream_Release(stream);
+
+    res = WaitForSingleObject(notify.ready_event, 5000);
+    ok(!res, "Unexpected res %#lx.\n", res);
+
+    if (FAILED(notify.error))
+    {
+        win_skip("Media engine reported error %#lx, skipping tests.\n", notify.error);
+        goto done;
+    }
+
+    /* FIXME: Wine first video frame is often full of garbage, wait for another update */
+    res = WaitForSingleObject(notify.ready_event, 500);
+    /* It's also missing the MF_MEDIA_ENGINE_EVENT_TIMEUPDATE notifications */
+    todo_wine
+    ok(!res, "Unexpected res %#lx.\n", res);
+
+    SetRect(&dst_rect, 0, 0, desc.Width, desc.Height);
+    hr = IMFMediaEngineEx_TransferVideoFrame(notify.media_engine, (IUnknown *)texture, NULL, NULL, NULL);
+    ok(hr == S_OK || broken(hr == E_POINTER) /* w1064v1507 */, "Unexpected hr %#lx.\n", hr);
+
+    ID3D11Texture2D_GetDesc(texture, &desc);
+    desc.Usage = D3D11_USAGE_STAGING;
+    desc.BindFlags = 0;
+    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
+    desc.MiscFlags = 0;
+    hr = ID3D11Device_CreateTexture2D(device, &desc, NULL, &rb_texture);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    ID3D11Device_GetImmediateContext(device, &context);
+    ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)rb_texture,
+            0, 0, 0, 0, (ID3D11Resource *)texture, 0, NULL);
+
+    memset(&map_desc, 0, sizeof(map_desc));
+    hr = ID3D11DeviceContext_Map(context, (ID3D11Resource *)rb_texture, 0, D3D11_MAP_READ, 0, &map_desc);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+    ok(!!map_desc.pData, "got pData %p\n", map_desc.pData);
+    ok(map_desc.DepthPitch == 16384, "got DepthPitch %u\n", map_desc.DepthPitch);
+    ok(map_desc.RowPitch == desc.Width * 4, "got RowPitch %u\n", map_desc.RowPitch);
+    check_rgb32_data(L"rgb32frame_grayscale.bmp", map_desc.pData, map_desc.RowPitch * desc.Height, &dst_rect, 2);
+    ID3D11DeviceContext_Unmap(context, (ID3D11Resource *)rb_texture, 0);
+
+    ok(video_effect->processing_delta == 0, "Unexpected delta %lu.\n", video_effect->processing_delta);
+
+    ID3D11DeviceContext_Release(context);
+    ID3D11Texture2D_Release(rb_texture);
+
+done:
+    IMFTransform_Release(&video_effect->IMFTransform_iface);
+    IMFMediaEngineEx_Shutdown(media_engine_ex);
+    IMFMediaEngineEx_Release(media_engine_ex);
+    ID3D11Texture2D_Release(texture);
+    ID3D11Device_Release(device);
+
+    CloseHandle(notify.ready_event);
+}
+
 START_TEST(mfmediaengine)
 {
     HRESULT hr;
@@ -1344,6 +2015,7 @@ START_TEST(mfmediaengine)
     test_SetSourceFromByteStream();
     test_audio_configuration();
     test_TransferVideoFrames();
+    test_video_effect();
IMFMediaEngineClassFactory_Release(factory);
diff --git a/dlls/mfmediaengine/tests/resource.rc b/dlls/mfmediaengine/tests/resource.rc
index 50152586758..8219738bdb9 100644
--- a/dlls/mfmediaengine/tests/resource.rc
+++ b/dlls/mfmediaengine/tests/resource.rc
@@ -30,3 +30,7 @@ i420-64x64.avi RCDATA i420-64x64.avi
 /* Generated from running the tests on Windows */
 /* @makedep: rgb32frame.bmp */
 rgb32frame.bmp RCDATA rgb32frame.bmp
+
+/* Generated from running the tests on Windows */
+/* @makedep: rgb32frame_grayscale.bmp */
+rgb32frame_grayscale.bmp RCDATA rgb32frame_grayscale.bmp
diff --git a/dlls/mfmediaengine/tests/rgb32frame_grayscale.bmp b/dlls/mfmediaengine/tests/rgb32frame_grayscale.bmp
new file mode 100644
index 0000000000000000000000000000000000000000..c352388b7d1cbb1fb86de6b966733ae7b229b7d8
GIT binary patch
literal 16438
zcmeIw!3l#v5Qb4xNF+xq@Vs~p+lN?CEF?A*?8C_-UaVo>!Z+;s=S_7i-TB6LhgP|I
zbg%YnublUG;)sw97+`<_1{h!<lY#xWhit$A0}L?000Wr}Y~L+p0|ppifB^;=$Yfys
ztRWjPzyJdbFu*`21IxUGY`_2m3^2d|1DOoecMaKq0R|XgfB^<F8R(yV$Oa5BzyJdb
JFu*__1AjfK0tx^C

literal 0
HcmV?d00001
From: Bernhard Kölbl <besentv@gmail.com>
---
 dlls/mf/topology.c | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/dlls/mf/topology.c b/dlls/mf/topology.c
index 25a00708100..b684b17ff72 100644
--- a/dlls/mf/topology.c
+++ b/dlls/mf/topology.c
@@ -712,7 +712,12 @@ static HRESULT WINAPI topology_CloneFrom(IMFTopology *iface, IMFTopology *src)
             for (j = 0; j < outputs->count; ++j)
             {
                 DWORD input_index = outputs->streams[j].connection_stream;
-                TOPOID id = outputs->streams[j].connection->id;
+                TOPOID id;
+
+                if (!outputs->streams[j].connection)
+                    continue;
+
+                id = outputs->streams[j].connection->id;
 
                 /* Skip node lookup in destination topology, assuming same node order. */
                 if (SUCCEEDED(hr = topology_get_node_by_id(topology, id, &node)))
From: Bernhard Kölbl <besentv@gmail.com>
---
 dlls/mfmediaengine/main.c                | 58 ++++++++++++++++++++++--
 dlls/mfmediaengine/tests/mfmediaengine.c |  2 +-
 2 files changed, 55 insertions(+), 5 deletions(-)
diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c
index 63e9a8f07d5..7b28b036911 100644
--- a/dlls/mfmediaengine/main.c
+++ b/dlls/mfmediaengine/main.c
@@ -146,6 +146,11 @@ struct media_engine
         IMFPresentationDescriptor *pd;
     } presentation;
     struct
+    {
+        IUnknown *video;
+        BOOL video_optional;
+    } effects;
+    struct
     {
         LONGLONG pts;
         SIZE size;
@@ -1018,6 +1023,30 @@ static HRESULT media_engine_create_source_node(IMFMediaSource *source, IMFPresen
     return S_OK;
 }
 
+static HRESULT media_engine_create_video_effect(struct media_engine *engine, IMFTopologyNode **node)
+{
+    HRESULT hr;
+
+    *node = NULL;
+
+    if (!engine->effects.video)
+        return S_OK;
+
+    if (FAILED(hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, node)))
+        return hr;
+
+    IMFTopologyNode_SetObject(*node, (IUnknown *)engine->effects.video);
+    IMFTopologyNode_SetUINT32(*node, &MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE);
+
+    if (engine->effects.video_optional)
+        IMFTopologyNode_SetUINT32(*node, &MF_TOPONODE_CONNECT_METHOD, MF_CONNECT_AS_OPTIONAL);
+
+    IUnknown_Release(engine->effects.video);
+    engine->effects.video = NULL;
+
+    return hr;
+}
+
 static HRESULT media_engine_create_audio_renderer(struct media_engine *engine, IMFTopologyNode **node)
 {
     unsigned int category, role;
@@ -1189,7 +1218,7 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi
     if (SUCCEEDED(hr = MFCreateTopology(&topology)))
     {
         IMFTopologyNode *sar_node = NULL, *audio_src = NULL;
-        IMFTopologyNode *grabber_node = NULL, *video_src = NULL;
+        IMFTopologyNode *grabber_node = NULL, *video_src = NULL, *video_effect = NULL;
 
         if (engine->flags & MF_MEDIA_ENGINE_REAL_TIME_MODE)
            IMFTopology_SetUINT32(topology, &MF_LOW_LATENCY, TRUE);
@@ -1223,16 +1252,29 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi
             if (FAILED(hr = media_engine_create_video_renderer(engine, &grabber_node)))
                 WARN("Failed to create video grabber node, hr %#lx.\n", hr);
 
+            if (FAILED(media_engine_create_video_effect(engine, &video_effect)))
+                WARN("Failed to create video effect node, hr %#lx.\n", hr);
+
             if (grabber_node && video_src)
             {
                 IMFTopology_AddNode(topology, video_src);
                 IMFTopology_AddNode(topology, grabber_node);
-                IMFTopologyNode_ConnectOutput(video_src, 0, grabber_node, 0);
+
+                if (video_effect)
+                {
+                    IMFTopology_AddNode(topology, video_effect);
+                    IMFTopologyNode_ConnectOutput(video_src, 0, video_effect, 0);
+                    IMFTopologyNode_ConnectOutput(video_effect, 0, grabber_node, 0);
+                }
+                else
+                    IMFTopologyNode_ConnectOutput(video_src, 0, grabber_node, 0);
             }
 
             if (SUCCEEDED(hr))
                 IMFTopologyNode_GetTopoNodeID(video_src, &engine->video_frame.node_id);
 
+            if (video_effect)
+                IMFTopologyNode_Release(video_effect);
             if (grabber_node)
                 IMFTopologyNode_Release(grabber_node);
             if (video_src)
@@ -2579,9 +2621,17 @@ static HRESULT WINAPI media_engine_IsProtected(IMFMediaEngineEx *iface, BOOL *pr
 
 static HRESULT WINAPI media_engine_InsertVideoEffect(IMFMediaEngineEx *iface, IUnknown *effect, BOOL is_optional)
 {
-    FIXME("%p, %p, %d stub.\n", iface, effect, is_optional);
+    struct media_engine *impl = impl_from_IMFMediaEngineEx(iface);
+
+    TRACE("%p, %p, %d.\n", iface, effect, is_optional);
 
-    return E_NOTIMPL;
+    if (impl->effects.video)
+        IUnknown_Release(impl->effects.video);
+
+    impl->effects.video = effect;
+    IUnknown_AddRef(impl->effects.video);
+    impl->effects.video_optional = is_optional;
+
+    return S_OK;
 }
 
 static HRESULT WINAPI media_engine_InsertAudioEffect(IMFMediaEngineEx *iface, IUnknown *effect, BOOL is_optional)
diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c
index 27c1f017414..16757de99e9 100644
--- a/dlls/mfmediaengine/tests/mfmediaengine.c
+++ b/dlls/mfmediaengine/tests/mfmediaengine.c
@@ -1920,7 +1920,7 @@ static void test_video_effect(void)
     ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
 
     hr = IMFMediaEngineEx_InsertVideoEffect(media_engine_ex, (IUnknown *)&video_effect->IMFTransform_iface, FALSE);
-    todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
 
     if (FAILED(hr))
         goto done;
Hi,
It looks like your patch introduced the new failures shown below. Please investigate and fix them before resubmitting your patch. If they are not new, fixing them anyway would help a lot. Otherwise please ask for the known failures list to be updated.
The tests also ran into some preexisting test failures. If you know how to fix them that would be helpful. See the TestBot job for the details:
The full results can be found at: https://testbot.winehq.org/JobDetails.pl?Key=132526
Your paranoid android.
=== w8 (32 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w8adm (32 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w864 (32 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w1064v1507 (32 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w864 (64 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w1064v1507 (64 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 56% diff
=== w11pro64_amd (64 bit report) ===
mfmediaengine: mfmediaengine.c:1969: Test failed: Unexpected 40% diff
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/tests/mfmediaengine.c:
CloseHandle(notify.ready_event);
}
+struct grayscale_video_effect
+{
+    IMFTransform IMFTransform_iface;
+    LONG refcount;
+
+    IMFMediaType *media_type_in, *media_type_out;
+    DWORD frame_size, frame_width, frame_height;
+    GUID **advertized_subtypes;
+    IMFSample *sample_buf;
+    LONG processing_delta;
+    CRITICAL_SECTION cs;
+};
This is way too much for such a test. It would be enough to have some stub transform that outputs solid color fills, ignoring the input completely.
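For illustration, here is a minimal sketch of what the ProcessOutput of such a stub could look like, assuming the output type has already been negotiated as 32-bit RGB and the caller provides the output sample; the function name and structure are made up for the example, not taken from the patch:

static HRESULT WINAPI stub_transform_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count,
        MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status)
{
    IMFMediaBuffer *buffer;
    DWORD length, i;
    BYTE *data;
    HRESULT hr;

    if (count != 1) return E_INVALIDARG;

    if (FAILED(hr = IMFSample_ConvertToContiguousBuffer(samples[0].pSample, &buffer)))
        return hr;

    if (SUCCEEDED(hr = IMFMediaBuffer_Lock(buffer, &data, &length, NULL)))
    {
        /* Fill the whole frame with a solid color, ignoring any input. */
        for (i = 0; i < length; i += 4)
        {
            data[i + 0] = 0xff; /* B */
            data[i + 1] = 0x00; /* G */
            data[i + 2] = 0x00; /* R */
            data[i + 3] = 0xff; /* X */
        }
        IMFMediaBuffer_Unlock(buffer);
        hr = IMFMediaBuffer_SetCurrentLength(buffer, length);
    }

    IMFMediaBuffer_Release(buffer);
    *status = samples[0].dwStatus = 0;
    return hr;
}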
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/tests/mfmediaengine.c:
+    notify.ready_event = CreateEventW(NULL, FALSE, FALSE, NULL);
+    ok(!!notify.ready_event, "CreateEventW failed, error %lu\n", GetLastError());
+
+    if (!(device = create_d3d11_device()))
+    {
+        skip("Failed to create a D3D11 device, skipping tests.\n");
+        return;
+    }
+
+    hr = pMFCreateDXGIDeviceManager(&token, &manager);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+    hr = IMFDXGIDeviceManager_ResetDevice(manager, (IUnknown *)device, token);
+    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
+
+    media_engine_ex = create_media_engine_ex(&notify.IMFMediaEngineNotify_iface,
+            manager, DXGI_FORMAT_B8G8R8X8_UNORM);
Why do you need d3d support here?
Nikolay Sivov (@nsivov) commented about dlls/mf/topology.c:
             for (j = 0; j < outputs->count; ++j)
             {
                 DWORD input_index = outputs->streams[j].connection_stream;
-                TOPOID id = outputs->streams[j].connection->id;
+                TOPOID id;
+
+                if (!outputs->streams[j].connection)
+                    continue;
+
+                id = outputs->streams[j].connection->id;
This needs to be a separate MR, with a test.
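A rough idea of what such a test might look like (a hypothetical sketch, not part of this series; the expected results would still need to be confirmed on Windows). Disconnecting a previously connected output should leave an output stream with a NULL connection, which is exactly the case the patch guards against:

static void test_CloneFrom_disconnected_output(void)
{
    IMFTopology *topology, *clone;
    IMFTopologyNode *node1, *node2;
    HRESULT hr;

    hr = MFCreateTopology(&topology);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
    hr = MFCreateTopology(&clone);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);

    hr = MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &node1);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
    hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, &node2);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);

    /* Leave node1 with an output stream whose connection was torn down. */
    hr = IMFTopologyNode_ConnectOutput(node1, 0, node2, 0);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
    hr = IMFTopologyNode_DisconnectOutput(node1, 0);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);

    hr = IMFTopology_AddNode(topology, node1);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
    hr = IMFTopology_AddNode(topology, node2);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);

    hr = IMFTopology_CloneFrom(clone, topology);
    ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);

    IMFTopologyNode_Release(node2);
    IMFTopologyNode_Release(node1);
    IMFTopology_Release(clone);
    IMFTopology_Release(topology);
}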
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/main.c:
         IMFPresentationDescriptor *pd;
     } presentation;
     struct
+    {
+        IUnknown *video;
+        BOOL video_optional;
+    } effects;
Docs suggest that you can have a chain of effects. Is that not the case?
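If multiple effects do have to be supported, one possible shape for the state is a small dynamic array instead of a single pointer. This is only a sketch under that assumption; the struct and helper names are illustrative, and the engine's lock is assumed to be held by the caller:

struct effect
{
    IUnknown *object;
    BOOL optional;
};

struct effects
{
    struct effect *elements;
    size_t count;
    size_t capacity;
};

/* Hypothetical helper appending an effect to such a list. */
static HRESULT effects_append(struct effects *effects, IUnknown *object, BOOL optional)
{
    if (effects->count == effects->capacity)
    {
        size_t new_capacity = effects->capacity ? effects->capacity * 2 : 4;
        struct effect *new_elements = realloc(effects->elements, new_capacity * sizeof(*new_elements));
        if (!new_elements) return E_OUTOFMEMORY;
        effects->elements = new_elements;
        effects->capacity = new_capacity;
    }

    effects->elements[effects->count].object = object;
    effects->elements[effects->count].optional = optional;
    IUnknown_AddRef(object);
    effects->count++;
    return S_OK;
}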
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/main.c:
 static HRESULT WINAPI media_engine_InsertVideoEffect(IMFMediaEngineEx *iface, IUnknown *effect, BOOL is_optional)
 {
-    FIXME("%p, %p, %d stub.\n", iface, effect, is_optional);
+    struct media_engine *impl = impl_from_IMFMediaEngineEx(iface);
+
+    TRACE("%p, %p, %d.\n", iface, effect, is_optional);
 
-    return E_NOTIMPL;
+    if (impl->effects.video)
+        IUnknown_Release(impl->effects.video);
+
+    impl->effects.video = effect;
+    IUnknown_AddRef(impl->effects.video);
+    impl->effects.video_optional = is_optional;
+
+    return S_OK;
You probably need to handle shutdown state, and protect with a mutex.
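For example, something along these lines, assuming the engine's existing critical section and shutdown flag in main.c (the exact flag name may differ); a sketch, not the submitted change:

static HRESULT WINAPI media_engine_InsertVideoEffect(IMFMediaEngineEx *iface, IUnknown *effect, BOOL is_optional)
{
    struct media_engine *impl = impl_from_IMFMediaEngineEx(iface);
    HRESULT hr = S_OK;

    TRACE("%p, %p, %d.\n", iface, effect, is_optional);

    EnterCriticalSection(&impl->cs);

    if (impl->flags & FLAGS_ENGINE_SHUT_DOWN) /* assumed flag name */
        hr = MF_E_SHUTDOWN;
    else
    {
        if (impl->effects.video)
            IUnknown_Release(impl->effects.video);
        impl->effects.video = effect;
        IUnknown_AddRef(impl->effects.video);
        impl->effects.video_optional = is_optional;
    }

    LeaveCriticalSection(&impl->cs);

    return hr;
}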
On Tue May 9 21:17:04 2023 +0000, Nikolay Sivov wrote:
Docs suggest that you can have a chain of effects. Is that not the case?
Which docs are you referring to?
On Tue May 9 21:17:02 2023 +0000, Nikolay Sivov wrote:
This is way too much for such a test. It would be enough to have some stub transform that outputs solid color fills, ignoring the input completely.
My idea was to have a reusable transform that could also be used for more extensive tests in mf. Also, I suspect Windows won't like it if the input doesn't work?
On Tue May 9 21:43:29 2023 +0000, Bernhard Kölbl wrote:
Which docs are you referring to?
The InsertVideoEffect() page talks about an error code for the maximum number of effects. A RemoveAllEffects() method also exists, and if this matches IMFPMediaPlayer at all, it could mean that you can add multiple effects.
On Tue May 9 21:49:47 2023 +0000, Bernhard Kölbl wrote:
My idea was to have a reusable transform that could also be used for more extensive tests in mf. Also, I suspect Windows won't like it if the input doesn't work?
For this API we don't need it to be extensive. It probably will work if it only supports some RGB output.
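For an RGB-only stub, the output type enumeration could be as small as this (a sketch; the function name is illustrative):

static HRESULT WINAPI stub_transform_GetOutputAvailableType(IMFTransform *iface, DWORD id, DWORD index,
        IMFMediaType **type)
{
    IMFMediaType *media_type;
    HRESULT hr;

    if (id) return MF_E_INVALIDSTREAMNUMBER;
    if (index > 0) return MF_E_NO_MORE_TYPES;

    if (FAILED(hr = MFCreateMediaType(&media_type)))
        return hr;

    /* Advertise a single RGB32 video type and nothing else. */
    IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
    IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32);

    *type = media_type;
    return S_OK;
}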
On Tue May 9 21:51:19 2023 +0000, Nikolay Sivov wrote:
The InsertVideoEffect() page talks about an error code for the maximum number of effects. A RemoveAllEffects() method also exists, and if this matches IMFPMediaPlayer at all, it could mean that you can add multiple effects.
Alright, I'll make it able to accept multiple effects.
On Wed May 10 10:00:55 2023 +0000, Bernhard Kölbl wrote:
Alright, I'll make it able to accept multiple effects.
Only if it actually has to support that.
On Tue May 9 21:54:29 2023 +0000, Nikolay Sivov wrote:
For this API we don't need it to be extensive. It probably will work if it only supports some RGB output.
Okay, so how about making this MFT output a static color of choice over a certain part of the frame, so we can chain a few MFTs and test that chaining as well? For example, in the end we would get an image that is blue in the top left, red in the top right, green in the bottom left, and so on?
On Tue May 9 21:17:03 2023 +0000, Nikolay Sivov wrote:
Why do you need d3d support here?
VRChat does this as well, but not sure if it's really needed.
On Mon May 15 21:01:58 2023 +0000, Bernhard Kölbl wrote:
Okay, so how about making this MFT output a static color of choice over a certain part of the frame, so we can chain a few MFTs and test that chaining as well? For example, in the end we would get an image that is blue in the top left, red in the top right, green in the bottom left, and so on?
Yes, that's fine. You could use minimal frame size that works, like 4x4 or 16x16.
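A sketch of the per-effect fill step under that scheme, assuming an RGB32 frame and that each chained effect is parameterized with its own quadrant and color (the helper name is made up):

/* Paint one quadrant of an RGB32 frame with a solid color; which quadrant and
 * which color would be per-effect parameters. */
static void fill_quadrant_rgb32(BYTE *frame, UINT width, UINT height, UINT pitch,
        UINT quadrant, DWORD color)
{
    UINT x0 = (quadrant & 1) ? width / 2 : 0;
    UINT y0 = (quadrant & 2) ? height / 2 : 0;
    UINT x, y;

    for (y = y0; y < y0 + height / 2; ++y)
    {
        DWORD *row = (DWORD *)(frame + y * pitch);
        for (x = x0; x < x0 + width / 2; ++x)
            row[x] = color;
    }
}

With a 16x16 frame and four such effects using different quadrant/color pairs, a single TransferVideoFrame() readback would show whether every effect in the chain actually ran.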
On Mon May 15 21:03:31 2023 +0000, Bernhard Kölbl wrote:
VRChat does this as well, but not sure if it's really needed.
It's probably useful for TransferVideoFrame(), but for the test there's no need to handle it in the MFTs.
On Mon May 15 21:22:19 2023 +0000, Nikolay Sivov wrote:
It's probably useful for TransferVideoFrame(), but for the test there's no need to handle it in the MFTs.
I'm not entirely sure what you mean by that. I want to use TransferVideoFrame() to check if the final result has all inserted effects applied.
On Tue May 23 10:38:54 2023 +0000, Bernhard Kölbl wrote:
I'm not entirely sure what you mean by that. I want to use TransferVideoFrame() to check if the final result has all inserted effects applied.
Ok, I see. For some reason I thought the d3d samples were also handled by the test MFT. A grayscale transform might be good to have because it's available on Windows too, but for test purposes it's enough to have some passthrough transform that sets some predefined location to a predefined value, so you can later check that the whole chain works. Still, I'd rather have some test player demo program based on mfmediaengine that I can run manually than all this test code, but if you can reduce it to the bare minimum, that's fine too.