This commit replaces the SampleGrabberSink with a dedicated Video Sink, referred to as the Simple Video Renderer (SVR).
This brings it more in line with Windows and provides the benefit of having direct access to the IMFSample, removing the need to copy the sample data.
-- v6: mfmediaengine: Fallback to sample copy if scaling is required. mfmediaengine: Implement D3D-aware video frame sink. mfmediaengine: Implement SVR.
From: Brendan McGrath bmcgrath@codeweavers.com
This commit replaces the SampleGrabberSink with a dedicated Video Sink, referred to as the Simple Video Renderer (SVR).
This brings it more in line with Windows and provides the benefit of having direct access to the IMFSample, removing the need to copy the sample data.
Based on a patch by Nikolay Sivov. --- dlls/mfmediaengine/Makefile.in | 3 +- dlls/mfmediaengine/main.c | 255 ++--- dlls/mfmediaengine/mediaengine_private.h | 29 + dlls/mfmediaengine/tests/mfmediaengine.c | 9 +- dlls/mfmediaengine/video_frame_sink.c | 1209 ++++++++++++++++++++++ 5 files changed, 1354 insertions(+), 151 deletions(-) create mode 100644 dlls/mfmediaengine/mediaengine_private.h create mode 100644 dlls/mfmediaengine/video_frame_sink.c
diff --git a/dlls/mfmediaengine/Makefile.in b/dlls/mfmediaengine/Makefile.in index 8e4bf011d81..5b593814ef6 100644 --- a/dlls/mfmediaengine/Makefile.in +++ b/dlls/mfmediaengine/Makefile.in @@ -5,4 +5,5 @@ EXTRADLLFLAGS = -Wb,--prefer-native
SOURCES = \ main.c \ - mediaengine_classes.idl + mediaengine_classes.idl \ + video_frame_sink.c diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index 026b825a7a5..caeab3dea30 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -32,6 +32,8 @@ #include "mmdeviceapi.h" #include "audiosessiontypes.h"
+#include "mediaengine_private.h" + #include "wine/debug.h"
WINE_DEFAULT_DEBUG_CHANNEL(mfplat); @@ -138,8 +140,8 @@ struct media_engine IMFMediaEngineEx IMFMediaEngineEx_iface; IMFGetService IMFGetService_iface; IMFAsyncCallback session_events; + IMFAsyncCallback sink_events; IMFAsyncCallback load_handler; - IMFSampleGrabberSinkCallback grabber_callback; LONG refcount; IMFMediaEngineNotify *callback; IMFAttributes *attributes; @@ -166,6 +168,7 @@ struct media_engine IMFMediaSource *source; IMFPresentationDescriptor *pd; PROPVARIANT start_position; + struct video_frame_sink *frame_sink; } presentation; struct effects video_effects; struct effects audio_effects; @@ -784,14 +787,14 @@ static struct media_engine *impl_from_session_events_IMFAsyncCallback(IMFAsyncCa return CONTAINING_RECORD(iface, struct media_engine, session_events); }
-static struct media_engine *impl_from_load_handler_IMFAsyncCallback(IMFAsyncCallback *iface) +static struct media_engine *impl_from_sink_events_IMFAsyncCallback(IMFAsyncCallback *iface) { - return CONTAINING_RECORD(iface, struct media_engine, load_handler); + return CONTAINING_RECORD(iface, struct media_engine, sink_events); }
-static struct media_engine *impl_from_IMFSampleGrabberSinkCallback(IMFSampleGrabberSinkCallback *iface) +static struct media_engine *impl_from_load_handler_IMFAsyncCallback(IMFAsyncCallback *iface) { - return CONTAINING_RECORD(iface, struct media_engine, grabber_callback); + return CONTAINING_RECORD(iface, struct media_engine, load_handler); }
static unsigned int get_gcd(unsigned int a, unsigned int b) @@ -1000,6 +1003,10 @@ static HRESULT WINAPI media_engine_session_events_Invoke(IMFAsyncCallback *iface
IMFMediaEngineNotify_EventNotify(engine->callback, MF_MEDIA_ENGINE_EVENT_ENDED, 0, 0); break; + + case MEEndOfPresentationSegment: + video_frame_sink_notify_end_of_presentation_segment(engine->presentation.frame_sink); + break; }
failed: @@ -1022,6 +1029,48 @@ static const IMFAsyncCallbackVtbl media_engine_session_events_vtbl = media_engine_session_events_Invoke, };
+static ULONG WINAPI media_engine_sink_events_AddRef(IMFAsyncCallback *iface) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + return IMFMediaEngineEx_AddRef(&engine->IMFMediaEngineEx_iface); +} + +static ULONG WINAPI media_engine_sink_events_Release(IMFAsyncCallback *iface) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + return IMFMediaEngineEx_Release(&engine->IMFMediaEngineEx_iface); +} + +static HRESULT WINAPI media_engine_sink_events_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + MF_MEDIA_ENGINE_EVENT event = IMFAsyncResult_GetStatus(result); + + EnterCriticalSection(&engine->cs); + + switch (event) + { + case MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY: + IMFMediaEngineNotify_EventNotify(engine->callback, event, 0, 0); + break; + default: + ; + } + + LeaveCriticalSection(&engine->cs); + + return S_OK; +} + +static const IMFAsyncCallbackVtbl media_engine_sink_events_vtbl = +{ + media_engine_callback_QueryInterface, + media_engine_sink_events_AddRef, + media_engine_sink_events_Release, + media_engine_callback_GetParameters, + media_engine_sink_events_Invoke, +}; + static ULONG WINAPI media_engine_load_handler_AddRef(IMFAsyncCallback *iface) { struct media_engine *engine = impl_from_load_handler_IMFAsyncCallback(iface); @@ -1122,7 +1171,6 @@ static HRESULT media_engine_create_audio_renderer(struct media_engine *engine, I static HRESULT media_engine_create_video_renderer(struct media_engine *engine, IMFTopologyNode **node) { IMFMediaType *media_type; - IMFActivate *activate; UINT32 output_format; GUID subtype; HRESULT hr; @@ -1148,33 +1196,47 @@ static HRESULT media_engine_create_video_renderer(struct media_engine *engine, I IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &subtype);
- hr = MFCreateSampleGrabberSinkActivate(media_type, &engine->grabber_callback, &activate); + hr = create_video_frame_sink(media_type, &engine->sink_events, &engine->presentation.frame_sink); IMFMediaType_Release(media_type); if (FAILED(hr)) return hr;
if (SUCCEEDED(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, node))) { - IMFTopologyNode_SetObject(*node, (IUnknown *)activate); + IMFStreamSink *sink; + video_frame_sink_query_iface(engine->presentation.frame_sink, &IID_IMFStreamSink, (void **)&sink); + + IMFTopologyNode_SetObject(*node, (IUnknown *)sink); IMFTopologyNode_SetUINT32(*node, &MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE); - }
- IMFActivate_Release(activate); + IMFStreamSink_Release(sink); + }
engine->video_frame.output_format = output_format;
return hr; }
+/* must be called with engine->cs held */ static void media_engine_clear_presentation(struct media_engine *engine) { if (engine->presentation.source) { + /* critical section can not be held during shutdown, as shut down requires all pending + * callbacks to complete, and some callbacks require this cs */ + LeaveCriticalSection(&engine->cs); IMFMediaSource_Shutdown(engine->presentation.source); + EnterCriticalSection(&engine->cs); IMFMediaSource_Release(engine->presentation.source); } if (engine->presentation.pd) IMFPresentationDescriptor_Release(engine->presentation.pd); + if (engine->presentation.frame_sink) + { + video_frame_sink_release(engine->presentation.frame_sink); + engine->presentation.frame_sink = NULL; + } + memset(&engine->presentation, 0, sizeof(engine->presentation)); }
@@ -1273,7 +1335,7 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi if (SUCCEEDED(hr = MFCreateTopology(&topology))) { IMFTopologyNode *sar_node = NULL, *audio_src = NULL; - IMFTopologyNode *grabber_node = NULL, *video_src = NULL; + IMFTopologyNode *svr_node = NULL, *video_src = NULL;
if (engine->flags & MF_MEDIA_ENGINE_REAL_TIME_MODE) IMFTopology_SetUINT32(topology, &MF_LOW_LATENCY, TRUE); @@ -1307,24 +1369,24 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi if (FAILED(hr = media_engine_create_source_node(source, pd, sd_video, &video_src))) WARN("Failed to create video source node, hr %#lx.\n", hr);
- if (FAILED(hr = media_engine_create_video_renderer(engine, &grabber_node))) - WARN("Failed to create video grabber node, hr %#lx.\n", hr); + if (FAILED(hr = media_engine_create_video_renderer(engine, &svr_node))) + WARN("Failed to create simple video render node, hr %#lx.\n", hr);
- if (grabber_node && video_src) + if (svr_node && video_src) { IMFTopology_AddNode(topology, video_src); - IMFTopology_AddNode(topology, grabber_node); + IMFTopology_AddNode(topology, svr_node);
if (FAILED(hr = media_engine_create_effects(engine->video_effects.effects, engine->video_effects.count, - video_src, grabber_node, topology))) + video_src, svr_node, topology))) WARN("Failed to create video effect nodes, hr %#lx.\n", hr); }
if (SUCCEEDED(hr)) IMFTopologyNode_GetTopoNodeID(video_src, &engine->video_frame.node_id);
- if (grabber_node) - IMFTopologyNode_Release(grabber_node); + if (svr_node) + IMFTopologyNode_Release(svr_node); if (video_src) IMFTopologyNode_Release(video_src); } @@ -2300,7 +2362,11 @@ static HRESULT WINAPI media_engine_Shutdown(IMFMediaEngineEx *iface) { media_engine_set_flag(engine, FLAGS_ENGINE_SHUT_DOWN, TRUE); media_engine_clear_presentation(engine); + /* critical section can not be held during shutdown, as shut down requires all pending + * callbacks to complete, and some callbacks require this cs */ + LeaveCriticalSection(&engine->cs); IMFMediaSession_Shutdown(engine->session); + EnterCriticalSection(&engine->cs); } LeaveCriticalSection(&engine->cs);
@@ -2353,8 +2419,10 @@ static void media_engine_adjust_destination_for_ratio(const struct media_engine static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context, struct media_engine *engine) { D3D11_TEXTURE2D_DESC surface_desc; + IMFMediaBuffer *media_buffer; + IMFSample *sample;
- if (!(engine->flags & FLAGS_ENGINE_NEW_FRAME)) + if (!video_frame_sink_get_sample(engine->presentation.frame_sink, &sample)) return;
ID3D11Texture2D_GetDesc(engine->video_frame.d3d11.source, &surface_desc); @@ -2370,13 +2438,24 @@ static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context surface_desc.Width = 0; }
- if (engine->video_frame.buffer_size == surface_desc.Width * surface_desc.Height) + if (SUCCEEDED(IMFSample_ConvertToContiguousBuffer(sample, &media_buffer))) { - ID3D11DeviceContext_UpdateSubresource(context, (ID3D11Resource *)engine->video_frame.d3d11.source, - 0, NULL, engine->video_frame.buffer, surface_desc.Width, 0); + BYTE *buffer; + DWORD buffer_size; + if (SUCCEEDED(IMFMediaBuffer_Lock(media_buffer, &buffer, NULL, &buffer_size))) + { + if (buffer_size == surface_desc.Width * surface_desc.Height) + { + ID3D11DeviceContext_UpdateSubresource(context, (ID3D11Resource *)engine->video_frame.d3d11.source, + 0, NULL, buffer, surface_desc.Width, 0); + } + + IMFMediaBuffer_Unlock(media_buffer); + } + IMFMediaBuffer_Release(media_buffer); }
- media_engine_set_flag(engine, FLAGS_ENGINE_NEW_FRAME, FALSE); + IMFSample_Release(sample); }
static HRESULT media_engine_transfer_to_d3d11_texture(struct media_engine *engine, ID3D11Texture2D *texture, @@ -2572,8 +2651,9 @@ static HRESULT WINAPI media_engine_OnVideoStreamTick(IMFMediaEngineEx *iface, LO hr = E_POINTER; else { - *pts = engine->video_frame.pts; - hr = *pts == MINLONGLONG ? S_FALSE : S_OK; + MFTIME clocktime; + IMFPresentationClock_GetTime(engine->clock, &clocktime); + hr = video_frame_sink_get_pts(engine->presentation.frame_sink, clocktime, pts); }
LeaveCriticalSection(&engine->cs); @@ -3169,127 +3249,6 @@ static const IMFGetServiceVtbl media_engine_get_service_vtbl = media_engine_gs_GetService, };
-static HRESULT WINAPI media_engine_grabber_callback_QueryInterface(IMFSampleGrabberSinkCallback *iface, - REFIID riid, void **obj) -{ - if (IsEqualIID(riid, &IID_IMFSampleGrabberSinkCallback) || - IsEqualIID(riid, &IID_IUnknown)) - { - *obj = iface; - IMFSampleGrabberSinkCallback_AddRef(iface); - return S_OK; - } - - *obj = NULL; - return E_NOINTERFACE; -} - -static ULONG WINAPI media_engine_grabber_callback_AddRef(IMFSampleGrabberSinkCallback *iface) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - return IMFMediaEngineEx_AddRef(&engine->IMFMediaEngineEx_iface); -} - -static ULONG WINAPI media_engine_grabber_callback_Release(IMFSampleGrabberSinkCallback *iface) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - return IMFMediaEngineEx_Release(&engine->IMFMediaEngineEx_iface); -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockStart(IMFSampleGrabberSinkCallback *iface, - MFTIME systime, LONGLONG start_offset) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockStop(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - - EnterCriticalSection(&engine->cs); - media_engine_set_flag(engine, FLAGS_ENGINE_FIRST_FRAME, FALSE); - engine->video_frame.pts = MINLONGLONG; - LeaveCriticalSection(&engine->cs); - - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockPause(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockRestart(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockSetRate(IMFSampleGrabberSinkCallback *iface, - MFTIME systime, float rate) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnSetPresentationClock(IMFSampleGrabberSinkCallback *iface, - 
IMFPresentationClock *clock) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnProcessSample(IMFSampleGrabberSinkCallback *iface, - REFGUID major_type, DWORD sample_flags, LONGLONG sample_time, LONGLONG sample_duration, - const BYTE *buffer, DWORD buffer_size) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - - EnterCriticalSection(&engine->cs); - - if (!(engine->flags & FLAGS_ENGINE_FIRST_FRAME)) - { - IMFMediaEngineNotify_EventNotify(engine->callback, MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY, 0, 0); - media_engine_set_flag(engine, FLAGS_ENGINE_FIRST_FRAME, TRUE); - } - engine->video_frame.pts = sample_time; - if (engine->video_frame.buffer_size < buffer_size) - { - free(engine->video_frame.buffer); - if ((engine->video_frame.buffer = malloc(buffer_size))) - engine->video_frame.buffer_size = buffer_size; - } - if (engine->video_frame.buffer) - { - memcpy(engine->video_frame.buffer, buffer, buffer_size); - engine->flags |= FLAGS_ENGINE_NEW_FRAME; - } - - LeaveCriticalSection(&engine->cs); - - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnShutdown(IMFSampleGrabberSinkCallback *iface) -{ - return S_OK; -} - -static const IMFSampleGrabberSinkCallbackVtbl media_engine_grabber_callback_vtbl = -{ - media_engine_grabber_callback_QueryInterface, - media_engine_grabber_callback_AddRef, - media_engine_grabber_callback_Release, - media_engine_grabber_callback_OnClockStart, - media_engine_grabber_callback_OnClockStop, - media_engine_grabber_callback_OnClockPause, - media_engine_grabber_callback_OnClockRestart, - media_engine_grabber_callback_OnClockSetRate, - media_engine_grabber_callback_OnSetPresentationClock, - media_engine_grabber_callback_OnProcessSample, - media_engine_grabber_callback_OnShutdown, -}; - static HRESULT WINAPI media_engine_factory_QueryInterface(IMFMediaEngineClassFactory *iface, REFIID riid, void **obj) { if (IsEqualIID(riid, &IID_IMFMediaEngineClassFactory) || @@ 
-3318,15 +3277,15 @@ static ULONG WINAPI media_engine_factory_Release(IMFMediaEngineClassFactory *ifa static HRESULT init_media_engine(DWORD flags, IMFAttributes *attributes, struct media_engine *engine) { UINT32 output_format; - UINT64 playback_hwnd; + UINT64 playback_hwnd = 0; IMFClock *clock; HRESULT hr;
engine->IMFMediaEngineEx_iface.lpVtbl = &media_engine_vtbl; engine->IMFGetService_iface.lpVtbl = &media_engine_get_service_vtbl; engine->session_events.lpVtbl = &media_engine_session_events_vtbl; + engine->sink_events.lpVtbl = &media_engine_sink_events_vtbl; engine->load_handler.lpVtbl = &media_engine_load_handler_vtbl; - engine->grabber_callback.lpVtbl = &media_engine_grabber_callback_vtbl; engine->refcount = 1; engine->flags = (flags & MF_MEDIA_ENGINE_CREATEFLAGS_MASK) | FLAGS_ENGINE_PAUSED; engine->default_playback_rate = 1.0; diff --git a/dlls/mfmediaengine/mediaengine_private.h b/dlls/mfmediaengine/mediaengine_private.h new file mode 100644 index 00000000000..286cb9b1430 --- /dev/null +++ b/dlls/mfmediaengine/mediaengine_private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2019 Nikolay Sivov for CodeWeavers + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + */ + +#include "mfmediaengine.h" + +struct video_frame_sink; + +HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, + struct video_frame_sink **sink); +HRESULT video_frame_sink_query_iface(struct video_frame_sink *object, REFIID riid, void **obj); +ULONG video_frame_sink_release(struct video_frame_sink *sink); +int video_frame_sink_get_sample(struct video_frame_sink *sink, IMFSample **sample); +HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime, LONGLONG *pts); +void video_frame_sink_notify_end_of_presentation_segment(struct video_frame_sink *sink); diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c index bf8de216890..34b3519d063 100644 --- a/dlls/mfmediaengine/tests/mfmediaengine.c +++ b/dlls/mfmediaengine/tests/mfmediaengine.c @@ -1282,6 +1282,7 @@ static void test_TransferVideoFrame(void) HRESULT hr; DWORD res; BSTR url; + LONGLONG pts;
stream = load_resource(L"i420-64x64.avi", L"video/avi");
@@ -1343,6 +1344,7 @@ static void test_TransferVideoFrame(void) ok(!res, "Unexpected res %#lx.\n", res);
SetRect(&dst_rect, 0, 0, desc.Width, desc.Height); + IMFMediaEngineEx_OnVideoStreamTick(notify->media_engine, &pts); hr = IMFMediaEngineEx_TransferVideoFrame(notify->media_engine, (IUnknown *)texture, NULL, &dst_rect, NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
@@ -2331,8 +2333,12 @@ static void test_GetSeekable(void) ok(time_range == NULL || broken(time_range == (IMFMediaTimeRange *)0xdeadbeef) /* <= Win10 1507 */, "Got unexpected pointer.\n");
+ /* IMFMediaEngineEx_Shutdown can release in parallel. A small sleep allows this test to pass more + * often than not. But given it's a matter of timing, this test is marked flaky. + */ + Sleep(10); refcount = IMFMediaEngineEx_Release(media_engine); - todo_wine + flaky_wine ok(!refcount, "Got unexpected refcount %lu.\n", refcount);
/* Unseekable bytestreams */ @@ -2631,7 +2637,6 @@ static void test_SetCurrentTime(void) ok(hr == MF_E_SHUTDOWN, "Unexpected hr %#lx.\n", hr);
refcount = IMFMediaEngineEx_Release(media_engine); - todo_wine ok(!refcount, "Got unexpected refcount %lu.\n", refcount);
/* Unseekable bytestreams */ diff --git a/dlls/mfmediaengine/video_frame_sink.c b/dlls/mfmediaengine/video_frame_sink.c new file mode 100644 index 00000000000..ba4ef67c59a --- /dev/null +++ b/dlls/mfmediaengine/video_frame_sink.c @@ -0,0 +1,1209 @@ +/* + * Copyright 2019 Nikolay Sivov for CodeWeavers + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + */ + +#define COBJMACROS + +#include <float.h> +#include <assert.h> +#include <stdbool.h> + +#include "mfapi.h" +#include "mfidl.h" +#include "mferror.h" + +#include "mediaengine_private.h" + +#include "wine/debug.h" +#include "wine/list.h" + +WINE_DEFAULT_DEBUG_CHANNEL(mfplat); + +enum sink_state +{ + SINK_STATE_STOPPED = 0, + SINK_STATE_PAUSED, + SINK_STATE_RUNNING, +}; + +static inline const char *debugstr_time(LONGLONG time) +{ + ULONGLONG abstime = time >= 0 ? 
time : -time; + unsigned int i = 0, j = 0; + char buffer[23], rev[23]; + + while (abstime || i <= 8) + { + buffer[i++] = '0' + (abstime % 10); + abstime /= 10; + if (i == 7) buffer[i++] = '.'; + } + if (time < 0) buffer[i++] = '-'; + + while (i--) rev[j++] = buffer[i]; + while (rev[j-1] == '0' && rev[j-2] != '.') --j; + rev[j] = 0; + + return wine_dbg_sprintf("%s", rev); +} + +enum video_frame_sink_flags +{ + FLAGS_FIRST_FRAME = 0x1, +}; + +struct video_frame_sink +{ + IMFMediaSink IMFMediaSink_iface; + IMFClockStateSink IMFClockStateSink_iface; + IMFMediaEventGenerator IMFMediaEventGenerator_iface; + IMFStreamSink IMFStreamSink_iface; + IMFMediaTypeHandler IMFMediaTypeHandler_iface; + LONG refcount; + IMFMediaType *media_type; + IMFMediaType *current_media_type; + bool is_shut_down; + IMFMediaEventQueue *event_queue; + IMFMediaEventQueue *stream_event_queue; + IMFPresentationClock *clock; + IMFAsyncCallback *callback; + float rate; + enum sink_state state; + unsigned int flags; + IMFSample *sample[2]; + IMFSample *presentation_sample; + int sample_write_index; + int sample_read_index; + bool sample_request_pending; + bool sample_presented; + bool eos; + CRITICAL_SECTION cs; +}; + +static void video_frame_sink_set_flag(struct video_frame_sink *sink, unsigned int mask, bool value) +{ + if (value) + sink->flags |= mask; + else + sink->flags &= ~mask; +} + +static struct video_frame_sink *impl_from_IMFMediaSink(IMFMediaSink *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaSink_iface); +} + +static struct video_frame_sink *impl_from_IMFClockStateSink(IMFClockStateSink *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFClockStateSink_iface); +} + +static struct video_frame_sink *impl_from_IMFMediaEventGenerator(IMFMediaEventGenerator *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaEventGenerator_iface); +} + +static struct video_frame_sink *impl_from_IMFStreamSink(IMFStreamSink *iface) +{ + 
return CONTAINING_RECORD(iface, struct video_frame_sink, IMFStreamSink_iface); +} + +static struct video_frame_sink *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandler *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaTypeHandler_iface); +} + +static void video_frame_sink_samples_release(struct video_frame_sink *sink) +{ + for (int i = 0; i < ARRAY_SIZE(sink->sample); i++) + { + if (sink->sample[i]) + { + IMFSample_Release(sink->sample[i]); + sink->sample[i] = NULL; + } + } + if (sink->presentation_sample) + { + IMFSample_Release(sink->presentation_sample); + sink->presentation_sample = NULL; + } + sink->sample_read_index = 0; + sink->sample_write_index = 0; + sink->sample_presented = FALSE; +} + +static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), obj); + + if (IsEqualIID(riid, &IID_IMFStreamSink) || + IsEqualIID(riid, &IID_IMFMediaEventGenerator) || + IsEqualIID(riid, &IID_IUnknown)) + { + *obj = &sink->IMFStreamSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFMediaTypeHandler)) + { + *obj = &sink->IMFMediaTypeHandler_iface; + } + else + { + WARN("Unsupported %s.\n", debugstr_guid(riid)); + *obj = NULL; + return E_NOINTERFACE; + } + + IUnknown_AddRef((IUnknown *)*obj); + + return S_OK; +} + +static ULONG WINAPI video_frame_sink_stream_AddRef(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_Release(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + return IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_GetEvent(IMFStreamSink *iface, DWORD flags, IMFMediaEvent **event) +{ + struct video_frame_sink *sink = 
impl_from_IMFStreamSink(iface); + + TRACE("%p, %#lx, %p.\n", iface, flags, event); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_GetEvent(sink->stream_event_queue, flags, event); +} + +static HRESULT WINAPI video_frame_sink_stream_BeginGetEvent(IMFStreamSink *iface, IMFAsyncCallback *callback, + IUnknown *state) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p, %p.\n", iface, callback, state); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_BeginGetEvent(sink->stream_event_queue, callback, state); +} + +static HRESULT WINAPI video_frame_sink_stream_EndGetEvent(IMFStreamSink *iface, IMFAsyncResult *result, + IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p, %p.\n", iface, result, event); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_EndGetEvent(sink->stream_event_queue, result, event); +} + +static HRESULT WINAPI video_frame_sink_stream_QueueEvent(IMFStreamSink *iface, MediaEventType event_type, + REFGUID ext_type, HRESULT hr, const PROPVARIANT *value) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %lu, %s, %#lx, %p.\n", iface, event_type, debugstr_guid(ext_type), hr, value); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_QueueEventParamVar(sink->stream_event_queue, event_type, ext_type, hr, value); +} + +static HRESULT WINAPI video_frame_sink_stream_GetMediaSink(IMFStreamSink *iface, IMFMediaSink **ret) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, ret); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *ret = &sink->IMFMediaSink_iface; + IMFMediaSink_AddRef(*ret); + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_GetIdentifier(IMFStreamSink *iface, DWORD *identifier) 
+{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, identifier); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *identifier = 0; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_GetMediaTypeHandler(IMFStreamSink *iface, IMFMediaTypeHandler **handler) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, handler); + + if (!handler) + return E_POINTER; + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *handler = &sink->IMFMediaTypeHandler_iface; + IMFMediaTypeHandler_AddRef(*handler); + + return S_OK; +} + +/* must be called with critical section held */ +static void video_frame_sink_stream_request_sample(struct video_frame_sink *sink) +{ + if (sink->sample_request_pending || sink->eos) + return; + + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkRequestSample, &GUID_NULL, S_OK, NULL); + sink->sample_request_pending = TRUE; +} + +static void video_frame_sink_notify(struct video_frame_sink *sink, unsigned int event) +{ + IMFAsyncResult *result; + + if (FAILED(MFCreateAsyncResult(NULL, sink->callback, NULL, &result))) + return; + + IMFAsyncResult_SetStatus(result, event); + MFInvokeCallback(result); + IMFAsyncResult_Release(result); +} + +static void sample_index_increment(int *index) +{ + int prev = *index; + *index = (prev + 1) % 2; +} + +static HRESULT WINAPI video_frame_sink_stream_ProcessSample(IMFStreamSink *iface, IMFSample *sample) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + LONGLONG sampletime; + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, sample); + + if (!sample) + return S_OK; + + EnterCriticalSection(&sink->cs); + + sink->sample_request_pending = FALSE; + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else if (sink->state == SINK_STATE_RUNNING || sink->state == SINK_STATE_PAUSED) + { + int sample_write_index = sink->sample_write_index; + hr = 
IMFSample_GetSampleTime(sample, &sampletime); + + if (sink->sample[sample_write_index]) + { + IMFSample_Release(sink->sample[sample_write_index]); + sink->sample[sample_write_index] = NULL; + } + + if (SUCCEEDED(hr)) + { + if (!(sink->flags & FLAGS_FIRST_FRAME)) + { + video_frame_sink_notify(sink, MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY); + video_frame_sink_set_flag(sink, FLAGS_FIRST_FRAME, TRUE); + } + // else TODO: send MEQualityNotify event + + IMFSample_AddRef(sink->sample[sample_write_index] = sample); + sample_index_increment(&sink->sample_write_index); + if (!sink->sample[sink->sample_write_index]) + video_frame_sink_stream_request_sample(sink); + } + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_PlaceMarker(IMFStreamSink *iface, MFSTREAMSINK_MARKER_TYPE marker_type, + const PROPVARIANT *marker_value, const PROPVARIANT *context_value) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %d, %p, %p.\n", iface, marker_type, marker_value, context_value); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else if (sink->state == SINK_STATE_RUNNING) + { + video_frame_sink_samples_release(sink); + hr = IMFMediaEventQueue_QueueEventParamVar(sink->stream_event_queue, MEStreamSinkMarker, + &GUID_NULL, S_OK, context_value); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_Flush(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + HRESULT hr = S_OK; + + TRACE("%p.\n", iface); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_STREAMSINK_REMOVED; + else + video_frame_sink_samples_release(sink); + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static const IMFStreamSinkVtbl video_frame_sink_stream_vtbl = +{ + video_frame_sink_stream_QueryInterface, + 
video_frame_sink_stream_AddRef, + video_frame_sink_stream_Release, + video_frame_sink_stream_GetEvent, + video_frame_sink_stream_BeginGetEvent, + video_frame_sink_stream_EndGetEvent, + video_frame_sink_stream_QueueEvent, + video_frame_sink_stream_GetMediaSink, + video_frame_sink_stream_GetIdentifier, + video_frame_sink_stream_GetMediaTypeHandler, + video_frame_sink_stream_ProcessSample, + video_frame_sink_stream_PlaceMarker, + video_frame_sink_stream_Flush, +}; + +static HRESULT WINAPI video_frame_sink_stream_type_handler_QueryInterface(IMFMediaTypeHandler *iface, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_stream_type_handler_AddRef(IMFMediaTypeHandler *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_AddRef(&sink->IMFStreamSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_type_handler_Release(IMFMediaTypeHandler *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_Release(&sink->IMFStreamSink_iface); +} + +static HRESULT video_frame_sink_stream_is_media_type_supported(struct video_frame_sink *sink, IMFMediaType *in_type) +{ + const DWORD supported_flags = MF_MEDIATYPE_EQUAL_MAJOR_TYPES | MF_MEDIATYPE_EQUAL_FORMAT_TYPES | + MF_MEDIATYPE_EQUAL_FORMAT_DATA; + DWORD flags; + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + if (!in_type) + return E_POINTER; + + if (IMFMediaType_IsEqual(sink->media_type, in_type, &flags) == S_OK) + return S_OK; + + return (flags & supported_flags) == supported_flags ? 
S_OK : MF_E_INVALIDMEDIATYPE; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_IsMediaTypeSupported(IMFMediaTypeHandler *iface, + IMFMediaType *in_type, IMFMediaType **out_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + + TRACE("%p, %p, %p.\n", iface, in_type, out_type); + + return video_frame_sink_stream_is_media_type_supported(sink, in_type); +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMediaTypeCount(IMFMediaTypeHandler *iface, DWORD *count) +{ + TRACE("%p, %p.\n", iface, count); + + if (!count) + return E_POINTER; + + *count = 0; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMediaTypeByIndex(IMFMediaTypeHandler *iface, DWORD index, + IMFMediaType **media_type) +{ + TRACE("%p, %lu, %p.\n", iface, index, media_type); + + if (!media_type) + return E_POINTER; + + return MF_E_NO_MORE_TYPES; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_SetCurrentMediaType(IMFMediaTypeHandler *iface, + IMFMediaType *media_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr; + + TRACE("%p, %p.\n", iface, media_type); + + if (FAILED(hr = video_frame_sink_stream_is_media_type_supported(sink, media_type))) + return hr; + + IMFMediaType_Release(sink->current_media_type); + sink->current_media_type = media_type; + IMFMediaType_AddRef(sink->current_media_type); + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetCurrentMediaType(IMFMediaTypeHandler *iface, + IMFMediaType **media_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, media_type); + + if (!media_type) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else + { + *media_type = sink->current_media_type; + IMFMediaType_AddRef(*media_type); + } + + 
LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMajorType(IMFMediaTypeHandler *iface, GUID *type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr; + + TRACE("%p, %p.\n", iface, type); + + if (!type) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_STREAMSINK_REMOVED; + else + hr = IMFMediaType_GetMajorType(sink->current_media_type, type); + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static const IMFMediaTypeHandlerVtbl video_frame_sink_stream_type_handler_vtbl = +{ + video_frame_sink_stream_type_handler_QueryInterface, + video_frame_sink_stream_type_handler_AddRef, + video_frame_sink_stream_type_handler_Release, + video_frame_sink_stream_type_handler_IsMediaTypeSupported, + video_frame_sink_stream_type_handler_GetMediaTypeCount, + video_frame_sink_stream_type_handler_GetMediaTypeByIndex, + video_frame_sink_stream_type_handler_SetCurrentMediaType, + video_frame_sink_stream_type_handler_GetCurrentMediaType, + video_frame_sink_stream_type_handler_GetMajorType, +}; + +static HRESULT WINAPI video_frame_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), obj); + + if (IsEqualIID(riid, &IID_IMFMediaSink) || + IsEqualIID(riid, &IID_IUnknown)) + { + *obj = &sink->IMFMediaSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFClockStateSink)) + { + *obj = &sink->IMFClockStateSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFMediaEventGenerator)) + { + *obj = &sink->IMFMediaEventGenerator_iface; + } + else + { + WARN("Unsupported %s.\n", debugstr_guid(riid)); + *obj = NULL; + return E_NOINTERFACE; + } + + IUnknown_AddRef((IUnknown *)*obj); + + return S_OK; +} + +static ULONG WINAPI video_frame_sink_AddRef(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = 
impl_from_IMFMediaSink(iface); + ULONG refcount = InterlockedIncrement(&sink->refcount); + + TRACE("%p, refcount %lu.\n", iface, refcount); + + return refcount; +} + +static ULONG WINAPI video_frame_sink_Release(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + ULONG refcount = InterlockedDecrement(&sink->refcount); + + TRACE("%p, refcount %lu.\n", iface, refcount); + + if (!refcount) + { + if (sink->current_media_type) + IMFMediaType_Release(sink->current_media_type); + IMFMediaType_Release(sink->media_type); + if (sink->event_queue) + IMFMediaEventQueue_Release(sink->event_queue); + if (sink->clock) + IMFPresentationClock_Release(sink->clock); + if (sink->callback) + IMFAsyncCallback_Release(sink->callback); + if (sink->stream_event_queue) + { + IMFMediaEventQueue_Shutdown(sink->stream_event_queue); + IMFMediaEventQueue_Release(sink->stream_event_queue); + } + video_frame_sink_samples_release(sink); + DeleteCriticalSection(&sink->cs); + free(sink); + } + + return refcount; +} + +static HRESULT WINAPI video_frame_sink_GetCharacteristics(IMFMediaSink *iface, DWORD *flags) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %p.\n", iface, flags); + + if (sink->is_shut_down) + return MF_E_SHUTDOWN; + + *flags = MEDIASINK_FIXED_STREAMS | MEDIASINK_RATELESS | MEDIASINK_CAN_PREROLL; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_AddStreamSink(IMFMediaSink *iface, DWORD stream_sink_id, + IMFMediaType *media_type, IMFStreamSink **stream_sink) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %#lx, %p, %p.\n", iface, stream_sink_id, media_type, stream_sink); + + return sink->is_shut_down ? 
MF_E_SHUTDOWN : MF_E_STREAMSINKS_FIXED; +} + +static HRESULT WINAPI video_frame_sink_RemoveStreamSink(IMFMediaSink *iface, DWORD stream_sink_id) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %#lx.\n", iface, stream_sink_id); + + return sink->is_shut_down ? MF_E_SHUTDOWN : MF_E_STREAMSINKS_FIXED; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkCount(IMFMediaSink *iface, DWORD *count) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %p.\n", iface, count); + + if (sink->is_shut_down) + return MF_E_SHUTDOWN; + + *count = 1; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkByIndex(IMFMediaSink *iface, DWORD index, + IMFStreamSink **stream) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %lu, %p.\n", iface, index, stream); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else if (index > 0) + hr = MF_E_INVALIDINDEX; + else + { + *stream = &sink->IMFStreamSink_iface; + IMFStreamSink_AddRef(*stream); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkById(IMFMediaSink *iface, DWORD stream_sink_id, + IMFStreamSink **stream) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %#lx, %p.\n", iface, stream_sink_id, stream); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else if (stream_sink_id > 0) + hr = MF_E_INVALIDSTREAMNUMBER; + else + { + *stream = &sink->IMFStreamSink_iface; + IMFStreamSink_AddRef(*stream); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static void video_frame_sink_set_presentation_clock(struct video_frame_sink *sink, IMFPresentationClock *clock) +{ + if (sink->clock) + { + IMFPresentationClock_RemoveClockStateSink(sink->clock, &sink->IMFClockStateSink_iface); + 
IMFPresentationClock_Release(sink->clock); + } + sink->clock = clock; + if (sink->clock) + { + IMFPresentationClock_AddRef(sink->clock); + IMFPresentationClock_AddClockStateSink(sink->clock, &sink->IMFClockStateSink_iface); + } +} + +static HRESULT WINAPI video_frame_sink_SetPresentationClock(IMFMediaSink *iface, IMFPresentationClock *clock) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, clock); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_SHUTDOWN; + } + else + { + video_frame_sink_set_presentation_clock(sink, clock); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_GetPresentationClock(IMFMediaSink *iface, IMFPresentationClock **clock) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, clock); + + if (!clock) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->clock) + { + *clock = sink->clock; + IMFPresentationClock_AddRef(*clock); + } + else + hr = MF_E_NO_CLOCK; + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_Shutdown(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p.\n", iface); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else + { + sink->is_shut_down = TRUE; + video_frame_sink_set_presentation_clock(sink, NULL); + IMFMediaType_Release(sink->current_media_type); + sink->current_media_type = NULL; + IMFAsyncCallback_Release(sink->callback); + sink->callback = NULL; + IMFMediaEventQueue_Shutdown(sink->stream_event_queue); + IMFMediaEventQueue_Shutdown(sink->event_queue); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static const IMFMediaSinkVtbl video_frame_sink_vtbl = +{ + video_frame_sink_QueryInterface, + video_frame_sink_AddRef, + 
video_frame_sink_Release, + video_frame_sink_GetCharacteristics, + video_frame_sink_AddStreamSink, + video_frame_sink_RemoveStreamSink, + video_frame_sink_GetStreamSinkCount, + video_frame_sink_GetStreamSinkByIndex, + video_frame_sink_GetStreamSinkById, + video_frame_sink_SetPresentationClock, + video_frame_sink_GetPresentationClock, + video_frame_sink_Shutdown, +}; + +static HRESULT WINAPI video_frame_sink_clock_sink_QueryInterface(IMFClockStateSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_QueryInterface(&sink->IMFMediaSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_clock_sink_AddRef(IMFClockStateSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_clock_sink_Release(IMFClockStateSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT video_frame_sink_set_state(struct video_frame_sink *sink, enum sink_state state, + MFTIME systime, LONGLONG offset) +{ + static const DWORD events[] = + { + MEStreamSinkStopped, /* SINK_STATE_STOPPED */ + MEStreamSinkPaused, /* SINK_STATE_PAUSED */ + MEStreamSinkStarted, /* SINK_STATE_RUNNING */ + }; + HRESULT hr = S_OK; + + EnterCriticalSection(&sink->cs); + + if (!sink->is_shut_down) + { + if (state == SINK_STATE_PAUSED && sink->state == SINK_STATE_STOPPED) + { + hr = MF_E_INVALID_STATE_TRANSITION; + } + else + { + if (state == SINK_STATE_STOPPED) + { + video_frame_sink_samples_release(sink); + video_frame_sink_set_flag(sink, FLAGS_FIRST_FRAME, FALSE); + } + + if (state == SINK_STATE_RUNNING && sink->state != SINK_STATE_RUNNING) + { + video_frame_sink_samples_release(sink); + video_frame_sink_stream_request_sample(sink); + } + + if (state != sink->state || state != SINK_STATE_PAUSED) + { + 
if (sink->rate == 0.0f && state == SINK_STATE_RUNNING) + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkScrubSampleComplete, + &GUID_NULL, S_OK, NULL); + + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, events[state], &GUID_NULL, S_OK, NULL); + } + sink->state = state; + } + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockStart(IMFClockStateSink *iface, MFTIME systime, LONGLONG offset) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s, %s.\n", iface, debugstr_time(systime), debugstr_time(offset)); + + return video_frame_sink_set_state(sink, SINK_STATE_RUNNING, systime, offset); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockStop(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_STOPPED, systime, 0); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockPause(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_PAUSED, systime, 0); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockRestart(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_RUNNING, systime, PRESENTATION_CURRENT_POSITION); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockSetRate(IMFClockStateSink *iface, MFTIME systime, float rate) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %s, %f.\n", iface, debugstr_time(systime), rate); + + 
EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else + { + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkRateChanged, &GUID_NULL, S_OK, NULL); + sink->rate = rate; + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_events_QueryInterface(IMFMediaEventGenerator *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_QueryInterface(&sink->IMFMediaSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_events_AddRef(IMFMediaEventGenerator *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_events_Release(IMFMediaEventGenerator *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT WINAPI video_frame_sink_events_GetEvent(IMFMediaEventGenerator *iface, DWORD flags, IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %#lx, %p.\n", iface, flags, event); + + return IMFMediaEventQueue_GetEvent(sink->event_queue, flags, event); +} + +static HRESULT WINAPI video_frame_sink_events_BeginGetEvent(IMFMediaEventGenerator *iface, IMFAsyncCallback *callback, + IUnknown *state) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %p, %p.\n", iface, callback, state); + + return IMFMediaEventQueue_BeginGetEvent(sink->event_queue, callback, state); +} + +static HRESULT WINAPI video_frame_sink_events_EndGetEvent(IMFMediaEventGenerator *iface, IMFAsyncResult *result, + IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %p, %p.\n", iface, result, event); + + return 
IMFMediaEventQueue_EndGetEvent(sink->event_queue, result, event); +} + +static HRESULT WINAPI video_frame_sink_events_QueueEvent(IMFMediaEventGenerator *iface, MediaEventType event_type, + REFGUID ext_type, HRESULT hr, const PROPVARIANT *value) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %lu, %s, %#lx, %p.\n", iface, event_type, debugstr_guid(ext_type), hr, value); + + return IMFMediaEventQueue_QueueEventParamVar(sink->event_queue, event_type, ext_type, hr, value); +} + +static const IMFMediaEventGeneratorVtbl video_frame_sink_events_vtbl = +{ + video_frame_sink_events_QueryInterface, + video_frame_sink_events_AddRef, + video_frame_sink_events_Release, + video_frame_sink_events_GetEvent, + video_frame_sink_events_BeginGetEvent, + video_frame_sink_events_EndGetEvent, + video_frame_sink_events_QueueEvent, +}; + +static const IMFClockStateSinkVtbl video_frame_sink_clock_sink_vtbl = +{ + video_frame_sink_clock_sink_QueryInterface, + video_frame_sink_clock_sink_AddRef, + video_frame_sink_clock_sink_Release, + video_frame_sink_clock_sink_OnClockStart, + video_frame_sink_clock_sink_OnClockStop, + video_frame_sink_clock_sink_OnClockPause, + video_frame_sink_clock_sink_OnClockRestart, + video_frame_sink_clock_sink_OnClockSetRate, +}; + +HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) +{ + struct video_frame_sink *object; + HRESULT hr; + + if (!(object = calloc(1, sizeof(*object)))) + return E_OUTOFMEMORY; + + object->IMFMediaSink_iface.lpVtbl = &video_frame_sink_vtbl; + object->IMFClockStateSink_iface.lpVtbl = &video_frame_sink_clock_sink_vtbl; + object->IMFMediaEventGenerator_iface.lpVtbl = &video_frame_sink_events_vtbl; + object->IMFStreamSink_iface.lpVtbl = &video_frame_sink_stream_vtbl; + object->IMFMediaTypeHandler_iface.lpVtbl = &video_frame_sink_stream_type_handler_vtbl; + object->refcount = 1; + object->rate = 1.0f; + object->media_type = 
media_type; + IMFAsyncCallback_AddRef(object->callback = events_callback); + IMFMediaType_AddRef(object->media_type); + object->current_media_type = media_type; + IMFMediaType_AddRef(object->current_media_type); + InitializeCriticalSection(&object->cs); + + if (FAILED(hr = MFCreateEventQueue(&object->stream_event_queue))) + goto failed; + + if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) + goto failed; + + *sink = object; + + return S_OK; + +failed: + + IMFMediaSink_Release(&object->IMFMediaSink_iface); + + return hr; +} + +HRESULT video_frame_sink_query_iface(struct video_frame_sink *sink, REFIID riid, void **obj) +{ + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +int video_frame_sink_get_sample(struct video_frame_sink *sink, IMFSample **ret) +{ + *ret = NULL; + + if (sink) + { + EnterCriticalSection(&sink->cs); + + if (sink->presentation_sample) + { + IMFSample_AddRef(*ret = sink->presentation_sample); + sink->sample_presented = TRUE; + } + + LeaveCriticalSection(&sink->cs); + } + + return !!*ret; +} + +static HRESULT sample_get_pts(IMFSample *sample, MFTIME clocktime, LONGLONG *pts) +{ + HRESULT hr = S_FALSE; + if (sample) + { + if (SUCCEEDED(hr = IMFSample_GetSampleTime(sample, pts))) + { + if (clocktime < *pts) + *pts = MINLONGLONG; + hr = *pts == MINLONGLONG ? S_FALSE : S_OK; + } + else + WARN("Failed to get sample time, hr %#lx.\n", hr); + } + return hr; +} + +/* + * This function selects the queued sample with the greatest PTS that is also below the supplied value of clocktime. + * If no queued sample has a PTS below the supplied value of clocktime, S_FALSE is returned. + * Otherwise S_OK is returned, the PTS of the selected sample is provided, and the selected sample will now be returned + * by subsequent calls to video_frame_sink_get_sample. + * Queued samples with a PTS lower than the PTS of the selected sample will be silently dropped. 
+ */ +HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime, LONGLONG *pts) +{ + HRESULT hr = S_FALSE; + + *pts = MINLONGLONG; + if (sink) + { + int sample_read_index; + bool transfer_sample = FALSE; + EnterCriticalSection(&sink->cs); + sample_read_index = sink->sample_read_index; + hr = sample_get_pts(sink->sample[sample_read_index], clocktime, pts); + + if (hr == S_OK) + { + LONGLONG pts2; + transfer_sample = TRUE; + /* if the second sample we have is also OK, we'll drop the first and use the second */ + sample_index_increment(&sample_read_index); + if (sink->sample[sample_read_index] && sample_get_pts(sink->sample[sample_read_index], clocktime, &pts2) == S_OK) + { + *pts = pts2; + IMFSample_Release(sink->sample[sink->sample_read_index]); + sink->sample[sink->sample_read_index] = NULL; + sink->sample_read_index = sample_read_index; + } + } + else if (sink->presentation_sample && !sink->sample_presented) + { + hr = sample_get_pts(sink->presentation_sample, clocktime, pts); + } + + if (transfer_sample) + { + video_frame_sink_stream_request_sample(sink); + if (sink->presentation_sample) + IMFSample_Release(sink->presentation_sample); + /* transfer ownership from sample array to presentation sample */ + sink->presentation_sample = sink->sample[sink->sample_read_index]; + sink->sample[sink->sample_read_index] = NULL; + sink->sample_presented = FALSE; + sample_index_increment(&sink->sample_read_index); + } + + LeaveCriticalSection(&sink->cs); + } + + return hr; +} + +void video_frame_sink_notify_end_of_presentation_segment(struct video_frame_sink *sink) +{ + sink->eos = TRUE; +} + +ULONG video_frame_sink_release(struct video_frame_sink *sink) +{ + return video_frame_sink_Release(&sink->IMFMediaSink_iface); +} +
From: Rémi Bernon rbernon@codeweavers.com
--- dlls/mfmediaengine/main.c | 82 +++++++++++++++++++++++- dlls/mfmediaengine/mediaengine_private.h | 2 +- dlls/mfmediaengine/video_frame_sink.c | 64 +++++++++++++++++- 3 files changed, 144 insertions(+), 4 deletions(-)
diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index caeab3dea30..0f2ce5d189f 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -1196,7 +1196,7 @@ static HRESULT media_engine_create_video_renderer(struct media_engine *engine, I IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &subtype);
- hr = create_video_frame_sink(media_type, &engine->sink_events, &engine->presentation.frame_sink); + hr = create_video_frame_sink(media_type, (IUnknown *)engine->device_manager, &engine->sink_events, &engine->presentation.frame_sink); IMFMediaType_Release(media_type); if (FAILED(hr)) return hr; @@ -2458,6 +2458,83 @@ static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context IMFSample_Release(sample); }
+static HRESULT get_d3d11_resource_from_sample(IMFSample *sample, ID3D11Texture2D **resource, UINT *subresource) +{ + IMFDXGIBuffer *dxgi_buffer; + IMFMediaBuffer *buffer; + HRESULT hr; + + *resource = NULL; + *subresource = 0; + + if (FAILED(hr = IMFSample_GetBufferByIndex(sample, 0, &buffer))) + return hr; + + if (SUCCEEDED(hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer))) + { + IMFDXGIBuffer_GetSubresourceIndex(dxgi_buffer, subresource); + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, (void **)resource); + IMFDXGIBuffer_Release(dxgi_buffer); + } + + IMFMediaBuffer_Release(buffer); + return hr; +} + +static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Texture2D *dst_texture, + const MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) +{ + MFVideoNormalizedRect src_rect_default = {0.0, 0.0, 1.0, 1.0}; + MFARGB color_default = {0, 0, 0, 0}; + D3D11_TEXTURE2D_DESC src_desc; + ID3D11DeviceContext *context; + ID3D11Texture2D *src_texture; + RECT dst_rect_default = {0}; + D3D11_BOX src_box = {0}; + ID3D11Device *device; + IMFSample *sample; + UINT subresource; + HRESULT hr; + + if (!src_rect) + src_rect = &src_rect_default; + if (!dst_rect) + dst_rect = &dst_rect_default; + if (!color) + color = &color_default; + + if (!video_frame_sink_get_sample(engine->presentation.frame_sink, &sample)) + return MF_E_UNEXPECTED; + hr = get_d3d11_resource_from_sample(sample, &src_texture, &subresource); + IMFSample_Release(sample); + if (FAILED(hr)) + return hr; + + if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) + { + ID3D11Texture2D_Release(src_texture); + return hr; + } + + ID3D11Texture2D_GetDesc(src_texture, &src_desc); + + src_box.left = src_rect->left * src_desc.Width; + src_box.top = src_rect->top * src_desc.Height; + src_box.front = 0; + src_box.right = src_rect->right * src_desc.Width; + src_box.bottom = src_rect->bottom * src_desc.Height; + 
src_box.back = 1; + + ID3D11Device_GetImmediateContext(device, &context); + ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)dst_texture, 0, + dst_rect->left, dst_rect->top, 0, (ID3D11Resource *)src_texture, subresource, &src_box); + ID3D11DeviceContext_Release(context); + + media_engine_unlock_d3d_device(engine, device); + ID3D11Texture2D_Release(src_texture); + return hr; +} + static HRESULT media_engine_transfer_to_d3d11_texture(struct media_engine *engine, ID3D11Texture2D *texture, const MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) { @@ -2623,7 +2700,8 @@ static HRESULT WINAPI media_engine_TransferVideoFrame(IMFMediaEngineEx *iface, I
if (SUCCEEDED(IUnknown_QueryInterface(surface, &IID_ID3D11Texture2D, (void **)&texture))) { - hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); + if (!engine->device_manager || FAILED(hr = media_engine_transfer_d3d11(engine, texture, src_rect, dst_rect, color))) + hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); ID3D11Texture2D_Release(texture); } else diff --git a/dlls/mfmediaengine/mediaengine_private.h b/dlls/mfmediaengine/mediaengine_private.h index 286cb9b1430..3b280a6d1ee 100644 --- a/dlls/mfmediaengine/mediaengine_private.h +++ b/dlls/mfmediaengine/mediaengine_private.h @@ -20,7 +20,7 @@
struct video_frame_sink;
-HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink); HRESULT video_frame_sink_query_iface(struct video_frame_sink *object, REFIID riid, void **obj); ULONG video_frame_sink_release(struct video_frame_sink *sink); diff --git a/dlls/mfmediaengine/video_frame_sink.c b/dlls/mfmediaengine/video_frame_sink.c index ba4ef67c59a..bb33d1e69b7 100644 --- a/dlls/mfmediaengine/video_frame_sink.c +++ b/dlls/mfmediaengine/video_frame_sink.c @@ -28,6 +28,9 @@
#include "mediaengine_private.h"
+#include "initguid.h" +#include "evr.h" + #include "wine/debug.h" #include "wine/list.h"
@@ -73,7 +76,9 @@ struct video_frame_sink IMFMediaEventGenerator IMFMediaEventGenerator_iface; IMFStreamSink IMFStreamSink_iface; IMFMediaTypeHandler IMFMediaTypeHandler_iface; + IMFGetService IMFGetService_iface; LONG refcount; + IUnknown *device_manager; IMFMediaType *media_type; IMFMediaType *current_media_type; bool is_shut_down; @@ -127,6 +132,11 @@ static struct video_frame_sink *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandle return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaTypeHandler_iface); }
+static struct video_frame_sink *impl_from_IMFGetService(IMFGetService *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFGetService_iface); +} + static void video_frame_sink_samples_release(struct video_frame_sink *sink) { for (int i = 0; i < ARRAY_SIZE(sink->sample); i++) @@ -163,6 +173,10 @@ static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *ifac { *obj = &sink->IMFMediaTypeHandler_iface; } + else if (IsEqualIID(riid, &IID_IMFGetService)) + { + *obj = &sink->IMFGetService_iface; + } else { WARN("Unsupported %s.\n", debugstr_guid(riid)); @@ -577,6 +591,49 @@ static const IMFMediaTypeHandlerVtbl video_frame_sink_stream_type_handler_vtbl = video_frame_sink_stream_type_handler_GetMajorType, };
+static HRESULT WINAPI video_frame_sink_stream_get_service_QueryInterface(IMFGetService *iface, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_stream_get_service_AddRef(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_AddRef(&sink->IMFStreamSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_get_service_Release(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_Release(&sink->IMFStreamSink_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_get_service_GetService(IMFGetService *iface, REFGUID service, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + + if (IsEqualGUID(service, &MR_VIDEO_ACCELERATION_SERVICE)) + { + if (sink->device_manager) + return IUnknown_QueryInterface(sink->device_manager, riid, obj); + return E_NOINTERFACE; + } + + FIXME("Unsupported service %s, riid %s.\n", debugstr_guid(service), debugstr_guid(riid)); + return MF_E_UNSUPPORTED_SERVICE; +} + +static const IMFGetServiceVtbl video_frame_sink_stream_get_service_vtbl = +{ + video_frame_sink_stream_get_service_QueryInterface, + video_frame_sink_stream_get_service_AddRef, + video_frame_sink_stream_get_service_Release, + video_frame_sink_stream_get_service_GetService, +}; + static HRESULT WINAPI video_frame_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj) { struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); @@ -630,6 +687,8 @@ static ULONG WINAPI video_frame_sink_Release(IMFMediaSink *iface) if (sink->current_media_type) IMFMediaType_Release(sink->current_media_type); IMFMediaType_Release(sink->media_type); + if (sink->device_manager) + IUnknown_Release(sink->device_manager); if (sink->event_queue) 
IMFMediaEventQueue_Release(sink->event_queue); if (sink->clock) @@ -1059,7 +1118,7 @@ static const IMFClockStateSinkVtbl video_frame_sink_clock_sink_vtbl = video_frame_sink_clock_sink_OnClockSetRate, };
-HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) { struct video_frame_sink *object; HRESULT hr; @@ -1072,8 +1131,11 @@ HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *even object->IMFMediaEventGenerator_iface.lpVtbl = &video_frame_sink_events_vtbl; object->IMFStreamSink_iface.lpVtbl = &video_frame_sink_stream_vtbl; object->IMFMediaTypeHandler_iface.lpVtbl = &video_frame_sink_stream_type_handler_vtbl; + object->IMFGetService_iface.lpVtbl = &video_frame_sink_stream_get_service_vtbl; object->refcount = 1; object->rate = 1.0f; + if ((object->device_manager = device_manager)) + IUnknown_AddRef(object->device_manager); object->media_type = media_type; IMFAsyncCallback_AddRef(object->callback = events_callback); IMFMediaType_AddRef(object->media_type);
From: Brendan McGrath bmcgrath@codeweavers.com
CopySubresourceRegion only supports copy; it doesn't support any stretch, color key, or blend.
If you try to copy outside the destination resource, the behavior of CopySubresourceRegion is undefined. --- dlls/mfmediaengine/main.c | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-)
diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index 0f2ce5d189f..1a8f6291342 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -2486,7 +2486,7 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te { MFVideoNormalizedRect src_rect_default = {0.0, 0.0, 1.0, 1.0}; MFARGB color_default = {0, 0, 0, 0}; - D3D11_TEXTURE2D_DESC src_desc; + D3D11_TEXTURE2D_DESC src_desc, dst_desc; ID3D11DeviceContext *context; ID3D11Texture2D *src_texture; RECT dst_rect_default = {0}; @@ -2510,13 +2510,8 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te if (FAILED(hr)) return hr;
- if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) - { - ID3D11Texture2D_Release(src_texture); - return hr; - } - ID3D11Texture2D_GetDesc(src_texture, &src_desc); + ID3D11Texture2D_GetDesc(dst_texture, &dst_desc);
src_box.left = src_rect->left * src_desc.Width; src_box.top = src_rect->top * src_desc.Height; @@ -2525,6 +2520,19 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te src_box.bottom = src_rect->bottom * src_desc.Height; src_box.back = 1;
+ if (dst_rect->left + src_box.right - src_box.left > dst_desc.Width || + dst_rect->top + src_box.bottom - src_box.top > dst_desc.Height) + { + ID3D11Texture2D_Release(src_texture); + return MF_E_UNEXPECTED; + } + + if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) + { + ID3D11Texture2D_Release(src_texture); + return hr; + } + + ID3D11Device_GetImmediateContext(device, &context); + ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)dst_texture, 0, + dst_rect->left, dst_rect->top, 0, (ID3D11Resource *)src_texture, subresource, &src_box);
Hi,
It looks like your patch introduced the new failures shown below. Please investigate and fix them before resubmitting your patch. If they are not new, fixing them anyway would help a lot. Otherwise please ask for the known failures list to be updated.
The full results can be found at: https://testbot.winehq.org/JobDetails.pl?Key=149931
Your paranoid android.
=== build (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
=== debian11 (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
=== debian11b (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
On Fri Nov 22 23:33:36 2024 +0000, Nikolay Sivov wrote:
That's something else, unrelated to SVR work?
From memory I ran into a deadlock caused by this whilst testing. I found `IMFMediaSession_Shutdown` was re-entrant. Some of the callbacks that ran within mfmediaengine as a result of the shutdown required the `cs` lock (causing a deadlock if already held).
On Fri Nov 22 23:33:36 2024 +0000, Nikolay Sivov wrote:
I find this hard to understand. We'll need to document what's happening. For example, why does this check one more sample at all? Is that because buffer size is 2, or we always would check the adjacent one.
Basically we just want to supply the latest sample that has a PTS prior to the current presentation timestamp. So we will: - supply no samples if neither sample has a PTS prior to the current presentation timestamp; - supply the first sample if the first sample has a PTS prior to the current presentation timestamp and the 2nd sample has a PTS after the current presentation timestamp (or there is no 2nd sample); or - supply the 2nd sample (and drop the 1st) if the 2nd sample has a PTS prior to the current presentation timestamp.
I've now added a comment to the function.
could we use sample allocator unconditionally? In other words, is it possible to get SVR used when no device manager was provided;
For 'Frame Server Mode', the documentation states `MF_MEDIA_ENGINE_DXGI_MANAGER` is optional:
https://learn.microsoft.com/en-us/windows/win32/api/mfmediaengine/nf-mfmedia...
And the documentation for `MF_MEDIA_ENGINE_DXGI_MANAGER` says:
In frame-server mode, this attribute enables the Media Engine to use hardware acceleration for video decoding and video processing. If the attribute is not set, the Media Engine uses software decoding and processing.
So technically we should add support for not providing `MF_MEDIA_ENGINE_DXGI_MANAGER`. But we don't currently support that, and the purpose of this MR is just to replace the sample grabber. Maybe this can be the target of a future MR?
EVR has a circular buffer logic, so maybe we could copy from there;
Do you mean the `IID_IMFVideoSampleAllocator` interface in the `MR_VIDEO_ACCELERATION_SERVICE`? If so, I think this would only be used once we support optionally providing a DXGI manager. As when DXGI manager is supplied the MFTs provide their own samples (so the sample allocator isn't used).
it would be great if we could get rid of locking sample buffer for copies, and that should be possible if we always have d3d samples.
@rbernon implemented this in Proton, so I've cherry-picked his commit. Although I did also add one more commit that will fall back to the existing sample lock/copy if scaling is required.
On Mon Nov 25 00:37:21 2024 +0000, Brendan McGrath wrote:
Basically we just want to supply the latest sample that has a PTS prior to the current presentation timestamp. So we will:
- supply no samples if neither sample has a PTS prior to the current
presentation timestamp;
- supply the first sample if the first sample has a PTS prior to the
current presentation timestamp and the 2nd sample has a PTS after the current presentation timestamp (or there is no 2nd sample); or
- supply the 2nd sample (and drop the 1st) if the 2nd sample has a PTS
prior to the current presentation timestamp. I've now added a comment to the function.
Why do we need to keep this pair of samples at all? Do we know from the tests that this sink always keeps two of them?
On Tue Nov 26 13:52:15 2024 +0000, Nikolay Sivov wrote:
Why do we need to keep this pair of samples at all? Do we know from the tests that this sink always keeps two of them?
That's right. I wrote the following tests to explore what Windows was doing: [svr_tests.patch](/uploads/445c54d660f272fde21e8eef710509db/svr_tests.patch)
I found Windows requests two samples as part of a pre-roll, and then will request a new sample every-time it returns `S_OK` to `IMFMediaEngineEx::OnVideoStreamTick`. If it drops a sample (due to the second sample being "ready"), then it will request a second sample on receipt of the first.
On Tue Nov 26 23:40:30 2024 +0000, Brendan McGrath wrote:
That's right. I wrote the following tests to explore what Windows was doing: [svr_tests.patch](/uploads/445c54d660f272fde21e8eef710509db/svr_tests.patch) I found Windows requests two samples as part of a pre-roll, and then will request a new sample every-time it returns `S_OK` to `IMFMediaEngineEx::OnVideoStreamTick`. If it drops a sample (due to the second sample being "ready"), then it will request a second sample on receipt of the first.
Even if it's always going to be just two pending samples at most, this needs generic logic to handle queue of any size. Unless that's what it already does, and I'm just missing something.
What do you mean by supply? Make available to OnVideoStreamTick/Transfer* ?
My understanding is that during playback, if say we don't request frames temporarily, on next OnVideoStreamTick we'd compare those queued timestamps with current time, and drop every outdated sample. Then we pick one that's closest to current time, is that the idea? Then on Transfer* call, do we need to reevaluate pts of those closest sample again and potentially drop it?
What you're describing sounds like sample 1 could be either ahead or behind sample 2? How is that possible? On seeking we are most likely supposed to clear the queue, and reversed playback would still supply samples in temporal order. So with fifo queue, popped sample would be the "oldest", and we only need to check if it's behind current time to drop it, and pick next one. This is similar to what happens in EVR, that's why I suggest using queue logic from there.
On Mon Nov 25 00:38:44 2024 +0000, Brendan McGrath wrote:
could we use sample allocator unconditionally? In other words, is it
possible to get SVR used when no device manager was provided; For 'Frame Server Mode', the documentation states `MF_MEDIA_ENGINE_DXGI_MANAGER` is optional: https://learn.microsoft.com/en-us/windows/win32/api/mfmediaengine/nf-mfmedia... And the documentation for `MF_MEDIA_ENGINE_DXGI_MANAGER` says:
In frame-server mode, this attribute enables the Media Engine to use
hardware acceleration for video decoding and video processing. If the attribute is not set, the Media Engine uses software decoding and processing. So technically we should add support for not providing `MF_MEDIA_ENGINE_DXGI_MANAGER`. But we don't currently support that, and the purpose of this MR is just to replace the sample grabber. Maybe this can be the target of a future MR?
EVR has a circular buffer logic, so maybe we could copy from there;
Do you mean the `IID_IMFVideoSampleAllocator` interface in the `MR_VIDEO_ACCELERATION_SERVICE`? If so, I think this would only be used once we support optionally providing a DXGI manager. As when DXGI manager is supplied the MFTs provide their own samples (so the sample allocator isn't used).
it would be great if we could get rid of locking sample buffer for
copies, and that should be possible if we always have d3d samples. @rbernon implemented this in Proton, so I've cherry-picked his commit. Although I did also add one more commit that will fall back to the existing sample lock/copy if scaling is required.
No, we don't need everything at once, definitely.