This commit replaces the SampleGrabberSink with a dedicated Video Sink, referred to as the Simple Video Renderer (SVR).
This brings the behaviour more in line with Windows and provides direct access to the IMFSample, removing the need to copy the sample data.
-- v7:
mfmediaengine: Fall back to sample copy if scaling is required.
mfmediaengine: Implement D3D-aware video frame sink.
mfmediaengine: Implement SVR.
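To illustrate the zero-copy point above, here is a minimal sketch (not part of the patch) of how a consumer of the new sink reads the latest decoded frame in place. It assumes the helpers declared by this series in mediaengine_private.h; the engine's own media_engine_update_d3d11_frame_surface() follows the same pattern, and the consume_latest_frame() name is purely illustrative.

#define COBJMACROS
#include "mfapi.h"
#include "mediaengine_private.h"

static void consume_latest_frame(struct video_frame_sink *frame_sink)
{
    IMFMediaBuffer *media_buffer;
    IMFSample *sample;
    DWORD length;
    BYTE *data;

    /* Returns the sample most recently selected for presentation, if any. */
    if (!video_frame_sink_get_sample(frame_sink, &sample))
        return;

    if (SUCCEEDED(IMFSample_ConvertToContiguousBuffer(sample, &media_buffer)))
    {
        if (SUCCEEDED(IMFMediaBuffer_Lock(media_buffer, &data, NULL, &length)))
        {
            /* data/length reference the sink's sample payload directly;
             * upload or render from here, then unlock. */
            IMFMediaBuffer_Unlock(media_buffer);
        }
        IMFMediaBuffer_Release(media_buffer);
    }

    IMFSample_Release(sample);
}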
From: Brendan McGrath bmcgrath@codeweavers.com
This commit replaces the SampleGrabberSink with a dedicated Video Sink, referred to as the Simple Video Renderer (SVR).
This brings the behaviour more in line with Windows and provides direct access to the IMFSample, removing the need to copy the sample data.
Based on a patch by Nikolay Sivov. --- dlls/mfmediaengine/Makefile.in | 3 +- dlls/mfmediaengine/main.c | 255 ++--- dlls/mfmediaengine/mediaengine_private.h | 29 + dlls/mfmediaengine/tests/mfmediaengine.c | 9 +- dlls/mfmediaengine/video_frame_sink.c | 1253 ++++++++++++++++++++++ 5 files changed, 1398 insertions(+), 151 deletions(-) create mode 100644 dlls/mfmediaengine/mediaengine_private.h create mode 100644 dlls/mfmediaengine/video_frame_sink.c
diff --git a/dlls/mfmediaengine/Makefile.in b/dlls/mfmediaengine/Makefile.in index 8e4bf011d81..5b593814ef6 100644 --- a/dlls/mfmediaengine/Makefile.in +++ b/dlls/mfmediaengine/Makefile.in @@ -5,4 +5,5 @@ EXTRADLLFLAGS = -Wb,--prefer-native
SOURCES = \ main.c \ - mediaengine_classes.idl + mediaengine_classes.idl \ + video_frame_sink.c diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index 026b825a7a5..caeab3dea30 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -32,6 +32,8 @@ #include "mmdeviceapi.h" #include "audiosessiontypes.h"
+#include "mediaengine_private.h" + #include "wine/debug.h"
WINE_DEFAULT_DEBUG_CHANNEL(mfplat); @@ -138,8 +140,8 @@ struct media_engine IMFMediaEngineEx IMFMediaEngineEx_iface; IMFGetService IMFGetService_iface; IMFAsyncCallback session_events; + IMFAsyncCallback sink_events; IMFAsyncCallback load_handler; - IMFSampleGrabberSinkCallback grabber_callback; LONG refcount; IMFMediaEngineNotify *callback; IMFAttributes *attributes; @@ -166,6 +168,7 @@ struct media_engine IMFMediaSource *source; IMFPresentationDescriptor *pd; PROPVARIANT start_position; + struct video_frame_sink *frame_sink; } presentation; struct effects video_effects; struct effects audio_effects; @@ -784,14 +787,14 @@ static struct media_engine *impl_from_session_events_IMFAsyncCallback(IMFAsyncCa return CONTAINING_RECORD(iface, struct media_engine, session_events); }
-static struct media_engine *impl_from_load_handler_IMFAsyncCallback(IMFAsyncCallback *iface) +static struct media_engine *impl_from_sink_events_IMFAsyncCallback(IMFAsyncCallback *iface) { - return CONTAINING_RECORD(iface, struct media_engine, load_handler); + return CONTAINING_RECORD(iface, struct media_engine, sink_events); }
-static struct media_engine *impl_from_IMFSampleGrabberSinkCallback(IMFSampleGrabberSinkCallback *iface) +static struct media_engine *impl_from_load_handler_IMFAsyncCallback(IMFAsyncCallback *iface) { - return CONTAINING_RECORD(iface, struct media_engine, grabber_callback); + return CONTAINING_RECORD(iface, struct media_engine, load_handler); }
static unsigned int get_gcd(unsigned int a, unsigned int b) @@ -1000,6 +1003,10 @@ static HRESULT WINAPI media_engine_session_events_Invoke(IMFAsyncCallback *iface
IMFMediaEngineNotify_EventNotify(engine->callback, MF_MEDIA_ENGINE_EVENT_ENDED, 0, 0); break; + + case MEEndOfPresentationSegment: + video_frame_sink_notify_end_of_presentation_segment(engine->presentation.frame_sink); + break; }
failed: @@ -1022,6 +1029,48 @@ static const IMFAsyncCallbackVtbl media_engine_session_events_vtbl = media_engine_session_events_Invoke, };
+static ULONG WINAPI media_engine_sink_events_AddRef(IMFAsyncCallback *iface) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + return IMFMediaEngineEx_AddRef(&engine->IMFMediaEngineEx_iface); +} + +static ULONG WINAPI media_engine_sink_events_Release(IMFAsyncCallback *iface) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + return IMFMediaEngineEx_Release(&engine->IMFMediaEngineEx_iface); +} + +static HRESULT WINAPI media_engine_sink_events_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) +{ + struct media_engine *engine = impl_from_sink_events_IMFAsyncCallback(iface); + MF_MEDIA_ENGINE_EVENT event = IMFAsyncResult_GetStatus(result); + + EnterCriticalSection(&engine->cs); + + switch (event) + { + case MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY: + IMFMediaEngineNotify_EventNotify(engine->callback, event, 0, 0); + break; + default: + ; + } + + LeaveCriticalSection(&engine->cs); + + return S_OK; +} + +static const IMFAsyncCallbackVtbl media_engine_sink_events_vtbl = +{ + media_engine_callback_QueryInterface, + media_engine_sink_events_AddRef, + media_engine_sink_events_Release, + media_engine_callback_GetParameters, + media_engine_sink_events_Invoke, +}; + static ULONG WINAPI media_engine_load_handler_AddRef(IMFAsyncCallback *iface) { struct media_engine *engine = impl_from_load_handler_IMFAsyncCallback(iface); @@ -1122,7 +1171,6 @@ static HRESULT media_engine_create_audio_renderer(struct media_engine *engine, I static HRESULT media_engine_create_video_renderer(struct media_engine *engine, IMFTopologyNode **node) { IMFMediaType *media_type; - IMFActivate *activate; UINT32 output_format; GUID subtype; HRESULT hr; @@ -1148,33 +1196,47 @@ static HRESULT media_engine_create_video_renderer(struct media_engine *engine, I IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &subtype);
- hr = MFCreateSampleGrabberSinkActivate(media_type, &engine->grabber_callback, &activate); + hr = create_video_frame_sink(media_type, &engine->sink_events, &engine->presentation.frame_sink); IMFMediaType_Release(media_type); if (FAILED(hr)) return hr;
if (SUCCEEDED(hr = MFCreateTopologyNode(MF_TOPOLOGY_OUTPUT_NODE, node))) { - IMFTopologyNode_SetObject(*node, (IUnknown *)activate); + IMFStreamSink *sink; + video_frame_sink_query_iface(engine->presentation.frame_sink, &IID_IMFStreamSink, (void **)&sink); + + IMFTopologyNode_SetObject(*node, (IUnknown *)sink); IMFTopologyNode_SetUINT32(*node, &MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE); - }
- IMFActivate_Release(activate); + IMFStreamSink_Release(sink); + }
engine->video_frame.output_format = output_format;
return hr; }
+/* must be called with engine->cs held */ static void media_engine_clear_presentation(struct media_engine *engine) { if (engine->presentation.source) { + /* critical section cannot be held during shutdown, as shutdown requires all pending + * callbacks to complete, and some callbacks require this cs */ + LeaveCriticalSection(&engine->cs); IMFMediaSource_Shutdown(engine->presentation.source); + EnterCriticalSection(&engine->cs); IMFMediaSource_Release(engine->presentation.source); } if (engine->presentation.pd) IMFPresentationDescriptor_Release(engine->presentation.pd); + if (engine->presentation.frame_sink) + { + video_frame_sink_release(engine->presentation.frame_sink); + engine->presentation.frame_sink = NULL; + } + memset(&engine->presentation, 0, sizeof(engine->presentation)); }
@@ -1273,7 +1335,7 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi if (SUCCEEDED(hr = MFCreateTopology(&topology))) { IMFTopologyNode *sar_node = NULL, *audio_src = NULL; - IMFTopologyNode *grabber_node = NULL, *video_src = NULL; + IMFTopologyNode *svr_node = NULL, *video_src = NULL;
if (engine->flags & MF_MEDIA_ENGINE_REAL_TIME_MODE) IMFTopology_SetUINT32(topology, &MF_LOW_LATENCY, TRUE); @@ -1307,24 +1369,24 @@ static HRESULT media_engine_create_topology(struct media_engine *engine, IMFMedi if (FAILED(hr = media_engine_create_source_node(source, pd, sd_video, &video_src))) WARN("Failed to create video source node, hr %#lx.\n", hr);
- if (FAILED(hr = media_engine_create_video_renderer(engine, &grabber_node))) - WARN("Failed to create video grabber node, hr %#lx.\n", hr); + if (FAILED(hr = media_engine_create_video_renderer(engine, &svr_node))) + WARN("Failed to create simple video renderer node, hr %#lx.\n", hr);
- if (grabber_node && video_src) + if (svr_node && video_src) { IMFTopology_AddNode(topology, video_src); - IMFTopology_AddNode(topology, grabber_node); + IMFTopology_AddNode(topology, svr_node);
if (FAILED(hr = media_engine_create_effects(engine->video_effects.effects, engine->video_effects.count, - video_src, grabber_node, topology))) + video_src, svr_node, topology))) WARN("Failed to create video effect nodes, hr %#lx.\n", hr); }
if (SUCCEEDED(hr)) IMFTopologyNode_GetTopoNodeID(video_src, &engine->video_frame.node_id);
- if (grabber_node) - IMFTopologyNode_Release(grabber_node); + if (svr_node) + IMFTopologyNode_Release(svr_node); if (video_src) IMFTopologyNode_Release(video_src); } @@ -2300,7 +2362,11 @@ static HRESULT WINAPI media_engine_Shutdown(IMFMediaEngineEx *iface) { media_engine_set_flag(engine, FLAGS_ENGINE_SHUT_DOWN, TRUE); media_engine_clear_presentation(engine); + /* critical section can not be held during shutdown, as shut down requires all pending + * callbacks to complete, and some callbacks require this cs */ + LeaveCriticalSection(&engine->cs); IMFMediaSession_Shutdown(engine->session); + EnterCriticalSection(&engine->cs); } LeaveCriticalSection(&engine->cs);
@@ -2353,8 +2419,10 @@ static void media_engine_adjust_destination_for_ratio(const struct media_engine static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context, struct media_engine *engine) { D3D11_TEXTURE2D_DESC surface_desc; + IMFMediaBuffer *media_buffer; + IMFSample *sample;
- if (!(engine->flags & FLAGS_ENGINE_NEW_FRAME)) + if (!video_frame_sink_get_sample(engine->presentation.frame_sink, &sample)) return;
ID3D11Texture2D_GetDesc(engine->video_frame.d3d11.source, &surface_desc); @@ -2370,13 +2438,24 @@ static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context surface_desc.Width = 0; }
- if (engine->video_frame.buffer_size == surface_desc.Width * surface_desc.Height) + if (SUCCEEDED(IMFSample_ConvertToContiguousBuffer(sample, &media_buffer))) { - ID3D11DeviceContext_UpdateSubresource(context, (ID3D11Resource *)engine->video_frame.d3d11.source, - 0, NULL, engine->video_frame.buffer, surface_desc.Width, 0); + BYTE *buffer; + DWORD buffer_size; + if (SUCCEEDED(IMFMediaBuffer_Lock(media_buffer, &buffer, NULL, &buffer_size))) + { + if (buffer_size == surface_desc.Width * surface_desc.Height) + { + ID3D11DeviceContext_UpdateSubresource(context, (ID3D11Resource *)engine->video_frame.d3d11.source, + 0, NULL, buffer, surface_desc.Width, 0); + } + + IMFMediaBuffer_Unlock(media_buffer); + } + IMFMediaBuffer_Release(media_buffer); }
- media_engine_set_flag(engine, FLAGS_ENGINE_NEW_FRAME, FALSE); + IMFSample_Release(sample); }
static HRESULT media_engine_transfer_to_d3d11_texture(struct media_engine *engine, ID3D11Texture2D *texture, @@ -2572,8 +2651,9 @@ static HRESULT WINAPI media_engine_OnVideoStreamTick(IMFMediaEngineEx *iface, LO hr = E_POINTER; else { - *pts = engine->video_frame.pts; - hr = *pts == MINLONGLONG ? S_FALSE : S_OK; + MFTIME clocktime; + IMFPresentationClock_GetTime(engine->clock, &clocktime); + hr = video_frame_sink_get_pts(engine->presentation.frame_sink, clocktime, pts); }
LeaveCriticalSection(&engine->cs); @@ -3169,127 +3249,6 @@ static const IMFGetServiceVtbl media_engine_get_service_vtbl = media_engine_gs_GetService, };
-static HRESULT WINAPI media_engine_grabber_callback_QueryInterface(IMFSampleGrabberSinkCallback *iface, - REFIID riid, void **obj) -{ - if (IsEqualIID(riid, &IID_IMFSampleGrabberSinkCallback) || - IsEqualIID(riid, &IID_IUnknown)) - { - *obj = iface; - IMFSampleGrabberSinkCallback_AddRef(iface); - return S_OK; - } - - *obj = NULL; - return E_NOINTERFACE; -} - -static ULONG WINAPI media_engine_grabber_callback_AddRef(IMFSampleGrabberSinkCallback *iface) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - return IMFMediaEngineEx_AddRef(&engine->IMFMediaEngineEx_iface); -} - -static ULONG WINAPI media_engine_grabber_callback_Release(IMFSampleGrabberSinkCallback *iface) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - return IMFMediaEngineEx_Release(&engine->IMFMediaEngineEx_iface); -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockStart(IMFSampleGrabberSinkCallback *iface, - MFTIME systime, LONGLONG start_offset) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockStop(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - - EnterCriticalSection(&engine->cs); - media_engine_set_flag(engine, FLAGS_ENGINE_FIRST_FRAME, FALSE); - engine->video_frame.pts = MINLONGLONG; - LeaveCriticalSection(&engine->cs); - - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockPause(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockRestart(IMFSampleGrabberSinkCallback *iface, - MFTIME systime) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnClockSetRate(IMFSampleGrabberSinkCallback *iface, - MFTIME systime, float rate) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnSetPresentationClock(IMFSampleGrabberSinkCallback *iface, - IMFPresentationClock *clock) -{ - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnProcessSample(IMFSampleGrabberSinkCallback *iface, - REFGUID major_type, DWORD sample_flags, LONGLONG sample_time, LONGLONG sample_duration, - const BYTE *buffer, DWORD buffer_size) -{ - struct media_engine *engine = impl_from_IMFSampleGrabberSinkCallback(iface); - - EnterCriticalSection(&engine->cs); - - if (!(engine->flags & FLAGS_ENGINE_FIRST_FRAME)) - { - IMFMediaEngineNotify_EventNotify(engine->callback, MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY, 0, 0); - media_engine_set_flag(engine, FLAGS_ENGINE_FIRST_FRAME, TRUE); - } - engine->video_frame.pts = sample_time; - if (engine->video_frame.buffer_size < buffer_size) - { - free(engine->video_frame.buffer); - if ((engine->video_frame.buffer = malloc(buffer_size))) - engine->video_frame.buffer_size = buffer_size; - } - if (engine->video_frame.buffer) - { - memcpy(engine->video_frame.buffer, buffer, buffer_size); - engine->flags |= FLAGS_ENGINE_NEW_FRAME; - } - - LeaveCriticalSection(&engine->cs); - - return S_OK; -} - -static HRESULT WINAPI media_engine_grabber_callback_OnShutdown(IMFSampleGrabberSinkCallback *iface) -{ - return S_OK; -} - -static const IMFSampleGrabberSinkCallbackVtbl media_engine_grabber_callback_vtbl = -{ - media_engine_grabber_callback_QueryInterface, - media_engine_grabber_callback_AddRef, - media_engine_grabber_callback_Release, - media_engine_grabber_callback_OnClockStart, - media_engine_grabber_callback_OnClockStop, - 
media_engine_grabber_callback_OnClockPause, - media_engine_grabber_callback_OnClockRestart, - media_engine_grabber_callback_OnClockSetRate, - media_engine_grabber_callback_OnSetPresentationClock, - media_engine_grabber_callback_OnProcessSample, - media_engine_grabber_callback_OnShutdown, -}; - static HRESULT WINAPI media_engine_factory_QueryInterface(IMFMediaEngineClassFactory *iface, REFIID riid, void **obj) { if (IsEqualIID(riid, &IID_IMFMediaEngineClassFactory) || @@ -3318,15 +3277,15 @@ static ULONG WINAPI media_engine_factory_Release(IMFMediaEngineClassFactory *ifa static HRESULT init_media_engine(DWORD flags, IMFAttributes *attributes, struct media_engine *engine) { UINT32 output_format; - UINT64 playback_hwnd; + UINT64 playback_hwnd = 0; IMFClock *clock; HRESULT hr;
engine->IMFMediaEngineEx_iface.lpVtbl = &media_engine_vtbl; engine->IMFGetService_iface.lpVtbl = &media_engine_get_service_vtbl; engine->session_events.lpVtbl = &media_engine_session_events_vtbl; + engine->sink_events.lpVtbl = &media_engine_sink_events_vtbl; engine->load_handler.lpVtbl = &media_engine_load_handler_vtbl; - engine->grabber_callback.lpVtbl = &media_engine_grabber_callback_vtbl; engine->refcount = 1; engine->flags = (flags & MF_MEDIA_ENGINE_CREATEFLAGS_MASK) | FLAGS_ENGINE_PAUSED; engine->default_playback_rate = 1.0; diff --git a/dlls/mfmediaengine/mediaengine_private.h b/dlls/mfmediaengine/mediaengine_private.h new file mode 100644 index 00000000000..286cb9b1430 --- /dev/null +++ b/dlls/mfmediaengine/mediaengine_private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2019 Nikolay Sivov for CodeWeavers + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + */ + +#include "mfmediaengine.h" + +struct video_frame_sink; + +HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, + struct video_frame_sink **sink); +HRESULT video_frame_sink_query_iface(struct video_frame_sink *object, REFIID riid, void **obj); +ULONG video_frame_sink_release(struct video_frame_sink *sink); +int video_frame_sink_get_sample(struct video_frame_sink *sink, IMFSample **sample); +HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime, LONGLONG *pts); +void video_frame_sink_notify_end_of_presentation_segment(struct video_frame_sink *sink); diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c index bf8de216890..34b3519d063 100644 --- a/dlls/mfmediaengine/tests/mfmediaengine.c +++ b/dlls/mfmediaengine/tests/mfmediaengine.c @@ -1282,6 +1282,7 @@ static void test_TransferVideoFrame(void) HRESULT hr; DWORD res; BSTR url; + LONGLONG pts;
stream = load_resource(L"i420-64x64.avi", L"video/avi");
@@ -1343,6 +1344,7 @@ static void test_TransferVideoFrame(void) ok(!res, "Unexpected res %#lx.\n", res);
SetRect(&dst_rect, 0, 0, desc.Width, desc.Height); + IMFMediaEngineEx_OnVideoStreamTick(notify->media_engine, &pts); hr = IMFMediaEngineEx_TransferVideoFrame(notify->media_engine, (IUnknown *)texture, NULL, &dst_rect, NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
@@ -2331,8 +2333,12 @@ static void test_GetSeekable(void) ok(time_range == NULL || broken(time_range == (IMFMediaTimeRange *)0xdeadbeef) /* <= Win10 1507 */, "Got unexpected pointer.\n");
+ /* IMFMediaEngineEx_Shutdown can release in parallel. A small sleep allows this test to pass more + * often than not. But given it's a matter of timing, this test is marked flaky. + */ + Sleep(10); refcount = IMFMediaEngineEx_Release(media_engine); - todo_wine + flaky_wine ok(!refcount, "Got unexpected refcount %lu.\n", refcount);
/* Unseekable bytestreams */ @@ -2631,7 +2637,6 @@ static void test_SetCurrentTime(void) ok(hr == MF_E_SHUTDOWN, "Unexpected hr %#lx.\n", hr);
refcount = IMFMediaEngineEx_Release(media_engine); - todo_wine ok(!refcount, "Got unexpected refcount %lu.\n", refcount);
/* Unseekable bytestreams */ diff --git a/dlls/mfmediaengine/video_frame_sink.c b/dlls/mfmediaengine/video_frame_sink.c new file mode 100644 index 00000000000..44ceca8a2ad --- /dev/null +++ b/dlls/mfmediaengine/video_frame_sink.c @@ -0,0 +1,1253 @@ +/* + * Copyright 2019 Nikolay Sivov for CodeWeavers + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + */ + +#define COBJMACROS + +#include <float.h> +#include <assert.h> +#include <stdbool.h> + +#include "mfapi.h" +#include "mfidl.h" +#include "mferror.h" + +#include "mediaengine_private.h" + +#include "wine/debug.h" +#include "wine/list.h" + +WINE_DEFAULT_DEBUG_CHANNEL(mfplat); + +enum sink_state +{ + SINK_STATE_STOPPED = 0, + SINK_STATE_PAUSED, + SINK_STATE_RUNNING, +}; + +static inline const char *debugstr_time(LONGLONG time) +{ + ULONGLONG abstime = time >= 0 ? time : -time; + unsigned int i = 0, j = 0; + char buffer[23], rev[23]; + + while (abstime || i <= 8) + { + buffer[i++] = '0' + (abstime % 10); + abstime /= 10; + if (i == 7) buffer[i++] = '.'; + } + if (time < 0) buffer[i++] = '-'; + + while (i--) rev[j++] = buffer[i]; + while (rev[j-1] == '0' && rev[j-2] != '.') --j; + rev[j] = 0; + + return wine_dbg_sprintf("%s", rev); +} + +enum video_frame_sink_flags +{ + FLAGS_FIRST_FRAME = 0x1, +}; + +struct sample_queue +{ + IMFSample *samples[2]; + unsigned int used; + unsigned int front; + unsigned int back; + IMFSample *presentation_sample; + bool sample_presented; +}; + +struct video_frame_sink +{ + IMFMediaSink IMFMediaSink_iface; + IMFClockStateSink IMFClockStateSink_iface; + IMFMediaEventGenerator IMFMediaEventGenerator_iface; + IMFStreamSink IMFStreamSink_iface; + IMFMediaTypeHandler IMFMediaTypeHandler_iface; + LONG refcount; + IMFMediaType *media_type; + IMFMediaType *current_media_type; + bool is_shut_down; + IMFMediaEventQueue *event_queue; + IMFMediaEventQueue *stream_event_queue; + IMFPresentationClock *clock; + IMFAsyncCallback *callback; + float rate; + enum sink_state state; + unsigned int flags; + struct sample_queue queue; + bool sample_request_pending; + bool eos; + CRITICAL_SECTION cs; +}; + +static void video_frame_sink_sample_queue_set_presentation(struct video_frame_sink *sink, IMFSample *sample) +{ + struct sample_queue *queue = &sink->queue; + + if (queue->presentation_sample) + IMFSample_Release(queue->presentation_sample); + + queue->presentation_sample = sample; + queue->sample_presented = FALSE; +} + +static BOOL video_frame_sink_sample_queue_pop(struct video_frame_sink *sink, IMFSample **sample) +{ + struct sample_queue *queue = &sink->queue; + + if (queue->used) + { + *sample = queue->samples[queue->front]; + queue->front = (queue->front + 1) % ARRAY_SIZE(queue->samples); + queue->used--; + } + else + *sample = NULL; + + return *sample != NULL; +} + +static void video_frame_sink_sample_queue_push(struct video_frame_sink 
*sink, IMFSample *sample, + BOOL at_front) +{ + struct sample_queue *queue = &sink->queue; + IMFSample *old_sample; + unsigned int idx; + + // if queue is full, we drop the oldest sample to make room for the new one + if (queue->used == ARRAY_SIZE(queue->samples)) + { + video_frame_sink_sample_queue_pop(sink, &old_sample); + IMFSample_Release(old_sample); + } + + if (at_front) + idx = queue->front = (ARRAY_SIZE(queue->samples) + queue->front - 1) % ARRAY_SIZE(queue->samples); + else + idx = queue->back = (queue->back + 1) % ARRAY_SIZE(queue->samples); + queue->samples[idx] = sample; + queue->used++; +} + +static void video_frame_sink_sample_queue_flush(struct video_frame_sink *sink) +{ + struct sample_queue *queue = &sink->queue; + IMFSample *sample; + + while (video_frame_sink_sample_queue_pop(sink, &sample)) + IMFSample_Release(sample); + + queue->used = 0; + queue->front = 0; + queue->back = ARRAY_SIZE(queue->samples) - 1; + + if (queue->presentation_sample) + { + IMFSample_Release(queue->presentation_sample); + queue->presentation_sample = NULL; + queue->sample_presented = FALSE; + } +} + +static void video_frame_sink_sample_queue_free(struct video_frame_sink *sink) +{ + struct sample_queue *queue = &sink->queue; + IMFSample *sample; + + while (video_frame_sink_sample_queue_pop(sink, &sample)) + IMFSample_Release(sample); + + if (queue->presentation_sample) + IMFSample_Release(queue->presentation_sample); +} + +static void video_frame_sink_set_flag(struct video_frame_sink *sink, unsigned int mask, bool value) +{ + if (value) + sink->flags |= mask; + else + sink->flags &= ~mask; +} + +static struct video_frame_sink *impl_from_IMFMediaSink(IMFMediaSink *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaSink_iface); +} + +static struct video_frame_sink *impl_from_IMFClockStateSink(IMFClockStateSink *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFClockStateSink_iface); +} + +static struct video_frame_sink *impl_from_IMFMediaEventGenerator(IMFMediaEventGenerator *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaEventGenerator_iface); +} + +static struct video_frame_sink *impl_from_IMFStreamSink(IMFStreamSink *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFStreamSink_iface); +} + +static struct video_frame_sink *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandler *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaTypeHandler_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), obj); + + if (IsEqualIID(riid, &IID_IMFStreamSink) || + IsEqualIID(riid, &IID_IMFMediaEventGenerator) || + IsEqualIID(riid, &IID_IUnknown)) + { + *obj = &sink->IMFStreamSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFMediaTypeHandler)) + { + *obj = &sink->IMFMediaTypeHandler_iface; + } + else + { + WARN("Unsupported %s.\n", debugstr_guid(riid)); + *obj = NULL; + return E_NOINTERFACE; + } + + IUnknown_AddRef((IUnknown *)*obj); + + return S_OK; +} + +static ULONG WINAPI video_frame_sink_stream_AddRef(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_Release(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + return 
IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_GetEvent(IMFStreamSink *iface, DWORD flags, IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %#lx, %p.\n", iface, flags, event); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_GetEvent(sink->stream_event_queue, flags, event); +} + +static HRESULT WINAPI video_frame_sink_stream_BeginGetEvent(IMFStreamSink *iface, IMFAsyncCallback *callback, + IUnknown *state) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p, %p.\n", iface, callback, state); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_BeginGetEvent(sink->stream_event_queue, callback, state); +} + +static HRESULT WINAPI video_frame_sink_stream_EndGetEvent(IMFStreamSink *iface, IMFAsyncResult *result, + IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p, %p.\n", iface, result, event); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_EndGetEvent(sink->stream_event_queue, result, event); +} + +static HRESULT WINAPI video_frame_sink_stream_QueueEvent(IMFStreamSink *iface, MediaEventType event_type, + REFGUID ext_type, HRESULT hr, const PROPVARIANT *value) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %lu, %s, %#lx, %p.\n", iface, event_type, debugstr_guid(ext_type), hr, value); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + return IMFMediaEventQueue_QueueEventParamVar(sink->stream_event_queue, event_type, ext_type, hr, value); +} + +static HRESULT WINAPI video_frame_sink_stream_GetMediaSink(IMFStreamSink *iface, IMFMediaSink **ret) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, ret); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *ret = &sink->IMFMediaSink_iface; + IMFMediaSink_AddRef(*ret); + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_GetIdentifier(IMFStreamSink *iface, DWORD *identifier) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, identifier); + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *identifier = 0; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_GetMediaTypeHandler(IMFStreamSink *iface, IMFMediaTypeHandler **handler) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + + TRACE("%p, %p.\n", iface, handler); + + if (!handler) + return E_POINTER; + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + *handler = &sink->IMFMediaTypeHandler_iface; + IMFMediaTypeHandler_AddRef(*handler); + + return S_OK; +} + +/* must be called with critical section held */ +static void video_frame_sink_stream_request_sample(struct video_frame_sink *sink) +{ + if (sink->sample_request_pending || sink->eos) + return; + + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkRequestSample, &GUID_NULL, S_OK, NULL); + sink->sample_request_pending = TRUE; +} + +static void video_frame_sink_notify(struct video_frame_sink *sink, unsigned int event) +{ + IMFAsyncResult *result; + + if (FAILED(MFCreateAsyncResult(NULL, sink->callback, NULL, &result))) + return; + + IMFAsyncResult_SetStatus(result, event); + MFInvokeCallback(result); + IMFAsyncResult_Release(result); +} + +static HRESULT WINAPI 
video_frame_sink_stream_ProcessSample(IMFStreamSink *iface, IMFSample *sample) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + LONGLONG sampletime; + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, sample); + + if (!sample) + return S_OK; + + EnterCriticalSection(&sink->cs); + + sink->sample_request_pending = FALSE; + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else if (sink->state == SINK_STATE_RUNNING || sink->state == SINK_STATE_PAUSED) + { + hr = IMFSample_GetSampleTime(sample, &sampletime); + + if (SUCCEEDED(hr)) + { + if (!(sink->flags & FLAGS_FIRST_FRAME)) + { + video_frame_sink_notify(sink, MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY); + video_frame_sink_set_flag(sink, FLAGS_FIRST_FRAME, TRUE); + } + // else TODO: send MEQualityNotify event + + IMFSample_AddRef(sample); + video_frame_sink_sample_queue_push(sink, sample, FALSE); + + if (sink->queue.used != ARRAY_SIZE(sink->queue.samples)) + video_frame_sink_stream_request_sample(sink); + } + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_PlaceMarker(IMFStreamSink *iface, MFSTREAMSINK_MARKER_TYPE marker_type, + const PROPVARIANT *marker_value, const PROPVARIANT *context_value) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %d, %p, %p.\n", iface, marker_type, marker_value, context_value); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else if (sink->state == SINK_STATE_RUNNING) + { + video_frame_sink_sample_queue_flush(sink); + hr = IMFMediaEventQueue_QueueEventParamVar(sink->stream_event_queue, MEStreamSinkMarker, + &GUID_NULL, S_OK, context_value); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_Flush(IMFStreamSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); + HRESULT hr = S_OK; + + TRACE("%p.\n", iface); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_STREAMSINK_REMOVED; + else + video_frame_sink_sample_queue_flush(sink); + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static const IMFStreamSinkVtbl video_frame_sink_stream_vtbl = +{ + video_frame_sink_stream_QueryInterface, + video_frame_sink_stream_AddRef, + video_frame_sink_stream_Release, + video_frame_sink_stream_GetEvent, + video_frame_sink_stream_BeginGetEvent, + video_frame_sink_stream_EndGetEvent, + video_frame_sink_stream_QueueEvent, + video_frame_sink_stream_GetMediaSink, + video_frame_sink_stream_GetIdentifier, + video_frame_sink_stream_GetMediaTypeHandler, + video_frame_sink_stream_ProcessSample, + video_frame_sink_stream_PlaceMarker, + video_frame_sink_stream_Flush, +}; + +static HRESULT WINAPI video_frame_sink_stream_type_handler_QueryInterface(IMFMediaTypeHandler *iface, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_stream_type_handler_AddRef(IMFMediaTypeHandler *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_AddRef(&sink->IMFStreamSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_type_handler_Release(IMFMediaTypeHandler *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + return IMFStreamSink_Release(&sink->IMFStreamSink_iface); +} + +static 
HRESULT video_frame_sink_stream_is_media_type_supported(struct video_frame_sink *sink, IMFMediaType *in_type) +{ + const DWORD supported_flags = MF_MEDIATYPE_EQUAL_MAJOR_TYPES | MF_MEDIATYPE_EQUAL_FORMAT_TYPES | + MF_MEDIATYPE_EQUAL_FORMAT_DATA; + DWORD flags; + + if (sink->is_shut_down) + return MF_E_STREAMSINK_REMOVED; + + if (!in_type) + return E_POINTER; + + if (IMFMediaType_IsEqual(sink->media_type, in_type, &flags) == S_OK) + return S_OK; + + return (flags & supported_flags) == supported_flags ? S_OK : MF_E_INVALIDMEDIATYPE; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_IsMediaTypeSupported(IMFMediaTypeHandler *iface, + IMFMediaType *in_type, IMFMediaType **out_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + + TRACE("%p, %p, %p.\n", iface, in_type, out_type); + + return video_frame_sink_stream_is_media_type_supported(sink, in_type); +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMediaTypeCount(IMFMediaTypeHandler *iface, DWORD *count) +{ + TRACE("%p, %p.\n", iface, count); + + if (!count) + return E_POINTER; + + *count = 0; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMediaTypeByIndex(IMFMediaTypeHandler *iface, DWORD index, + IMFMediaType **media_type) +{ + TRACE("%p, %lu, %p.\n", iface, index, media_type); + + if (!media_type) + return E_POINTER; + + return MF_E_NO_MORE_TYPES; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_SetCurrentMediaType(IMFMediaTypeHandler *iface, + IMFMediaType *media_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr; + + TRACE("%p, %p.\n", iface, media_type); + + if (FAILED(hr = video_frame_sink_stream_is_media_type_supported(sink, media_type))) + return hr; + + IMFMediaType_Release(sink->current_media_type); + sink->current_media_type = media_type; + IMFMediaType_AddRef(sink->current_media_type); + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetCurrentMediaType(IMFMediaTypeHandler *iface, + IMFMediaType **media_type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, media_type); + + if (!media_type) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_STREAMSINK_REMOVED; + } + else + { + *media_type = sink->current_media_type; + IMFMediaType_AddRef(*media_type); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_stream_type_handler_GetMajorType(IMFMediaTypeHandler *iface, GUID *type) +{ + struct video_frame_sink *sink = impl_from_IMFMediaTypeHandler(iface); + HRESULT hr; + + TRACE("%p, %p.\n", iface, type); + + if (!type) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_STREAMSINK_REMOVED; + else + hr = IMFMediaType_GetMajorType(sink->current_media_type, type); + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static const IMFMediaTypeHandlerVtbl video_frame_sink_stream_type_handler_vtbl = +{ + video_frame_sink_stream_type_handler_QueryInterface, + video_frame_sink_stream_type_handler_AddRef, + video_frame_sink_stream_type_handler_Release, + video_frame_sink_stream_type_handler_IsMediaTypeSupported, + video_frame_sink_stream_type_handler_GetMediaTypeCount, + video_frame_sink_stream_type_handler_GetMediaTypeByIndex, + video_frame_sink_stream_type_handler_SetCurrentMediaType, + 
video_frame_sink_stream_type_handler_GetCurrentMediaType, + video_frame_sink_stream_type_handler_GetMajorType, +}; + +static HRESULT WINAPI video_frame_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), obj); + + if (IsEqualIID(riid, &IID_IMFMediaSink) || + IsEqualIID(riid, &IID_IUnknown)) + { + *obj = &sink->IMFMediaSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFClockStateSink)) + { + *obj = &sink->IMFClockStateSink_iface; + } + else if (IsEqualIID(riid, &IID_IMFMediaEventGenerator)) + { + *obj = &sink->IMFMediaEventGenerator_iface; + } + else + { + WARN("Unsupported %s.\n", debugstr_guid(riid)); + *obj = NULL; + return E_NOINTERFACE; + } + + IUnknown_AddRef((IUnknown *)*obj); + + return S_OK; +} + +static ULONG WINAPI video_frame_sink_AddRef(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + ULONG refcount = InterlockedIncrement(&sink->refcount); + + TRACE("%p, refcount %lu.\n", iface, refcount); + + return refcount; +} + +static ULONG WINAPI video_frame_sink_Release(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + ULONG refcount = InterlockedDecrement(&sink->refcount); + + TRACE("%p, refcount %lu.\n", iface, refcount); + + if (!refcount) + { + if (sink->current_media_type) + IMFMediaType_Release(sink->current_media_type); + IMFMediaType_Release(sink->media_type); + if (sink->event_queue) + IMFMediaEventQueue_Release(sink->event_queue); + if (sink->clock) + IMFPresentationClock_Release(sink->clock); + if (sink->callback) + IMFAsyncCallback_Release(sink->callback); + if (sink->stream_event_queue) + { + IMFMediaEventQueue_Shutdown(sink->stream_event_queue); + IMFMediaEventQueue_Release(sink->stream_event_queue); + } + video_frame_sink_sample_queue_free(sink); + DeleteCriticalSection(&sink->cs); + free(sink); + } + + return refcount; +} + +static HRESULT WINAPI video_frame_sink_GetCharacteristics(IMFMediaSink *iface, DWORD *flags) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %p.\n", iface, flags); + + if (sink->is_shut_down) + return MF_E_SHUTDOWN; + + *flags = MEDIASINK_FIXED_STREAMS | MEDIASINK_RATELESS | MEDIASINK_CAN_PREROLL; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_AddStreamSink(IMFMediaSink *iface, DWORD stream_sink_id, + IMFMediaType *media_type, IMFStreamSink **stream_sink) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %#lx, %p, %p.\n", iface, stream_sink_id, media_type, stream_sink); + + return sink->is_shut_down ? MF_E_SHUTDOWN : MF_E_STREAMSINKS_FIXED; +} + +static HRESULT WINAPI video_frame_sink_RemoveStreamSink(IMFMediaSink *iface, DWORD stream_sink_id) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %#lx.\n", iface, stream_sink_id); + + return sink->is_shut_down ? 
MF_E_SHUTDOWN : MF_E_STREAMSINKS_FIXED; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkCount(IMFMediaSink *iface, DWORD *count) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + + TRACE("%p, %p.\n", iface, count); + + if (sink->is_shut_down) + return MF_E_SHUTDOWN; + + *count = 1; + + return S_OK; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkByIndex(IMFMediaSink *iface, DWORD index, + IMFStreamSink **stream) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %lu, %p.\n", iface, index, stream); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else if (index > 0) + hr = MF_E_INVALIDINDEX; + else + { + *stream = &sink->IMFStreamSink_iface; + IMFStreamSink_AddRef(*stream); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_GetStreamSinkById(IMFMediaSink *iface, DWORD stream_sink_id, + IMFStreamSink **stream) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %#lx, %p.\n", iface, stream_sink_id, stream); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else if (stream_sink_id > 0) + hr = MF_E_INVALIDSTREAMNUMBER; + else + { + *stream = &sink->IMFStreamSink_iface; + IMFStreamSink_AddRef(*stream); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static void video_frame_sink_set_presentation_clock(struct video_frame_sink *sink, IMFPresentationClock *clock) +{ + if (sink->clock) + { + IMFPresentationClock_RemoveClockStateSink(sink->clock, &sink->IMFClockStateSink_iface); + IMFPresentationClock_Release(sink->clock); + } + sink->clock = clock; + if (sink->clock) + { + IMFPresentationClock_AddRef(sink->clock); + IMFPresentationClock_AddClockStateSink(sink->clock, &sink->IMFClockStateSink_iface); + } +} + +static HRESULT WINAPI video_frame_sink_SetPresentationClock(IMFMediaSink *iface, IMFPresentationClock *clock) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, clock); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + { + hr = MF_E_SHUTDOWN; + } + else + { + video_frame_sink_set_presentation_clock(sink, clock); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_GetPresentationClock(IMFMediaSink *iface, IMFPresentationClock **clock) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %p.\n", iface, clock); + + if (!clock) + return E_POINTER; + + EnterCriticalSection(&sink->cs); + + if (sink->clock) + { + *clock = sink->clock; + IMFPresentationClock_AddRef(*clock); + } + else + hr = MF_E_NO_CLOCK; + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_Shutdown(IMFMediaSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); + HRESULT hr = S_OK; + + TRACE("%p.\n", iface); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else + { + sink->is_shut_down = TRUE; + video_frame_sink_set_presentation_clock(sink, NULL); + IMFMediaType_Release(sink->current_media_type); + sink->current_media_type = NULL; + IMFAsyncCallback_Release(sink->callback); + sink->callback = NULL; + IMFMediaEventQueue_Shutdown(sink->stream_event_queue); + IMFMediaEventQueue_Shutdown(sink->event_queue); + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + 
+static const IMFMediaSinkVtbl video_frame_sink_vtbl = +{ + video_frame_sink_QueryInterface, + video_frame_sink_AddRef, + video_frame_sink_Release, + video_frame_sink_GetCharacteristics, + video_frame_sink_AddStreamSink, + video_frame_sink_RemoveStreamSink, + video_frame_sink_GetStreamSinkCount, + video_frame_sink_GetStreamSinkByIndex, + video_frame_sink_GetStreamSinkById, + video_frame_sink_SetPresentationClock, + video_frame_sink_GetPresentationClock, + video_frame_sink_Shutdown, +}; + +static HRESULT WINAPI video_frame_sink_clock_sink_QueryInterface(IMFClockStateSink *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_QueryInterface(&sink->IMFMediaSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_clock_sink_AddRef(IMFClockStateSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_clock_sink_Release(IMFClockStateSink *iface) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + return IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT video_frame_sink_set_state(struct video_frame_sink *sink, enum sink_state state, + MFTIME systime, LONGLONG offset) +{ + static const DWORD events[] = + { + MEStreamSinkStopped, /* SINK_STATE_STOPPED */ + MEStreamSinkPaused, /* SINK_STATE_PAUSED */ + MEStreamSinkStarted, /* SINK_STATE_RUNNING */ + }; + HRESULT hr = S_OK; + + EnterCriticalSection(&sink->cs); + + if (!sink->is_shut_down) + { + if (state == SINK_STATE_PAUSED && sink->state == SINK_STATE_STOPPED) + { + hr = MF_E_INVALID_STATE_TRANSITION; + } + else + { + if (state == SINK_STATE_STOPPED) + { + video_frame_sink_sample_queue_flush(sink); + video_frame_sink_set_flag(sink, FLAGS_FIRST_FRAME, FALSE); + } + + if (state == SINK_STATE_RUNNING && sink->state != SINK_STATE_RUNNING) + { + video_frame_sink_sample_queue_flush(sink); + video_frame_sink_stream_request_sample(sink); + } + + if (state != sink->state || state != SINK_STATE_PAUSED) + { + if (sink->rate == 0.0f && state == SINK_STATE_RUNNING) + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkScrubSampleComplete, + &GUID_NULL, S_OK, NULL); + + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, events[state], &GUID_NULL, S_OK, NULL); + } + sink->state = state; + } + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockStart(IMFClockStateSink *iface, MFTIME systime, LONGLONG offset) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s, %s.\n", iface, debugstr_time(systime), debugstr_time(offset)); + + return video_frame_sink_set_state(sink, SINK_STATE_RUNNING, systime, offset); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockStop(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_STOPPED, systime, 0); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockPause(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_PAUSED, systime, 0); +} + +static HRESULT WINAPI 
video_frame_sink_clock_sink_OnClockRestart(IMFClockStateSink *iface, MFTIME systime) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + + TRACE("%p, %s.\n", iface, debugstr_time(systime)); + + return video_frame_sink_set_state(sink, SINK_STATE_RUNNING, systime, PRESENTATION_CURRENT_POSITION); +} + +static HRESULT WINAPI video_frame_sink_clock_sink_OnClockSetRate(IMFClockStateSink *iface, MFTIME systime, float rate) +{ + struct video_frame_sink *sink = impl_from_IMFClockStateSink(iface); + HRESULT hr = S_OK; + + TRACE("%p, %s, %f.\n", iface, debugstr_time(systime), rate); + + EnterCriticalSection(&sink->cs); + + if (sink->is_shut_down) + hr = MF_E_SHUTDOWN; + else + { + IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkRateChanged, &GUID_NULL, S_OK, NULL); + sink->rate = rate; + } + + LeaveCriticalSection(&sink->cs); + + return hr; +} + +static HRESULT WINAPI video_frame_sink_events_QueryInterface(IMFMediaEventGenerator *iface, REFIID riid, void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_QueryInterface(&sink->IMFMediaSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_events_AddRef(IMFMediaEventGenerator *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_AddRef(&sink->IMFMediaSink_iface); +} + +static ULONG WINAPI video_frame_sink_events_Release(IMFMediaEventGenerator *iface) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + return IMFMediaSink_Release(&sink->IMFMediaSink_iface); +} + +static HRESULT WINAPI video_frame_sink_events_GetEvent(IMFMediaEventGenerator *iface, DWORD flags, IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %#lx, %p.\n", iface, flags, event); + + return IMFMediaEventQueue_GetEvent(sink->event_queue, flags, event); +} + +static HRESULT WINAPI video_frame_sink_events_BeginGetEvent(IMFMediaEventGenerator *iface, IMFAsyncCallback *callback, + IUnknown *state) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %p, %p.\n", iface, callback, state); + + return IMFMediaEventQueue_BeginGetEvent(sink->event_queue, callback, state); +} + +static HRESULT WINAPI video_frame_sink_events_EndGetEvent(IMFMediaEventGenerator *iface, IMFAsyncResult *result, + IMFMediaEvent **event) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %p, %p.\n", iface, result, event); + + return IMFMediaEventQueue_EndGetEvent(sink->event_queue, result, event); +} + +static HRESULT WINAPI video_frame_sink_events_QueueEvent(IMFMediaEventGenerator *iface, MediaEventType event_type, + REFGUID ext_type, HRESULT hr, const PROPVARIANT *value) +{ + struct video_frame_sink *sink = impl_from_IMFMediaEventGenerator(iface); + + TRACE("%p, %lu, %s, %#lx, %p.\n", iface, event_type, debugstr_guid(ext_type), hr, value); + + return IMFMediaEventQueue_QueueEventParamVar(sink->event_queue, event_type, ext_type, hr, value); +} + +static const IMFMediaEventGeneratorVtbl video_frame_sink_events_vtbl = +{ + video_frame_sink_events_QueryInterface, + video_frame_sink_events_AddRef, + video_frame_sink_events_Release, + video_frame_sink_events_GetEvent, + video_frame_sink_events_BeginGetEvent, + video_frame_sink_events_EndGetEvent, + video_frame_sink_events_QueueEvent, +}; + +static const IMFClockStateSinkVtbl video_frame_sink_clock_sink_vtbl = +{ + 
video_frame_sink_clock_sink_QueryInterface, + video_frame_sink_clock_sink_AddRef, + video_frame_sink_clock_sink_Release, + video_frame_sink_clock_sink_OnClockStart, + video_frame_sink_clock_sink_OnClockStop, + video_frame_sink_clock_sink_OnClockPause, + video_frame_sink_clock_sink_OnClockRestart, + video_frame_sink_clock_sink_OnClockSetRate, +}; + +HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) +{ + struct video_frame_sink *object; + HRESULT hr; + + if (!(object = calloc(1, sizeof(*object)))) + return E_OUTOFMEMORY; + + object->IMFMediaSink_iface.lpVtbl = &video_frame_sink_vtbl; + object->IMFClockStateSink_iface.lpVtbl = &video_frame_sink_clock_sink_vtbl; + object->IMFMediaEventGenerator_iface.lpVtbl = &video_frame_sink_events_vtbl; + object->IMFStreamSink_iface.lpVtbl = &video_frame_sink_stream_vtbl; + object->IMFMediaTypeHandler_iface.lpVtbl = &video_frame_sink_stream_type_handler_vtbl; + object->refcount = 1; + object->rate = 1.0f; + object->queue.back = ARRAY_SIZE(object->queue.samples) - 1; + object->media_type = media_type; + IMFAsyncCallback_AddRef(object->callback = events_callback); + IMFMediaType_AddRef(object->media_type); + object->current_media_type = media_type; + IMFMediaType_AddRef(object->current_media_type); + InitializeCriticalSection(&object->cs); + + if (FAILED(hr = MFCreateEventQueue(&object->stream_event_queue))) + goto failed; + + if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) + goto failed; + + *sink = object; + + return S_OK; + +failed: + + IMFMediaSink_Release(&object->IMFMediaSink_iface); + + return hr; +} + +HRESULT video_frame_sink_query_iface(struct video_frame_sink *sink, REFIID riid, void **obj) +{ + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +int video_frame_sink_get_sample(struct video_frame_sink *sink, IMFSample **ret) +{ + *ret = NULL; + + if (sink) + { + EnterCriticalSection(&sink->cs); + + if (sink->queue.presentation_sample) + { + IMFSample_AddRef(*ret = sink->queue.presentation_sample); + sink->queue.sample_presented = TRUE; + } + + LeaveCriticalSection(&sink->cs); + } + + return !!*ret; +} + +static HRESULT sample_get_pts(IMFSample *sample, MFTIME clocktime, LONGLONG *pts) +{ + HRESULT hr = S_FALSE; + LONGLONG sample_pts; + + if (sample) + { + if (SUCCEEDED(hr = IMFSample_GetSampleTime(sample, &sample_pts))) + { + hr = (clocktime >= sample_pts) ? S_OK : S_FALSE; + if (hr == S_OK) + *pts = sample_pts; + } + else + WARN("Failed to get sample time, hr %#lx.\n", hr); + } + return hr; +} + +/* + * This function selects the queued sample with the greatest PTS that is also below the supplied value of clocktime. + * If no queued sample has a PTS below the supplied value of clocktime, S_FALSE is returned. + * Otherwise S_TRUE is returned, the PTS of the selected sample is provided, and the selected sample will now be returned + * by subsequent calls to video_frame_sink_get_sample. + * Queued samples with a PTS lower than the PTS of the selected sample will be silently dropped. 
+ */ +HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime, LONGLONG *pts) +{ + HRESULT hr = S_FALSE; + + *pts = MINLONGLONG; + if (sink) + { + IMFSample *sample; + bool transfer_sample = FALSE; + EnterCriticalSection(&sink->cs); + while (video_frame_sink_sample_queue_pop(sink, &sample)) + { + if (sample_get_pts(sample, clocktime, pts) == S_OK) + { + video_frame_sink_sample_queue_set_presentation(sink, sample); + transfer_sample = TRUE; + hr = S_OK; + } + else + { + video_frame_sink_sample_queue_push(sink, sample, TRUE); + break; + } + } + + if (transfer_sample) + video_frame_sink_stream_request_sample(sink); + else if (!sink->queue.sample_presented) + hr = sample_get_pts(sink->queue.presentation_sample, clocktime, pts); + + LeaveCriticalSection(&sink->cs); + } + + return hr; +} + +void video_frame_sink_notify_end_of_presentation_segment(struct video_frame_sink *sink) +{ + sink->eos = TRUE; +} + +ULONG video_frame_sink_release(struct video_frame_sink *sink) +{ + return video_frame_sink_Release(&sink->IMFMediaSink_iface); +} +
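The interplay between video_frame_sink_get_pts() and video_frame_sink_get_sample() above is easiest to see from the caller's side. Below is a small usage sketch mirroring media_engine_OnVideoStreamTick() and the frame transfer path in main.c; the poll_frame() helper and its error handling are illustrative, not part of the patch.

#define COBJMACROS
#include "mfidl.h"
#include "mediaengine_private.h"

static HRESULT poll_frame(struct video_frame_sink *frame_sink, IMFPresentationClock *clock)
{
    IMFSample *sample;
    MFTIME clocktime;
    LONGLONG pts;
    HRESULT hr;

    /* Ask the sink for the newest queued sample whose timestamp is at or
     * before the current presentation time. */
    IMFPresentationClock_GetTime(clock, &clocktime);
    hr = video_frame_sink_get_pts(frame_sink, clocktime, &pts);
    if (hr != S_OK)
        return hr; /* S_FALSE: nothing is due for presentation yet */

    /* The sample selected above is now returned by get_sample(). */
    if (video_frame_sink_get_sample(frame_sink, &sample))
    {
        /* present the frame with timestamp pts */
        IMFSample_Release(sample);
    }

    return S_OK;
}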
From: Rémi Bernon rbernon@codeweavers.com
--- dlls/mfmediaengine/main.c | 82 +++++++++++++++++++++++- dlls/mfmediaengine/mediaengine_private.h | 2 +- dlls/mfmediaengine/video_frame_sink.c | 64 +++++++++++++++++- 3 files changed, 144 insertions(+), 4 deletions(-)
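Before the diff, a short sketch of what "D3D-aware" means here: an upstream component (typically the decoder, wired up by the topology loader) asks the stream sink for a device manager through IMFGetService with MR_VIDEO_ACCELERATION_SERVICE, so decoded frames can stay in GPU memory as IMFDXGIBuffer-backed samples. The sink side of this is video_frame_sink_stream_get_service_GetService() below; the get_sink_device_manager() helper and the IMFDXGIDeviceManager riid are illustrative only.

#define COBJMACROS
#include "mfidl.h"
#include "evr.h"

static HRESULT get_sink_device_manager(IMFStreamSink *stream_sink, IMFDXGIDeviceManager **manager)
{
    IMFGetService *gs;
    HRESULT hr;

    if (FAILED(hr = IMFStreamSink_QueryInterface(stream_sink, &IID_IMFGetService, (void **)&gs)))
        return hr;

    /* The sink forwards this to the device manager it was created with. */
    hr = IMFGetService_GetService(gs, &MR_VIDEO_ACCELERATION_SERVICE,
            &IID_IMFDXGIDeviceManager, (void **)manager);
    IMFGetService_Release(gs);
    return hr;
}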
diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index caeab3dea30..0f2ce5d189f 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -1196,7 +1196,7 @@ static HRESULT media_engine_create_video_renderer(struct media_engine *engine, I IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &subtype);
- hr = create_video_frame_sink(media_type, &engine->sink_events, &engine->presentation.frame_sink); + hr = create_video_frame_sink(media_type, (IUnknown *)engine->device_manager, &engine->sink_events, &engine->presentation.frame_sink); IMFMediaType_Release(media_type); if (FAILED(hr)) return hr; @@ -2458,6 +2458,83 @@ static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context IMFSample_Release(sample); }
+static HRESULT get_d3d11_resource_from_sample(IMFSample *sample, ID3D11Texture2D **resource, UINT *subresource) +{ + IMFDXGIBuffer *dxgi_buffer; + IMFMediaBuffer *buffer; + HRESULT hr; + + *resource = NULL; + *subresource = 0; + + if (FAILED(hr = IMFSample_GetBufferByIndex(sample, 0, &buffer))) + return hr; + + if (SUCCEEDED(hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer))) + { + IMFDXGIBuffer_GetSubresourceIndex(dxgi_buffer, subresource); + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, (void **)resource); + IMFDXGIBuffer_Release(dxgi_buffer); + } + + IMFMediaBuffer_Release(buffer); + return hr; +} + +static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Texture2D *dst_texture, + const MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) +{ + MFVideoNormalizedRect src_rect_default = {0.0, 0.0, 1.0, 1.0}; + MFARGB color_default = {0, 0, 0, 0}; + D3D11_TEXTURE2D_DESC src_desc; + ID3D11DeviceContext *context; + ID3D11Texture2D *src_texture; + RECT dst_rect_default = {0}; + D3D11_BOX src_box = {0}; + ID3D11Device *device; + IMFSample *sample; + UINT subresource; + HRESULT hr; + + if (!src_rect) + src_rect = &src_rect_default; + if (!dst_rect) + dst_rect = &dst_rect_default; + if (!color) + color = &color_default; + + if (!video_frame_sink_get_sample(engine->presentation.frame_sink, &sample)) + return MF_E_UNEXPECTED; + hr = get_d3d11_resource_from_sample(sample, &src_texture, &subresource); + IMFSample_Release(sample); + if (FAILED(hr)) + return hr; + + if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) + { + ID3D11Texture2D_Release(src_texture); + return hr; + } + + ID3D11Texture2D_GetDesc(src_texture, &src_desc); + + src_box.left = src_rect->left * src_desc.Width; + src_box.top = src_rect->top * src_desc.Height; + src_box.front = 0; + src_box.right = src_rect->right * src_desc.Width; + src_box.bottom = src_rect->bottom * src_desc.Height; + src_box.back = 1; + + ID3D11Device_GetImmediateContext(device, &context); + ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)dst_texture, 0, + dst_rect->left, dst_rect->top, 0, (ID3D11Resource *)src_texture, subresource, &src_box); + ID3D11DeviceContext_Release(context); + + media_engine_unlock_d3d_device(engine, device); + ID3D11Texture2D_Release(src_texture); + return hr; +} + static HRESULT media_engine_transfer_to_d3d11_texture(struct media_engine *engine, ID3D11Texture2D *texture, const MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) { @@ -2623,7 +2700,8 @@ static HRESULT WINAPI media_engine_TransferVideoFrame(IMFMediaEngineEx *iface, I
if (SUCCEEDED(IUnknown_QueryInterface(surface, &IID_ID3D11Texture2D, (void **)&texture))) { - hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); + if (!engine->device_manager || FAILED(hr = media_engine_transfer_d3d11(engine, texture, src_rect, dst_rect, color))) + hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); ID3D11Texture2D_Release(texture); } else diff --git a/dlls/mfmediaengine/mediaengine_private.h b/dlls/mfmediaengine/mediaengine_private.h index 286cb9b1430..3b280a6d1ee 100644 --- a/dlls/mfmediaengine/mediaengine_private.h +++ b/dlls/mfmediaengine/mediaengine_private.h @@ -20,7 +20,7 @@
struct video_frame_sink;
-HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink); HRESULT video_frame_sink_query_iface(struct video_frame_sink *object, REFIID riid, void **obj); ULONG video_frame_sink_release(struct video_frame_sink *sink); diff --git a/dlls/mfmediaengine/video_frame_sink.c b/dlls/mfmediaengine/video_frame_sink.c index 44ceca8a2ad..1bcfe862648 100644 --- a/dlls/mfmediaengine/video_frame_sink.c +++ b/dlls/mfmediaengine/video_frame_sink.c @@ -28,6 +28,9 @@
#include "mediaengine_private.h"
+#include "initguid.h" +#include "evr.h" + #include "wine/debug.h" #include "wine/list.h"
@@ -83,7 +86,9 @@ struct video_frame_sink IMFMediaEventGenerator IMFMediaEventGenerator_iface; IMFStreamSink IMFStreamSink_iface; IMFMediaTypeHandler IMFMediaTypeHandler_iface; + IMFGetService IMFGetService_iface; LONG refcount; + IUnknown *device_manager; IMFMediaType *media_type; IMFMediaType *current_media_type; bool is_shut_down; @@ -214,6 +219,11 @@ static struct video_frame_sink *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandle return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaTypeHandler_iface); }
+static struct video_frame_sink *impl_from_IMFGetService(IMFGetService *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFGetService_iface); +} + static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *iface, REFIID riid, void **obj) { struct video_frame_sink *sink = impl_from_IMFStreamSink(iface); @@ -230,6 +240,10 @@ static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *ifac { *obj = &sink->IMFMediaTypeHandler_iface; } + else if (IsEqualIID(riid, &IID_IMFGetService)) + { + *obj = &sink->IMFGetService_iface; + } else { WARN("Unsupported %s.\n", debugstr_guid(riid)); @@ -632,6 +646,49 @@ static const IMFMediaTypeHandlerVtbl video_frame_sink_stream_type_handler_vtbl = video_frame_sink_stream_type_handler_GetMajorType, };
+static HRESULT WINAPI video_frame_sink_stream_get_service_QueryInterface(IMFGetService *iface, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_stream_get_service_AddRef(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_AddRef(&sink->IMFStreamSink_iface); +} + +static ULONG WINAPI video_frame_sink_stream_get_service_Release(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_Release(&sink->IMFStreamSink_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_get_service_GetService(IMFGetService *iface, REFGUID service, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + + if (IsEqualGUID(service, &MR_VIDEO_ACCELERATION_SERVICE)) + { + if (sink->device_manager) + return IUnknown_QueryInterface(sink->device_manager, riid, obj); + return E_NOINTERFACE; + } + + FIXME("Unsupported service %s, riid %s.\n", debugstr_guid(service), debugstr_guid(riid)); + return MF_E_UNSUPPORTED_SERVICE; +} + +static const IMFGetServiceVtbl video_frame_sink_stream_get_service_vtbl = +{ + video_frame_sink_stream_get_service_QueryInterface, + video_frame_sink_stream_get_service_AddRef, + video_frame_sink_stream_get_service_Release, + video_frame_sink_stream_get_service_GetService, +}; + static HRESULT WINAPI video_frame_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj) { struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); @@ -685,6 +742,8 @@ static ULONG WINAPI video_frame_sink_Release(IMFMediaSink *iface) if (sink->current_media_type) IMFMediaType_Release(sink->current_media_type); IMFMediaType_Release(sink->media_type); + if (sink->device_manager) + IUnknown_Release(sink->device_manager); if (sink->event_queue) IMFMediaEventQueue_Release(sink->event_queue); if (sink->clock) @@ -1114,7 +1173,7 @@ static const IMFClockStateSinkVtbl video_frame_sink_clock_sink_vtbl = video_frame_sink_clock_sink_OnClockSetRate, };
-HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) { struct video_frame_sink *object; HRESULT hr; @@ -1127,9 +1186,12 @@ HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *even object->IMFMediaEventGenerator_iface.lpVtbl = &video_frame_sink_events_vtbl; object->IMFStreamSink_iface.lpVtbl = &video_frame_sink_stream_vtbl; object->IMFMediaTypeHandler_iface.lpVtbl = &video_frame_sink_stream_type_handler_vtbl; + object->IMFGetService_iface.lpVtbl = &video_frame_sink_stream_get_service_vtbl; object->refcount = 1; object->rate = 1.0f; object->queue.back = ARRAY_SIZE(object->queue.samples) - 1; + if ((object->device_manager = device_manager)) + IUnknown_AddRef(object->device_manager); object->media_type = media_type; IMFAsyncCallback_AddRef(object->callback = events_callback); IMFMediaType_AddRef(object->media_type);
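For context, exposing IMFGetService on the stream sink is what lets an upstream D3D-aware component pick up the device manager through MR_VIDEO_ACCELERATION_SERVICE. A rough sketch of the consumer side follows (not part of the patch; the function name is made up, and the IID assumes the manager handed to the sink above is an IMFDXGIDeviceManager):

/* Sketch only: how a pipeline component could fetch the device manager from the
   stream sink via the service exposed above. Assumes the usual mfidl.h/evr.h
   declarations; error handling left to the caller. */
static HRESULT get_sink_device_manager(IMFStreamSink *stream_sink, IMFDXGIDeviceManager **manager)
{
    return MFGetService((IUnknown *)stream_sink, &MR_VIDEO_ACCELERATION_SERVICE,
            &IID_IMFDXGIDeviceManager, (void **)manager);
}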
From: Brendan McGrath bmcgrath@codeweavers.com
CopySubresourceRegion performs a plain copy; it does not support stretching, color keying, or blending.
If you try to copy outside the bounds of the destination resource, the behavior of CopySubresourceRegion is undefined. --- dlls/mfmediaengine/main.c | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-)
diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index 0f2ce5d189f..1a8f6291342 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -2486,7 +2486,7 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te { MFVideoNormalizedRect src_rect_default = {0.0, 0.0, 1.0, 1.0}; MFARGB color_default = {0, 0, 0, 0}; - D3D11_TEXTURE2D_DESC src_desc; + D3D11_TEXTURE2D_DESC src_desc, dst_desc; ID3D11DeviceContext *context; ID3D11Texture2D *src_texture; RECT dst_rect_default = {0}; @@ -2510,13 +2510,8 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te if (FAILED(hr)) return hr;
- if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) - { - ID3D11Texture2D_Release(src_texture); - return hr; - } - ID3D11Texture2D_GetDesc(src_texture, &src_desc); + ID3D11Texture2D_GetDesc(dst_texture, &dst_desc);
src_box.left = src_rect->left * src_desc.Width; src_box.top = src_rect->top * src_desc.Height; @@ -2525,6 +2520,19 @@ static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Te src_box.bottom = src_rect->bottom * src_desc.Height; src_box.back = 1;
+ if (dst_rect->left + src_box.right - src_box.left > dst_desc.Width || + dst_rect->top + src_box.bottom - src_box.top > dst_desc.Height) + { + ID3D11Texture2D_Release(src_texture); + return MF_E_UNEXPECTED; + } + + if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) + { + ID3D11Texture2D_Release(src_texture); + return hr; + } + ID3D11Device_GetImmediateContext(device, &context); ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)dst_texture, 0, dst_rect->left, dst_rect->top, 0, (ID3D11Resource *)src_texture, subresource, &src_box);
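To make the constraint concrete, the conditions under which a direct CopySubresourceRegion is safe look roughly like the helper below. This is illustrative only, not part of the patch: the hunk above enforces the bounds part, while the no-stretch part would have to be checked before choosing the direct-copy path at all. The helper name and signature are made up.

/* Illustrative only: a direct CopySubresourceRegion is valid only when no scaling
   is needed and the copied region fits inside the destination resource. */
static BOOL d3d11_direct_copy_possible(const D3D11_BOX *src_box, const RECT *dst_rect,
        const D3D11_TEXTURE2D_DESC *dst_desc)
{
    UINT copy_width = src_box->right - src_box->left;
    UINT copy_height = src_box->bottom - src_box->top;

    /* no stretching: the destination rectangle must be the same size as the source box */
    if ((UINT)(dst_rect->right - dst_rect->left) != copy_width
            || (UINT)(dst_rect->bottom - dst_rect->top) != copy_height)
        return FALSE;

    /* no writing past the destination's edges */
    return dst_rect->left + copy_width <= dst_desc->Width
            && dst_rect->top + copy_height <= dst_desc->Height;
}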
Hi,
It looks like your patch introduced the new failures shown below. Please investigate and fix them before resubmitting your patch. If they are not new, fixing them anyway would help a lot. Otherwise please ask for the known failures list to be updated.
The full results can be found at: https://testbot.winehq.org/JobDetails.pl?Key=150074
Your paranoid android.
=== build (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
=== debian11 (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
=== debian11b (build log) ===
error: patch failed: dlls/mfmediaengine/main.c:1196 error: patch failed: dlls/mfmediaengine/mediaengine_private.h:20 error: patch failed: dlls/mfmediaengine/video_frame_sink.c:28 error: patch failed: dlls/mfmediaengine/main.c:2486 Task: Patch failed to apply
What do you mean by supply? Make it available to OnVideoStreamTick/Transfer*?
It will supply the PTS on return of `OnVideoStreamTick` and use the corresponding sample for all subsequent `Transfer*` calls (until `OnVideoStreamTick` is called again and returns a different PTS).
Then, on a `Transfer*` call, do we need to re-evaluate the PTS of the closest sample again and potentially drop it?
No, the `Transfer*` calls don't appear to look at the PTS at all. They keep returning the sample corresponding to the PTS of the last successful call to `OnVideoStreamTick`.
This is similar to what happens in EVR; that's why I suggest using the queue logic from there.
OK, understood. I found the sample queue in EVR and have copied that logic into the SVR.
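For anyone following along, the application-side pattern being described is roughly the sketch below. It is not taken from the tests; the function name is made up, and `engine` and `texture` are assumed to have been created elsewhere.

/* Sketch of the caller side of OnVideoStreamTick/TransferVideoFrame. */
static void render_if_new_frame(IMFMediaEngine *engine, ID3D11Texture2D *texture)
{
    MFARGB black = {0, 0, 0, 0xff};
    RECT dst = {0, 0, 640, 480};
    LONGLONG pts;

    /* S_OK: a new frame is ready and pts identifies it; S_FALSE: nothing new yet. */
    if (IMFMediaEngine_OnVideoStreamTick(engine, &pts) == S_OK)
    {
        /* TransferVideoFrame does not look at the clock again; it blits the sample
           matching the pts above until a later tick reports a different one. */
        IMFMediaEngine_TransferVideoFrame(engine, (IUnknown *)texture, NULL, &dst, &black);
    }
}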
@nsivov I believe I've addressed all the feedback, so I've marked this merge request as ready (i.e. it's no longer a draft). Let me know if there is anything else you would like me to address. Thanks, Nikolay.
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/video_frame_sink.c:
- if (sink->is_shut_down)
- {
hr = MF_E_STREAMSINK_REMOVED;
- }
- else if (sink->state == SINK_STATE_RUNNING || sink->state == SINK_STATE_PAUSED)
- {
hr = IMFSample_GetSampleTime(sample, &sampletime);
if (SUCCEEDED(hr))
{
if (!(sink->flags & FLAGS_FIRST_FRAME))
{
video_frame_sink_notify(sink, MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY);
video_frame_sink_set_flag(sink, FLAGS_FIRST_FRAME, TRUE);
}
// else TODO: send MEQualityNotify event
Please change the comment style; there are other places too.
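Presumably this refers to Wine's usual C-style comments, i.e. the flagged line would become something like:

/* else TODO: send MEQualityNotify event */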
Nikolay Sivov (@nsivov) commented about dlls/mfmediaengine/video_frame_sink.c:
free(sink);
- }
- return refcount;
+}
+static HRESULT WINAPI video_frame_sink_GetCharacteristics(IMFMediaSink *iface, DWORD *flags) +{
- struct video_frame_sink *sink = impl_from_IMFMediaSink(iface);
- TRACE("%p, %p.\n", iface, flags);
- if (sink->is_shut_down)
return MF_E_SHUTDOWN;
- *flags = MEDIASINK_FIXED_STREAMS | MEDIASINK_RATELESS | MEDIASINK_CAN_PREROLL;
MEDIASINK_CAN_PREROLL here is a no-op, because you don't implement the preroll interface. So either remove it, or add the interface.
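The simpler of the two options suggested is just dropping the flag, e.g.:

*flags = MEDIASINK_FIXED_STREAMS | MEDIASINK_RATELESS;

The alternative would mean also exposing IMFMediaSinkPreroll (its single NotifyPreroll method) and queueing MEStreamSinkPrerolled from the stream sink once enough samples have been buffered.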
On Wed Dec 4 19:43:13 2024 +0000, Brendan McGrath wrote:
From memory, I ran into a deadlock caused by this whilst testing. I found that `IMFMediaSession_Shutdown` was re-entrant: some of the callbacks that ran within mfmediaengine as a result of the shutdown required the `cs` lock (causing a deadlock if it was already held).
Okay, hopefully we can resolve this better some day.
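For reference, a generic sketch of how that kind of deadlock can arise (not the exact Wine code paths; the struct and function names here are made up): one thread makes a blocking shutdown call while holding the engine lock, and a callback triggered by that shutdown needs the same lock before the shutdown can finish.

/* Hypothetical, simplified types; not the actual struct media_engine. */
struct engine
{
    CRITICAL_SECTION cs;
    IMFMediaSession *session;
};

/* thread A: shuts down with the lock held */
static void shutdown_holding_lock(struct engine *engine)
{
    EnterCriticalSection(&engine->cs);
    /* if the shutdown cannot return until the callback below has completed,
       neither thread can make progress */
    IMFMediaSession_Shutdown(engine->session);
    LeaveCriticalSection(&engine->cs);
}

/* thread B: a callback fired as part of the shutdown */
static void shutdown_callback(struct engine *engine)
{
    EnterCriticalSection(&engine->cs);   /* blocks: thread A still holds cs */
    /* ... */
    LeaveCriticalSection(&engine->cs);
}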