Signed-off-by: Derek Lesho <dlesho@codeweavers.com>
---
 dlls/winegstreamer/gst_cbs.c      |  58 ++++++++
 dlls/winegstreamer/gst_cbs.h      |  12 +-
 dlls/winegstreamer/main.c         |   3 +
 dlls/winegstreamer/media_source.c | 219 +++++++++++++++++++++++++++++-
 4 files changed, 288 insertions(+), 4 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index bf7103b1606..8f48368c96a 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -49,6 +49,8 @@ static void CALLBACK perform_cb(TP_CALLBACK_INSTANCE *instance, void *user)
if (cbdata->type < GSTDEMUX_MAX) perform_cb_gstdemux(cbdata); + else if (cbdata->type < MEDIA_SOURCE_MAX) + perform_cb_media_source(cbdata);
pthread_mutex_lock(&cbdata->lock); cbdata->finished = 1; @@ -301,3 +303,59 @@ gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query)
return cbdata.u.query_sink_data.ret; } + +GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len, + GstBuffer **buf) +{ + struct cb_data cbdata = { PULL_FROM_BYTESTREAM }; + + cbdata.u.getrange_data.pad = pad; + cbdata.u.getrange_data.parent = parent; + cbdata.u.getrange_data.ofs = ofs; + cbdata.u.getrange_data.len = len; + cbdata.u.getrange_data.buf = buf; + + call_cb(&cbdata); + + return cbdata.u.getrange_data.ret; +} + +gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) +{ + struct cb_data cbdata = { QUERY_BYTESTREAM }; + + cbdata.u.query_function_data.pad = pad; + cbdata.u.query_function_data.parent = parent; + cbdata.u.query_function_data.query = query; + + call_cb(&cbdata); + + return cbdata.u.query_function_data.ret; +} + +gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{ + struct cb_data cbdata = { ACTIVATE_BYTESTREAM_PAD_MODE }; + + cbdata.u.activate_mode_data.pad = pad; + cbdata.u.activate_mode_data.parent = parent; + cbdata.u.activate_mode_data.mode = mode; + cbdata.u.activate_mode_data.activate = activate; + + call_cb(&cbdata); + + return cbdata.u.activate_mode_data.ret; +} + +gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) +{ + struct cb_data cbdata = { PROCESS_BYTESTREAM_PAD_EVENT }; + + cbdata.u.event_src_data.pad = pad; + cbdata.u.event_src_data.parent = parent; + cbdata.u.event_src_data.event = event; + + call_cb(&cbdata); + + return cbdata.u.event_src_data.ret; +} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 4725f23ad1a..10e999feea7 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -43,7 +43,12 @@ enum CB_TYPE { AUTOPLUG_BLACKLIST, UNKNOWN_TYPE, QUERY_SINK, - GSTDEMUX_MAX + GSTDEMUX_MAX, + PULL_FROM_BYTESTREAM, + QUERY_BYTESTREAM, + ACTIVATE_BYTESTREAM_PAD_MODE, + PROCESS_BYTESTREAM_PAD_EVENT, + MEDIA_SOURCE_MAX, };
struct cb_data { @@ -138,6 +143,7 @@ struct cb_data {
void mark_wine_thread(void) DECLSPEC_HIDDEN; void perform_cb_gstdemux(struct cb_data *data) DECLSPEC_HIDDEN; +void perform_cb_media_source(struct cb_data *data) DECLSPEC_HIDDEN;
GstBusSyncReply watch_bus_wrapper(GstBus *bus, GstMessage *msg, gpointer user) DECLSPEC_HIDDEN; void existing_new_pad_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; @@ -154,5 +160,9 @@ GstAutoplugSelectResult autoplug_blacklist_wrapper(GstElement *bin, GstPad *pad, void unknown_type_wrapper(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) DECLSPEC_HIDDEN; void Gstreamer_transform_pad_added_wrapper(GstElement *filter, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len, GstBuffer **buf) DECLSPEC_HIDDEN; +gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; +gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/main.c b/dlls/winegstreamer/main.c index 2872710b3e2..4ca371d58bd 100644 --- a/dlls/winegstreamer/main.c +++ b/dlls/winegstreamer/main.c @@ -146,6 +146,9 @@ HRESULT WINAPI DllGetClassObject(REFCLSID clsid, REFIID iid, void **out)
TRACE("clsid %s, iid %s, out %p.\n", debugstr_guid(clsid), debugstr_guid(iid), out);
+ if (!init_gstreamer()) + return CLASS_E_CLASSNOTAVAILABLE; + if (SUCCEEDED(hr = mfplat_get_class_object(clsid, iid, out))) return hr;
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 84ecf305d4c..6b3bd4a7869 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -17,7 +17,12 @@ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA */
+#include "config.h" + +#include <gst/gst.h> + #include "gst_private.h" +#include "gst_cbs.h"
#include <stdarg.h>
@@ -27,6 +32,7 @@ #include "mfapi.h" #include "mferror.h" #include "mfidl.h" +#include "mfobjects.h"
#include "wine/debug.h" #include "wine/heap.h" @@ -39,6 +45,8 @@ struct media_source IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue; + IMFByteStream *byte_stream; + GstPad *my_src; enum { SOURCE_OPENING, @@ -52,6 +60,154 @@ static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *ifac return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); }
+GstFlowReturn pull_from_bytestream(GstPad *pad, GstObject *parent, guint64 ofs, guint len, + GstBuffer **buf) +{ + struct media_source *source = gst_pad_get_element_private(pad); + IMFByteStream *byte_stream = source->byte_stream; + ULONG bytes_read; + GstMapInfo info; + BOOL is_eof; + HRESULT hr; + + TRACE("gstreamer requesting %u bytes at %s from source %p into buffer %p\n", len, wine_dbgstr_longlong(ofs), source, buf); + + if (ofs != GST_BUFFER_OFFSET_NONE) + { + if (FAILED(IMFByteStream_SetCurrentPosition(byte_stream, ofs))) + return GST_FLOW_ERROR; + } + + if (FAILED(IMFByteStream_IsEndOfStream(byte_stream, &is_eof))) + return GST_FLOW_ERROR; + if (is_eof) + return GST_FLOW_EOS; + + if (!(*buf)) + *buf = gst_buffer_new_and_alloc(len); + gst_buffer_map(*buf, &info, GST_MAP_WRITE); + hr = IMFByteStream_Read(byte_stream, info.data, len, &bytes_read); + gst_buffer_unmap(*buf, &info); + + gst_buffer_set_size(*buf, bytes_read); + + if (FAILED(hr)) + { + return GST_FLOW_ERROR; + } + GST_BUFFER_OFFSET(*buf) = ofs; + return GST_FLOW_OK; +} +
+static gboolean query_bytestream(GstPad *pad, GstObject *parent, GstQuery *query) +{ + struct media_source *source = gst_pad_get_element_private(pad); + GstFormat format; + QWORD bytestream_len; + + TRACE("GStreamer queries source %p for %s\n", source, GST_QUERY_TYPE_NAME(query)); + + if (FAILED(IMFByteStream_GetLength(source->byte_stream, &bytestream_len))) + return FALSE; + + switch (GST_QUERY_TYPE(query)) + { + case GST_QUERY_DURATION: + { + gst_query_parse_duration (query, &format, NULL); + if (format == GST_FORMAT_PERCENT) { + gst_query_set_duration (query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX); + return TRUE; + } + else if (format == GST_FORMAT_BYTES) + { + QWORD length; + IMFByteStream_GetLength(source->byte_stream, &length); + gst_query_set_duration (query, GST_FORMAT_BYTES, length); + return TRUE; + } + return FALSE; + } + case GST_QUERY_SEEKING: + { + gst_query_parse_seeking (query, &format, NULL, NULL, NULL); + if (format != GST_FORMAT_BYTES) + { + WARN("Cannot seek using format \"%s\".\n", gst_format_get_name(format)); + return FALSE; + } + gst_query_set_seeking(query, GST_FORMAT_BYTES, 1, 0, bytestream_len); + return TRUE; + } + case GST_QUERY_SCHEDULING: + { + gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0); + gst_query_add_scheduling_mode(query, GST_PAD_MODE_PULL); + return TRUE; + } + case GST_QUERY_CAPS: + { + GstStaticCaps any = GST_STATIC_CAPS_ANY; + GstCaps *caps, *filter; + + caps = gst_static_caps_get(&any); + gst_query_parse_caps(query, &filter); + + if (filter) { + GstCaps* filtered; + filtered = gst_caps_intersect_full( + filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = filtered; + } + gst_query_set_caps_result(query, caps); + gst_caps_unref(caps); + return TRUE; + } + default: + { + WARN("Unhandled query type %s\n", GST_QUERY_TYPE_NAME(query)); + return FALSE; + } + } +} +
+static gboolean activate_bytestream_pad_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{ + struct media_source *source = gst_pad_get_element_private(pad); + + TRACE("%s source pad for mediasource %p in %s mode.\n", + activate ? "Activating" : "Deactivating", source, gst_pad_mode_get_name(mode)); + + switch (mode) { + case GST_PAD_MODE_PULL: + return TRUE; + default: + return FALSE; + } + return FALSE; +} +
+static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, GstEvent *event) +{ + struct media_source *source = gst_pad_get_element_private(pad); + + TRACE("source %p, type \"%s\".\n", source, GST_EVENT_TYPE_NAME(event)); + + switch (event->type) { + /* the seek event should fail in pull mode */ + case GST_EVENT_SEEK: + return FALSE; + default: + WARN("Ignoring \"%s\" event.\n", GST_EVENT_TYPE_NAME(event)); + case GST_EVENT_TAG: + case GST_EVENT_QOS: + case GST_EVENT_RECONFIGURE: + return gst_pad_event_default(pad, parent, event); + } + return TRUE; +} +
 static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -211,8 +367,12 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
+ if (source->my_src) + gst_object_unref(GST_OBJECT(source->my_src)); if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue); + if (source->byte_stream) + IMFByteStream_Release(source->byte_stream);
return S_OK; } @@ -236,19 +396,34 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl =
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { + GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( + "mf_src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS_ANY); + struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr;
if (!object) return E_OUTOFMEMORY;
+ object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl; + object->ref = 1; + object->byte_stream = bytestream; + IMFByteStream_AddRef(bytestream); + if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
- object->state = SOURCE_STOPPED; + object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src"); + gst_pad_set_element_private(object->my_src, object); + gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper); + gst_pad_set_query_function(object->my_src, query_bytestream_wrapper); + gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper); + gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl; - object->ref = 1; + object->state = SOURCE_STOPPED;
*out_media_source = object; return S_OK; @@ -717,3 +892,41 @@ HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj)
return hr; } + +/* helper for callback forwarding */ +void perform_cb_media_source(struct cb_data *cbdata) +{ + switch(cbdata->type) + { + case PULL_FROM_BYTESTREAM: + { + struct getrange_data *data = &cbdata->u.getrange_data; + cbdata->u.getrange_data.ret = pull_from_bytestream(data->pad, data->parent, + data->ofs, data->len, data->buf); + break; + } + case QUERY_BYTESTREAM: + { + struct query_function_data *data = &cbdata->u.query_function_data; + cbdata->u.query_function_data.ret = query_bytestream(data->pad, data->parent, data->query); + break; + } + case ACTIVATE_BYTESTREAM_PAD_MODE: + { + struct activate_mode_data *data = &cbdata->u.activate_mode_data; + cbdata->u.activate_mode_data.ret = activate_bytestream_pad_mode(data->pad, data->parent, data->mode, data->activate); + break; + } + case PROCESS_BYTESTREAM_PAD_EVENT: + { + struct event_src_data *data = &cbdata->u.event_src_data; + cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event); + break; + } + default: + { + ERR("Wrong callback forwarder called\n"); + return; + } + } +}
Signed-off-by: Derek Lesho <dlesho@codeweavers.com>
---
 dlls/winegstreamer/gst_cbs.c      |  45 ++++
 dlls/winegstreamer/gst_cbs.h      |   8 +
 dlls/winegstreamer/media_source.c | 416 +++++++++++++++++++++++++++++-
 3 files changed, 468 insertions(+), 1 deletion(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 8f48368c96a..4755f5b42f1 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -359,3 +359,48 @@ gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, Gs
return cbdata.u.event_src_data.ret; } + +GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) +{ + struct cb_data cbdata = { WATCH_SOURCE_BUS }; + + cbdata.u.watch_bus_data.bus = bus; + cbdata.u.watch_bus_data.msg = message; + cbdata.u.watch_bus_data.user = user; + + call_cb(&cbdata); + + return cbdata.u.watch_bus_data.ret; +} + +void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) +{ + struct cb_data cbdata = { SOURCE_STREAM_ADDED }; + + cbdata.u.pad_added_data.element = bin; + cbdata.u.pad_added_data.pad = pad; + cbdata.u.pad_added_data.user = user; + + call_cb(&cbdata); +} + +void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) +{ + struct cb_data cbdata = { SOURCE_STREAM_REMOVED }; + + cbdata.u.pad_removed_data.element = element; + cbdata.u.pad_removed_data.pad = pad; + cbdata.u.pad_removed_data.user = user; + + call_cb(&cbdata); +} + +void source_all_streams_wrapper(GstElement *element, gpointer user) +{ + struct cb_data cbdata = { SOURCE_ALL_STREAMS }; + + cbdata.u.no_more_pads_data.element = element; + cbdata.u.no_more_pads_data.user = user; + + call_cb(&cbdata); +} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 10e999feea7..d87cc8c21e9 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -48,6 +48,10 @@ enum CB_TYPE { QUERY_BYTESTREAM, ACTIVATE_BYTESTREAM_PAD_MODE, PROCESS_BYTESTREAM_PAD_EVENT, + WATCH_SOURCE_BUS, + SOURCE_STREAM_ADDED, + SOURCE_STREAM_REMOVED, + SOURCE_ALL_STREAMS, MEDIA_SOURCE_MAX, };
@@ -164,5 +168,9 @@ GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN; +GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) DECLSPEC_HIDDEN; +void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 6b3bd4a7869..29af2b72def 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -24,6 +24,7 @@ #include "gst_private.h" #include "gst_cbs.h"
+#include <assert.h> #include <stdarg.h>
#define COBJMACROS @@ -40,21 +41,48 @@
WINE_DEFAULT_DEBUG_CHANNEL(mfplat);
+struct media_stream +{ + IMFMediaStream IMFMediaStream_iface; + LONG ref; + struct media_source *parent_source; + IMFMediaEventQueue *event_queue; + GstElement *appsink; + GstPad *their_src, *my_sink; + enum + { + STREAM_STUB, + STREAM_INACTIVE, + STREAM_SHUTDOWN, + } state; +}; + struct media_source { IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue; IMFByteStream *byte_stream; - GstPad *my_src; + struct media_stream **streams; + ULONG stream_count; + GstBus *bus; + GstElement *container; + GstElement *decodebin; + GstPad *my_src, *their_sink; enum { SOURCE_OPENING, SOURCE_STOPPED, SOURCE_SHUTDOWN, } state; + HANDLE all_streams_event; };
+static inline struct media_stream *impl_from_IMFMediaStream(IMFMediaStream *iface) +{ + return CONTAINING_RECORD(iface, struct media_stream, IMFMediaStream_iface); +} + static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *iface) { return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); @@ -208,6 +236,243 @@ static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, Gst return TRUE; }
+GstBusSyncReply watch_source_bus(GstBus *bus, GstMessage *message, gpointer user) +{ + struct media_source *source = (struct media_source *) user; + gchar *dbg_info = NULL; + GError *err = NULL; + + TRACE("source %p message type %s\n", source, GST_MESSAGE_TYPE_NAME(message)); + + switch (message->type) + { + case GST_MESSAGE_ERROR: + gst_message_parse_error(message, &err, &dbg_info); + ERR("%s: %s\n", GST_OBJECT_NAME(message->src), err->message); + ERR("%s\n", dbg_info); + g_error_free(err); + g_free(dbg_info); + break; + case GST_MESSAGE_WARNING: + gst_message_parse_warning(message, &err, &dbg_info); + WARN("%s: %s\n", GST_OBJECT_NAME(message->src), err->message); + WARN("%s\n", dbg_info); + g_error_free(err); + g_free(dbg_info); + break; + default: + break; + } + + return GST_BUS_PASS; +} + +static HRESULT WINAPI media_stream_QueryInterface(IMFMediaStream *iface, REFIID riid, void **out) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%s %p)\n", stream, debugstr_guid(riid), out); + + if (IsEqualIID(riid, &IID_IMFMediaStream) || + IsEqualIID(riid, &IID_IMFMediaEventGenerator) || + IsEqualIID(riid, &IID_IUnknown)) + { + *out = &stream->IMFMediaStream_iface; + } + else + { + FIXME("(%s, %p)\n", debugstr_guid(riid), out); + *out = NULL; + return E_NOINTERFACE; + } + + IUnknown_AddRef((IUnknown*)*out); + return S_OK; +} + +static ULONG WINAPI media_stream_AddRef(IMFMediaStream *iface) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + ULONG ref = InterlockedIncrement(&stream->ref); + + TRACE("(%p) ref=%u\n", stream, ref); + + return ref; +} + +static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + ULONG ref = InterlockedDecrement(&stream->ref); + + TRACE("(%p) ref=%u\n", stream, ref); + + if (!ref) + { + if (stream->my_sink) + gst_object_unref(GST_OBJECT(stream->my_sink)); + if (stream->event_queue) + IMFMediaEventQueue_Release(stream->event_queue); + if (stream->parent_source) + IMFMediaSource_Release(&stream->parent_source->IMFMediaSource_iface); + + heap_free(stream); + } + + return ref; +} + +static HRESULT WINAPI media_stream_GetEvent(IMFMediaStream *iface, DWORD flags, IMFMediaEvent **event) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%#x, %p)\n", stream, flags, event); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return IMFMediaEventQueue_GetEvent(stream->event_queue, flags, event); +} + +static HRESULT WINAPI media_stream_BeginGetEvent(IMFMediaStream *iface, IMFAsyncCallback *callback, IUnknown *state) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%p, %p)\n", stream, callback, state); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return IMFMediaEventQueue_BeginGetEvent(stream->event_queue, callback, state); +} + +static HRESULT WINAPI media_stream_EndGetEvent(IMFMediaStream *iface, IMFAsyncResult *result, IMFMediaEvent **event) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%p, %p)\n", stream, result, event); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return IMFMediaEventQueue_EndGetEvent(stream->event_queue, result, event); +} + +static HRESULT WINAPI media_stream_QueueEvent(IMFMediaStream *iface, MediaEventType event_type, REFGUID ext_type, + HRESULT hr, const PROPVARIANT *value) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + 
TRACE("(%p)->(%d, %s, %#x, %p)\n", stream, event_type, debugstr_guid(ext_type), hr, value); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return IMFMediaEventQueue_QueueEventParamVar(stream->event_queue, event_type, ext_type, hr, value); +} + +static HRESULT WINAPI media_stream_GetMediaSource(IMFMediaStream *iface, IMFMediaSource **source) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + FIXME("stub (%p)->(%p)\n", stream, source); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return E_NOTIMPL; +} + +static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IMFStreamDescriptor **descriptor) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%p)\n", stream, descriptor); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return E_NOTIMPL; +} + +static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token) +{ + struct media_stream *stream = impl_from_IMFMediaStream(iface); + + TRACE("(%p)->(%p)\n", iface, token); + + if (stream->state == STREAM_SHUTDOWN) + return MF_E_SHUTDOWN; + + return E_NOTIMPL; +} + +static const IMFMediaStreamVtbl media_stream_vtbl = +{ + media_stream_QueryInterface, + media_stream_AddRef, + media_stream_Release, + media_stream_GetEvent, + media_stream_BeginGetEvent, + media_stream_EndGetEvent, + media_stream_QueueEvent, + media_stream_GetMediaSource, + media_stream_GetStreamDescriptor, + media_stream_RequestSample +}; + +/* creates a stub stream */ +static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD stream_id, struct media_stream **out_stream) +{ + struct media_stream *object = heap_alloc_zero(sizeof(*object)); + HRESULT hr; + + TRACE("(%p %p)->(%p)\n", source, pad, out_stream); + + object->IMFMediaStream_iface.lpVtbl = &media_stream_vtbl; + object->ref = 1; + + IMFMediaSource_AddRef(&source->IMFMediaSource_iface); + object->parent_source = source; + object->their_src = pad; + + object->state = STREAM_STUB; + + if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) + goto fail; + + if (!(object->appsink = gst_element_factory_make("appsink", NULL))) + { + hr = E_OUTOFMEMORY; + goto fail; + } + gst_bin_add(GST_BIN(object->parent_source->container), object->appsink); + + g_object_set(object->appsink, "emit-signals", TRUE, NULL); + g_object_set(object->appsink, "sync", FALSE, NULL); + g_object_set(object->appsink, "max-buffers", 5, NULL); + g_object_set(object->appsink, "wait-on-eos", FALSE, NULL); + + object->my_sink = gst_element_get_static_pad(object->appsink, "sink"); + gst_pad_set_element_private(object->my_sink, object); + + gst_pad_link(object->their_src, object->my_sink); + + gst_element_sync_state_with_parent(object->appsink); + + TRACE("->(%p)\n", object); + *out_stream = object; + + return S_OK; + + fail: + WARN("Failed to construct media stream, hr %#x.\n", hr); + + IMFMediaStream_Release(&object->IMFMediaStream_iface); + return hr; +} + static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -367,13 +632,34 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
+ if (source->container) + { + gst_element_set_state(source->container, GST_STATE_NULL); + gst_object_unref(GST_OBJECT(source->container)); + } + if (source->my_src) gst_object_unref(GST_OBJECT(source->my_src)); + if (source->their_sink) + gst_object_unref(GST_OBJECT(source->their_sink)); + if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue); if (source->byte_stream) IMFByteStream_Release(source->byte_stream);
+ for (unsigned int i = 0; i < source->stream_count; i++) + { + source->streams[i]->state = STREAM_SHUTDOWN; + IMFMediaStream_Release(&source->streams[i]->IMFMediaStream_iface); + } + + if (source->stream_count) + heap_free(source->streams); + + if (source->all_streams_event) + CloseHandle(source->all_streams_event); + return S_OK; }
@@ -394,6 +680,63 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) +{ + struct media_source *source = (struct media_source *) user; + struct media_stream **new_stream_array; + struct media_stream *stream; + gchar *g_stream_id; + DWORD stream_id; + + if (gst_pad_get_direction(pad) != GST_PAD_SRC) + return; + + /* Most/All seen randomly calculate the initial part of the stream id, the last three digits are the only deterministic part */ + g_stream_id = GST_PAD_NAME(pad); + sscanf(strstr(g_stream_id, "_"), "_%u", &stream_id); + + TRACE("stream-id: %u\n", stream_id); + + if (FAILED(new_media_stream(source, pad, stream_id, &stream))) + { + goto leave; + } + + if (!(new_stream_array = heap_realloc(source->streams, (source->stream_count + 1) * (sizeof(*new_stream_array))))) + { + ERR("Failed to add stream to source\n"); + goto leave; + } + + source->streams = new_stream_array; + source->streams[source->stream_count++] = stream; + + leave: + return; +} + +static void source_stream_removed(GstElement *element, GstPad *pad, gpointer user) +{ + struct media_source *source = (struct media_source *)user; + + for (unsigned int i = 0; i < source->stream_count; i++) + { + struct media_stream *stream = source->streams[i]; + if (stream->their_src != pad) + continue; + stream->their_src = NULL; + if (stream->state != STREAM_INACTIVE) + stream->state = STREAM_INACTIVE; + } +} + +static void source_all_streams(GstElement *element, gpointer user) +{ + struct media_source *source = (struct media_source *) user; + + SetEvent(source->all_streams_event); +} + static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( @@ -404,6 +747,7 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr; + int ret;
if (!object) return E_OUTOFMEMORY; @@ -412,10 +756,16 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ object->ref = 1; object->byte_stream = bytestream; IMFByteStream_AddRef(bytestream); + object->all_streams_event = CreateEventA(NULL, FALSE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
+ object->container = gst_bin_new(NULL); + object->bus = gst_bus_new(); + gst_bus_set_sync_handler(object->bus, watch_source_bus_wrapper, object, NULL); + gst_element_set_bus(object->container, object->bus); + object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src"); gst_pad_set_element_private(object->my_src, object); gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper); @@ -423,6 +773,46 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper); gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
+ object->decodebin = gst_element_factory_make("decodebin", NULL); + if (!(object->decodebin)) + { + WARN("Failed to create decodebin for source\n"); + hr = E_OUTOFMEMORY; + goto fail; + } + /* the appsinks determine the maximum amount of buffering instead, this means that if one stream isn't read, a leak will happen, like on windows */ + g_object_set(object->decodebin, "max-size-buffers", 0, NULL); + g_object_set(object->decodebin, "max-size-time", G_GUINT64_CONSTANT(0), NULL); + g_object_set(object->decodebin, "max-size-bytes", 0, NULL); + + gst_bin_add(GST_BIN(object->container), object->decodebin); + + g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object); + g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object); + g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object); + + object->their_sink = gst_element_get_static_pad(object->decodebin, "sink"); + + if ((ret = gst_pad_link(object->my_src, object->their_sink)) < 0) + { + WARN("Failed to link our bytestream pad to the demuxer input\n"); + hr = E_OUTOFMEMORY; + goto fail; + } + + object->state = SOURCE_OPENING; + + gst_element_set_state(object->container, GST_STATE_PAUSED); + ret = gst_element_get_state(object->container, NULL, NULL, -1); + if (ret == GST_STATE_CHANGE_FAILURE) + { + ERR("Failed to play source.\n"); + hr = E_OUTOFMEMORY; + goto fail; + } + + WaitForSingleObject(object->all_streams_event, INFINITE); + object->state = SOURCE_STOPPED;
*out_media_source = object; @@ -923,6 +1313,30 @@ void perform_cb_media_source(struct cb_data *cbdata) cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event); break; } + case WATCH_SOURCE_BUS: + { + struct watch_bus_data *data = &cbdata->u.watch_bus_data; + cbdata->u.watch_bus_data.ret = watch_source_bus(data->bus, data->msg, data->user); + break; + } + case SOURCE_STREAM_ADDED: + { + struct pad_added_data *data = &cbdata->u.pad_added_data; + source_stream_added(data->element, data->pad, data->user); + break; + } + case SOURCE_STREAM_REMOVED: + { + struct pad_removed_data *data = &cbdata->u.pad_removed_data; + source_stream_removed(data->element, data->pad, data->user); + break; + } + case SOURCE_ALL_STREAMS: + { + struct no_more_pads_data *data = &cbdata->u.no_more_pads_data; + source_all_streams(data->element, data->user); + break; + } default: { ERR("Wrong callback forwarder called\n");
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho <dlesho@codeweavers.com>
---
 dlls/winegstreamer/gst_cbs.c      |  45 ++++
 dlls/winegstreamer/gst_cbs.h      |   8 +
 dlls/winegstreamer/media_source.c | 416 +++++++++++++++++++++++++++++-
 3 files changed, 468 insertions(+), 1 deletion(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 8f48368c96a..4755f5b42f1 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -359,3 +359,48 @@ gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, Gs
return cbdata.u.event_src_data.ret;
}
+GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) +{
- struct cb_data cbdata = { WATCH_SOURCE_BUS };
- cbdata.u.watch_bus_data.bus = bus;
- cbdata.u.watch_bus_data.msg = message;
- cbdata.u.watch_bus_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.watch_bus_data.ret;
+}
+void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_ADDED };
- cbdata.u.pad_added_data.element = bin;
- cbdata.u.pad_added_data.pad = pad;
- cbdata.u.pad_added_data.user = user;
- call_cb(&cbdata);
+}
The function naming is a bit inconsistent. I'd recommend standardizing on the "source_stream_added" style (i.e. object first, then verb), on the principle of narrowing scope. Same thing for 1/3, really.
(In general I wouldn't be too attached to the gstreamer function names; they're not always the best.)
+void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_REMOVED };
- cbdata.u.pad_removed_data.element = element;
- cbdata.u.pad_removed_data.pad = pad;
- cbdata.u.pad_removed_data.user = user;
- call_cb(&cbdata);
+}
+void source_all_streams_wrapper(GstElement *element, gpointer user) +{
- struct cb_data cbdata = { SOURCE_ALL_STREAMS };
- cbdata.u.no_more_pads_data.element = element;
- cbdata.u.no_more_pads_data.user = user;
- call_cb(&cbdata);
+}
"all_streams" is a bit confusing at first glance; any reason not to use the GStreamer name "no_more_pads"?
diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 10e999feea7..d87cc8c21e9 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -48,6 +48,10 @@ enum CB_TYPE { QUERY_BYTESTREAM, ACTIVATE_BYTESTREAM_PAD_MODE, PROCESS_BYTESTREAM_PAD_EVENT,
- WATCH_SOURCE_BUS,
- SOURCE_STREAM_ADDED,
- SOURCE_STREAM_REMOVED,
- SOURCE_ALL_STREAMS, MEDIA_SOURCE_MAX,
};
@@ -164,5 +168,9 @@ GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN; +GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) DECLSPEC_HIDDEN; +void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 6b3bd4a7869..29af2b72def 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -24,6 +24,7 @@ #include "gst_private.h" #include "gst_cbs.h"
+#include <assert.h> #include <stdarg.h>
#define COBJMACROS @@ -40,21 +41,48 @@
WINE_DEFAULT_DEBUG_CHANNEL(mfplat);
+struct media_stream +{
- IMFMediaStream IMFMediaStream_iface;
- LONG ref;
- struct media_source *parent_source;
- IMFMediaEventQueue *event_queue;
- GstElement *appsink;
- GstPad *their_src, *my_sink;
- enum
- {
STREAM_STUB,
STREAM_INACTIVE,
These two values are set, but not (really) used in this patch.
STREAM_SHUTDOWN,
- } state;
+};
struct media_source { IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue; IMFByteStream *byte_stream;
- GstPad *my_src;
- struct media_stream **streams;
- ULONG stream_count;
- GstBus *bus;
- GstElement *container;
- GstElement *decodebin;
- GstPad *my_src, *their_sink; enum { SOURCE_OPENING, SOURCE_STOPPED, SOURCE_SHUTDOWN, } state;
- HANDLE all_streams_event;
};
+static inline struct media_stream *impl_from_IMFMediaStream(IMFMediaStream *iface) +{
- return CONTAINING_RECORD(iface, struct media_stream, IMFMediaStream_iface);
+}
static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *iface) { return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); @@ -208,6 +236,243 @@ static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, Gst return TRUE; }
+GstBusSyncReply watch_source_bus(GstBus *bus, GstMessage *message, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- gchar *dbg_info = NULL;
- GError *err = NULL;
- TRACE("source %p message type %s\n", source, GST_MESSAGE_TYPE_NAME(message));
- switch (message->type)
- {
case GST_MESSAGE_ERROR:
gst_message_parse_error(message, &err, &dbg_info);
ERR("%s: %s\n", GST_OBJECT_NAME(message->src), err->message);
ERR("%s\n", dbg_info);
g_error_free(err);
g_free(dbg_info);
break;
case GST_MESSAGE_WARNING:
gst_message_parse_warning(message, &err, &dbg_info);
WARN("%s: %s\n", GST_OBJECT_NAME(message->src), err->message);
WARN("%s\n", dbg_info);
g_error_free(err);
g_free(dbg_info);
break;
default:
break;
- }
- return GST_BUS_PASS;
+}
There's no async queue used in this patch, so returning GST_BUS_PASS will effectively leak messages.
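To illustrate, here is a minimal sketch (not part of the patch, and only valid under the assumption that nothing else ever consumes this bus) of a sync handler that takes ownership of the messages it looks at instead of passing them on; per the gst_bus_set_sync_handler() contract, a handler that returns GST_BUS_DROP must unref the message itself:

    static GstBusSyncReply watch_source_bus(GstBus *bus, GstMessage *message, gpointer user)
    {
        gchar *dbg_info = NULL;
        GError *err = NULL;

        switch (GST_MESSAGE_TYPE(message))
        {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error(message, &err, &dbg_info);
                ERR("%s: %s\n", GST_OBJECT_NAME(message->src), err->message);
                g_error_free(err);
                g_free(dbg_info);
                break;
            default:
                break;
        }

        /* Nothing else reads this bus, so take ownership of the message and
         * drop it instead of letting it pile up in the bus queue. */
        gst_message_unref(message);
        return GST_BUS_DROP;
    }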
+static HRESULT WINAPI media_stream_QueryInterface(IMFMediaStream *iface, REFIID riid, void **out) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%s %p)\n", stream, debugstr_guid(riid), out);
- if (IsEqualIID(riid, &IID_IMFMediaStream) ||
IsEqualIID(riid, &IID_IMFMediaEventGenerator) ||
IsEqualIID(riid, &IID_IUnknown))
- {
*out = &stream->IMFMediaStream_iface;
- }
- else
- {
FIXME("(%s, %p)\n", debugstr_guid(riid), out);
*out = NULL;
return E_NOINTERFACE;
- }
- IUnknown_AddRef((IUnknown*)*out);
- return S_OK;
+}
+static ULONG WINAPI media_stream_AddRef(IMFMediaStream *iface) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- ULONG ref = InterlockedIncrement(&stream->ref);
- TRACE("(%p) ref=%u\n", stream, ref);
- return ref;
+}
+static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- ULONG ref = InterlockedDecrement(&stream->ref);
- TRACE("(%p) ref=%u\n", stream, ref);
- if (!ref)
- {
if (stream->my_sink)
gst_object_unref(GST_OBJECT(stream->my_sink));
if (stream->event_queue)
IMFMediaEventQueue_Release(stream->event_queue);
if (stream->parent_source)
IMFMediaSource_Release(&stream->parent_source->IMFMediaSource_iface);
heap_free(stream);
- }
- return ref;
+}
+static HRESULT WINAPI media_stream_GetEvent(IMFMediaStream *iface, DWORD flags, IMFMediaEvent **event) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%#x, %p)\n", stream, flags, event);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_GetEvent(stream->event_queue, flags, event);
+}
+static HRESULT WINAPI media_stream_BeginGetEvent(IMFMediaStream *iface, IMFAsyncCallback *callback, IUnknown *state) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p, %p)\n", stream, callback, state);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_BeginGetEvent(stream->event_queue, callback, state);
+}
+static HRESULT WINAPI media_stream_EndGetEvent(IMFMediaStream *iface, IMFAsyncResult *result, IMFMediaEvent **event) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p, %p)\n", stream, result, event);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_EndGetEvent(stream->event_queue, result, event);
+}
+static HRESULT WINAPI media_stream_QueueEvent(IMFMediaStream *iface, MediaEventType event_type, REFGUID ext_type,
HRESULT hr, const PROPVARIANT *value)
+{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%d, %s, %#x, %p)\n", stream, event_type, debugstr_guid(ext_type), hr, value);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_QueueEventParamVar(stream->event_queue, event_type, ext_type, hr, value);
+}
+static HRESULT WINAPI media_stream_GetMediaSource(IMFMediaStream *iface, IMFMediaSource **source) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- FIXME("stub (%p)->(%p)\n", stream, source);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IMFStreamDescriptor **descriptor) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p)\n", stream, descriptor);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p)\n", iface, token);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static const IMFMediaStreamVtbl media_stream_vtbl = +{
- media_stream_QueryInterface,
- media_stream_AddRef,
- media_stream_Release,
- media_stream_GetEvent,
- media_stream_BeginGetEvent,
- media_stream_EndGetEvent,
- media_stream_QueueEvent,
- media_stream_GetMediaSource,
- media_stream_GetStreamDescriptor,
- media_stream_RequestSample
+};
+/* creates a stub stream */ +static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD stream_id, struct media_stream **out_stream)
You don't use "stream_id" in this patch.
+{
- struct media_stream *object = heap_alloc_zero(sizeof(*object));
- HRESULT hr;
- TRACE("(%p %p)->(%p)\n", source, pad, out_stream);
- object->IMFMediaStream_iface.lpVtbl = &media_stream_vtbl;
- object->ref = 1;
- IMFMediaSource_AddRef(&source->IMFMediaSource_iface);
- object->parent_source = source;
- object->their_src = pad;
- object->state = STREAM_STUB;
- if (FAILED(hr = MFCreateEventQueue(&object->event_queue)))
goto fail;
- if (!(object->appsink = gst_element_factory_make("appsink", NULL)))
- {
hr = E_OUTOFMEMORY;
goto fail;
- }
- gst_bin_add(GST_BIN(object->parent_source->container), object->appsink);
- g_object_set(object->appsink, "emit-signals", TRUE, NULL);
I might defer this line until you actually hook up the signals.
- g_object_set(object->appsink, "sync", FALSE, NULL);
- g_object_set(object->appsink, "max-buffers", 5, NULL);
So, just to clarify for my understanding, the reason we want to buffer is because the downstream renderer will request samples basically as soon as presentation time hits (through some weird roundabout process) and we want to minimize the latency there?
- g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
If I understand correctly, this means that GStreamer will signal EOS potentially before pushing all buffers; is that what we want?
- object->my_sink = gst_element_get_static_pad(object->appsink, "sink");
- gst_pad_set_element_private(object->my_sink, object);
- gst_pad_link(object->their_src, object->my_sink);
- gst_element_sync_state_with_parent(object->appsink);
- TRACE("->(%p)\n", object);
- *out_stream = object;
- return S_OK;
- fail:
Personally I wouldn't bother using gotos in this function, but if you do, it's at least clearer if you put the label at the beginning of the line.
- WARN("Failed to construct media stream, hr %#x.\n", hr);
- IMFMediaStream_Release(&object->IMFMediaStream_iface);
- return hr;
+}
static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -367,13 +632,34 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
if (source->container)
{
gst_element_set_state(source->container, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(source->container));
}
if (source->my_src) gst_object_unref(GST_OBJECT(source->my_src));
if (source->their_sink)
gst_object_unref(GST_OBJECT(source->their_sink));
if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue); if (source->byte_stream) IMFByteStream_Release(source->byte_stream);
for (unsigned int i = 0; i < source->stream_count; i++)
Misplaced variable declaration.
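That is, the index should be declared at the top of the enclosing block rather than inside the for statement. A hypothetical sketch of the expected shape (the helper name is made up for illustration):

    static void media_source_shutdown_streams(struct media_source *source)
    {
        unsigned int i;

        for (i = 0; i < source->stream_count; i++)
        {
            source->streams[i]->state = STREAM_SHUTDOWN;
            IMFMediaStream_Release(&source->streams[i]->IMFMediaStream_iface);
        }
    }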
- {
source->streams[i]->state = STREAM_SHUTDOWN;
IMFMediaStream_Release(&source->streams[i]->IMFMediaStream_iface);
- }
- if (source->stream_count)
heap_free(source->streams);
- if (source->all_streams_event)
CloseHandle(source->all_streams_event);
- return S_OK;
}
@@ -394,6 +680,63 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- struct media_stream **new_stream_array;
- struct media_stream *stream;
- gchar *g_stream_id;
- DWORD stream_id;
- if (gst_pad_get_direction(pad) != GST_PAD_SRC)
return;
- /* Most/All seen randomly calculate the initial part of the stream id, the last three digits are the only deterministic part */
- g_stream_id = GST_PAD_NAME(pad);
- sscanf(strstr(g_stream_id, "_"), "_%u", &stream_id);
- TRACE("stream-id: %u\n", stream_id);
I don't know what "stream_id" is supposed to be used for, but depending on the pad name seems wrong.
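As a hypothetical alternative (not what the patch does, and not necessarily the right fix), the id could come from GStreamer itself via gst_pad_get_stream_id() rather than from parsing digits out of the pad name, or the source could simply assign indices in the order pads arrive:

    static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
    {
        struct media_source *source = user;
        gchar *stream_id = gst_pad_get_stream_id(pad);

        /* use source->stream_count as the index; the GStreamer id is only traced */
        TRACE("stream %u, GStreamer stream id %s.\n", source->stream_count, debugstr_a(stream_id));

        /* ... create the media_stream here ... */

        g_free(stream_id);
    }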
- if (FAILED(new_media_stream(source, pad, stream_id, &stream)))
- {
goto leave;
- }
- if (!(new_stream_array = heap_realloc(source->streams, (source->stream_count + 1) * (sizeof(*new_stream_array)))))
- {
ERR("Failed to add stream to source\n");
goto leave;
- }
- source->streams = new_stream_array;
- source->streams[source->stream_count++] = stream;
- leave:
This label is very superfluous.
- return;
+}
+static void source_stream_removed(GstElement *element, GstPad *pad, gpointer user) +{
- struct media_source *source = (struct media_source *)user;
- for (unsigned int i = 0; i < source->stream_count; i++)
Misplaced variable initializer.
- {
struct media_stream *stream = source->streams[i];
if (stream->their_src != pad)
continue;
stream->their_src = NULL;
if (stream->state != STREAM_INACTIVE)
stream->state = STREAM_INACTIVE;
- }
+}
+static void source_all_streams(GstElement *element, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- SetEvent(source->all_streams_event);
+}
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( @@ -404,6 +747,7 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr;
int ret;
if (!object) return E_OUTOFMEMORY;
@@ -412,10 +756,16 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ object->ref = 1; object->byte_stream = bytestream; IMFByteStream_AddRef(bytestream);
object->all_streams_event = CreateEventA(NULL, FALSE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
object->container = gst_bin_new(NULL);
object->bus = gst_bus_new();
gst_bus_set_sync_handler(object->bus, watch_source_bus_wrapper, object, NULL);
gst_element_set_bus(object->container, object->bus);
object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src"); gst_pad_set_element_private(object->my_src, object); gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper);
@@ -423,6 +773,46 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper); gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->decodebin = gst_element_factory_make("decodebin", NULL);
- if (!(object->decodebin))
- {
WARN("Failed to create decodebin for source\n");
hr = E_OUTOFMEMORY;
goto fail;
- }
- /* the appsinks determine the maximum amount of buffering instead, this means that if one stream isn't read, a leak will happen, like on windows */
Very long line, and this comment is rather confusing. What you probably want to do is mention the application and describe the problem that it would run into if the default buffering settings were used.
g_object_set(object->decodebin, "max-size-buffers", 0, NULL);
g_object_set(object->decodebin, "max-size-time", G_GUINT64_CONSTANT(0), NULL);
g_object_set(object->decodebin, "max-size-bytes", 0, NULL);
gst_bin_add(GST_BIN(object->container), object->decodebin);
g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object);
g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object);
g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object);
object->their_sink = gst_element_get_static_pad(object->decodebin, "sink");
if ((ret = gst_pad_link(object->my_src, object->their_sink)) < 0)
{
WARN("Failed to link our bytestream pad to the demuxer input\n");
hr = E_OUTOFMEMORY;
goto fail;
}
object->state = SOURCE_OPENING;
gst_element_set_state(object->container, GST_STATE_PAUSED);
ret = gst_element_get_state(object->container, NULL, NULL, -1);
if (ret == GST_STATE_CHANGE_FAILURE)
{
ERR("Failed to play source.\n");
hr = E_OUTOFMEMORY;
goto fail;
}
WaitForSingleObject(object->all_streams_event, INFINITE);
object->state = SOURCE_STOPPED;
*out_media_source = object;
@@ -923,6 +1313,30 @@ void perform_cb_media_source(struct cb_data *cbdata) cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event); break; }
- case WATCH_SOURCE_BUS:
{
struct watch_bus_data *data = &cbdata->u.watch_bus_data;
cbdata->u.watch_bus_data.ret = watch_source_bus(data->bus, data->msg, data->user);
break;
}
- case SOURCE_STREAM_ADDED:
{
struct pad_added_data *data = &cbdata->u.pad_added_data;
source_stream_added(data->element, data->pad, data->user);
break;
}
- case SOURCE_STREAM_REMOVED:
{
struct pad_removed_data *data = &cbdata->u.pad_removed_data;
source_stream_removed(data->element, data->pad, data->user);
break;
}
- case SOURCE_ALL_STREAMS:
{
struct no_more_pads_data *data = &cbdata->u.no_more_pads_data;
source_all_streams(data->element, data->user);
break;
default: { ERR("Wrong callback forwarder called\n");}
On 9/9/20 6:28 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho <dlesho@codeweavers.com>
---
 dlls/winegstreamer/gst_cbs.c      |  45 ++++
 dlls/winegstreamer/gst_cbs.h      |   8 +
 dlls/winegstreamer/media_source.c | 416 +++++++++++++++++++++++++++++-
 3 files changed, 468 insertions(+), 1 deletion(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 8f48368c96a..4755f5b42f1 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -359,3 +359,48 @@ gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, Gs
return cbdata.u.event_src_data.ret;
}
+GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) +{
- struct cb_data cbdata = { WATCH_SOURCE_BUS };
- cbdata.u.watch_bus_data.bus = bus;
- cbdata.u.watch_bus_data.msg = message;
- cbdata.u.watch_bus_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.watch_bus_data.ret;
+}
+void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_ADDED };
- cbdata.u.pad_added_data.element = bin;
- cbdata.u.pad_added_data.pad = pad;
- cbdata.u.pad_added_data.user = user;
- call_cb(&cbdata);
+}
The function naming is a bit inconsistent. I'd recommend standardizing on the "source_stream_added" style (i.e. object first, then verb), on the principle of narrowing scope. Same thing for 1/3, really.
(In general I wouldn't be too attached to the gstreamer function names; they're not always the best.)
👍
+void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_REMOVED };
- cbdata.u.pad_removed_data.element = element;
- cbdata.u.pad_removed_data.pad = pad;
- cbdata.u.pad_removed_data.user = user;
- call_cb(&cbdata);
+}
+void source_all_streams_wrapper(GstElement *element, gpointer user) +{
- struct cb_data cbdata = { SOURCE_ALL_STREAMS };
- cbdata.u.no_more_pads_data.element = element;
- cbdata.u.no_more_pads_data.user = user;
- call_cb(&cbdata);
+}
"all_streams" is a bit confusing at first glance; any reason not to use the GStreamer name "no_more_pads"?
no_more_pads_wrapper is already taken for gstdemux.c
diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 10e999feea7..d87cc8c21e9 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -48,6 +48,10 @@ enum CB_TYPE { QUERY_BYTESTREAM, ACTIVATE_BYTESTREAM_PAD_MODE, PROCESS_BYTESTREAM_PAD_EVENT,
- WATCH_SOURCE_BUS,
- SOURCE_STREAM_ADDED,
- SOURCE_STREAM_REMOVED,
- SOURCE_ALL_STREAMS, MEDIA_SOURCE_MAX, };
@@ -164,5 +168,9 @@ GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN; +GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) DECLSPEC_HIDDEN; +void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; +void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 6b3bd4a7869..29af2b72def 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -24,6 +24,7 @@ #include "gst_private.h" #include "gst_cbs.h"
+#include <assert.h> #include <stdarg.h>
#define COBJMACROS @@ -40,21 +41,48 @@
WINE_DEFAULT_DEBUG_CHANNEL(mfplat);
+struct media_stream +{
- IMFMediaStream IMFMediaStream_iface;
- LONG ref;
- struct media_source *parent_source;
- IMFMediaEventQueue *event_queue;
- GstElement *appsink;
- GstPad *their_src, *my_sink;
- enum
- {
STREAM_STUB,
STREAM_INACTIVE,
These two values are set, but not (really) used in this patch.
👍
STREAM_SHUTDOWN,
- } state;
+};
- struct media_source { IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue; IMFByteStream *byte_stream;
- GstPad *my_src;
- struct media_stream **streams;
- ULONG stream_count;
- GstBus *bus;
- GstElement *container;
- GstElement *decodebin;
- GstPad *my_src, *their_sink; enum { SOURCE_OPENING, SOURCE_STOPPED, SOURCE_SHUTDOWN, } state;
- HANDLE all_streams_event; };
+static inline struct media_stream *impl_from_IMFMediaStream(IMFMediaStream *iface) +{
- return CONTAINING_RECORD(iface, struct media_stream, IMFMediaStream_iface);
+}
- static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *iface) { return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface);
@@ -208,6 +236,243 @@ static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, Gst return TRUE; }
+GstBusSyncReply watch_source_bus(GstBus *bus, GstMessage *message, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- gchar *dbg_info = NULL;
- GError *err = NULL;
- TRACE("source %p message type %s\n", source, GST_MESSAGE_TYPE_NAME(message));
- switch (message->type)
- {
case GST_MESSAGE_ERROR:
gst_message_parse_error(message, &err, &dbg_info);
ERR("%s: %s\n", GST_OBJECT_NAME(message->src), err->message);
ERR("%s\n", dbg_info);
g_error_free(err);
g_free(dbg_info);
break;
case GST_MESSAGE_WARNING:
gst_message_parse_warning(message, &err, &dbg_info);
WARN("%s: %s\n", GST_OBJECT_NAME(message->src), err->message);
WARN("%s\n", dbg_info);
g_error_free(err);
g_free(dbg_info);
break;
default:
break;
- }
- return GST_BUS_PASS;
+}
There's no async queue used in this patch, so returning GST_BUS_PASS will effectively leak messages.
👍
+static HRESULT WINAPI media_stream_QueryInterface(IMFMediaStream *iface, REFIID riid, void **out) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%s %p)\n", stream, debugstr_guid(riid), out);
- if (IsEqualIID(riid, &IID_IMFMediaStream) ||
IsEqualIID(riid, &IID_IMFMediaEventGenerator) ||
IsEqualIID(riid, &IID_IUnknown))
- {
*out = &stream->IMFMediaStream_iface;
- }
- else
- {
FIXME("(%s, %p)\n", debugstr_guid(riid), out);
*out = NULL;
return E_NOINTERFACE;
- }
- IUnknown_AddRef((IUnknown*)*out);
- return S_OK;
+}
+static ULONG WINAPI media_stream_AddRef(IMFMediaStream *iface) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- ULONG ref = InterlockedIncrement(&stream->ref);
- TRACE("(%p) ref=%u\n", stream, ref);
- return ref;
+}
+static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- ULONG ref = InterlockedDecrement(&stream->ref);
- TRACE("(%p) ref=%u\n", stream, ref);
- if (!ref)
- {
if (stream->my_sink)
gst_object_unref(GST_OBJECT(stream->my_sink));
if (stream->event_queue)
IMFMediaEventQueue_Release(stream->event_queue);
if (stream->parent_source)
IMFMediaSource_Release(&stream->parent_source->IMFMediaSource_iface);
heap_free(stream);
- }
- return ref;
+}
+static HRESULT WINAPI media_stream_GetEvent(IMFMediaStream *iface, DWORD flags, IMFMediaEvent **event) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%#x, %p)\n", stream, flags, event);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_GetEvent(stream->event_queue, flags, event);
+}
+static HRESULT WINAPI media_stream_BeginGetEvent(IMFMediaStream *iface, IMFAsyncCallback *callback, IUnknown *state) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p, %p)\n", stream, callback, state);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_BeginGetEvent(stream->event_queue, callback, state);
+}
+static HRESULT WINAPI media_stream_EndGetEvent(IMFMediaStream *iface, IMFAsyncResult *result, IMFMediaEvent **event) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p, %p)\n", stream, result, event);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_EndGetEvent(stream->event_queue, result, event);
+}
+static HRESULT WINAPI media_stream_QueueEvent(IMFMediaStream *iface, MediaEventType event_type, REFGUID ext_type,
HRESULT hr, const PROPVARIANT *value)
+{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%d, %s, %#x, %p)\n", stream, event_type, debugstr_guid(ext_type), hr, value);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return IMFMediaEventQueue_QueueEventParamVar(stream->event_queue, event_type, ext_type, hr, value);
+}
+static HRESULT WINAPI media_stream_GetMediaSource(IMFMediaStream *iface, IMFMediaSource **source) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- FIXME("stub (%p)->(%p)\n", stream, source);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IMFStreamDescriptor **descriptor) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p)\n", stream, descriptor);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token) +{
- struct media_stream *stream = impl_from_IMFMediaStream(iface);
- TRACE("(%p)->(%p)\n", iface, token);
- if (stream->state == STREAM_SHUTDOWN)
return MF_E_SHUTDOWN;
- return E_NOTIMPL;
+}
+static const IMFMediaStreamVtbl media_stream_vtbl = +{
- media_stream_QueryInterface,
- media_stream_AddRef,
- media_stream_Release,
- media_stream_GetEvent,
- media_stream_BeginGetEvent,
- media_stream_EndGetEvent,
- media_stream_QueueEvent,
- media_stream_GetMediaSource,
- media_stream_GetStreamDescriptor,
- media_stream_RequestSample
+};
+/* creates a stub stream */ +static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD stream_id, struct media_stream **out_stream)
You don't use "stream_id" in this patch.
+{
- struct media_stream *object = heap_alloc_zero(sizeof(*object));
- HRESULT hr;
- TRACE("(%p %p)->(%p)\n", source, pad, out_stream);
- object->IMFMediaStream_iface.lpVtbl = &media_stream_vtbl;
- object->ref = 1;
- IMFMediaSource_AddRef(&source->IMFMediaSource_iface);
- object->parent_source = source;
- object->their_src = pad;
- object->state = STREAM_STUB;
- if (FAILED(hr = MFCreateEventQueue(&object->event_queue)))
goto fail;
- if (!(object->appsink = gst_element_factory_make("appsink", NULL)))
- {
hr = E_OUTOFMEMORY;
goto fail;
- }
- gst_bin_add(GST_BIN(object->parent_source->container), object->appsink);
- g_object_set(object->appsink, "emit-signals", TRUE, NULL);
I might defer this line until you actually hook up the signals.
- g_object_set(object->appsink, "sync", FALSE, NULL);
- g_object_set(object->appsink, "max-buffers", 5, NULL);
So, just to clarify for my understanding, the reason we want to buffer is because the downstream renderer will request samples basically as soon as presentation time hits (through some weird roundabout process) and we want to minimize the latency there?
Nope, the appsink will always buffer, and max-buffers just specifies where to put the limit. 5 is just a random value that felt reasonable, and specifying 1 would probably not cause any problems.
- g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
If I understand correctly, this means that GStreamer will signal EOS potentially before pushing all buffers; is that what we want?
Yeah, this shouldn't be here, it's a vestige of my much older media source code.
- object->my_sink = gst_element_get_static_pad(object->appsink, "sink");
- gst_pad_set_element_private(object->my_sink, object);
- gst_pad_link(object->their_src, object->my_sink);
- gst_element_sync_state_with_parent(object->appsink);
- TRACE("->(%p)\n", object);
- *out_stream = object;
- return S_OK;
- fail:
Personally I wouldn't bother using gotos in this function, but if you do, it's at least clearer if you put the label at the beginning of the line.
- WARN("Failed to construct media stream, hr %#x.\n", hr);
- IMFMediaStream_Release(&object->IMFMediaStream_iface);
- return hr;
+}
static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out)
{
    struct media_source *source = impl_from_IMFMediaSource(iface);
@@ -367,13 +632,34 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
if (source->container)
{
gst_element_set_state(source->container, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(source->container));
}
    if (source->my_src)
        gst_object_unref(GST_OBJECT(source->my_src));
    if (source->their_sink)
        gst_object_unref(GST_OBJECT(source->their_sink));

    if (source->event_queue)
        IMFMediaEventQueue_Shutdown(source->event_queue);
    if (source->byte_stream)
        IMFByteStream_Release(source->byte_stream);
for (unsigned int i = 0; i < source->stream_count; i++)
Misplaced variable declaration.
- {
source->streams[i]->state = STREAM_SHUTDOWN;
IMFMediaStream_Release(&source->streams[i]->IMFMediaStream_iface);
- }
- if (source->stream_count)
heap_free(source->streams);
- if (source->all_streams_event)
CloseHandle(source->all_streams_event);
    return S_OK;
}

@@ -394,6 +680,63 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl =
    media_source_Shutdown,
};
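(Regarding the "misplaced variable declaration" note above: in Wine's C89-style convention the counter would simply be hoisted to the top of the function, along these lines.)

    unsigned int i;  /* declared at the top of media_source_Shutdown() */

    for (i = 0; i < source->stream_count; i++)
    {
        source->streams[i]->state = STREAM_SHUTDOWN;
        IMFMediaStream_Release(&source->streams[i]->IMFMediaStream_iface);
    }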
+static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- struct media_stream **new_stream_array;
- struct media_stream *stream;
- gchar *g_stream_id;
- DWORD stream_id;
- if (gst_pad_get_direction(pad) != GST_PAD_SRC)
return;
- /* Most/All seen randomly calculate the initial part of the stream id, the last three digits are the only deterministic part */
- g_stream_id = GST_PAD_NAME(pad);
- sscanf(strstr(g_stream_id, "_"), "_%u", &stream_id);
- TRACE("stream-id: %u\n", stream_id);
I don't know what "stream_id" is supposed to be used for, but depending on the pad name seems wrong.
Yeah, I'm not sure. Theoretically we could have an input file with media streams which don't last throughout the whole duration. In this case, the approach taken by quartz may not work, if say, for instance, an application seeked halfway through the source, and we started conflating stream-added events with the wrong cached media_stream objects.
- if (FAILED(new_media_stream(source, pad, stream_id, &stream)))
- {
goto leave;
- }
- if (!(new_stream_array = heap_realloc(source->streams, (source->stream_count + 1) * (sizeof(*new_stream_array)))))
- {
ERR("Failed to add stream to source\n");
goto leave;
- }
- source->streams = new_stream_array;
- source->streams[source->stream_count++] = stream;
- leave:
This label is very superfluous.
- return;
+}
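(For comparison, a sketch of the same function without the label, using early returns; the identifiers are the ones from the patch, and this is only an illustration.)

static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
{
    struct media_source *source = (struct media_source *)user;
    struct media_stream **new_stream_array;
    struct media_stream *stream;
    DWORD stream_id;

    if (gst_pad_get_direction(pad) != GST_PAD_SRC)
        return;

    /* last part of the pad name, as in the patch */
    sscanf(strstr(GST_PAD_NAME(pad), "_"), "_%u", &stream_id);

    if (FAILED(new_media_stream(source, pad, stream_id, &stream)))
        return;

    if (!(new_stream_array = heap_realloc(source->streams, (source->stream_count + 1) * sizeof(*new_stream_array))))
    {
        ERR("Failed to add stream to source\n");
        return;
    }

    source->streams = new_stream_array;
    source->streams[source->stream_count++] = stream;
}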
+static void source_stream_removed(GstElement *element, GstPad *pad, gpointer user) +{
- struct media_source *source = (struct media_source *)user;
- for (unsigned int i = 0; i < source->stream_count; i++)
Misplaced variable initializer.
- {
struct media_stream *stream = source->streams[i];
if (stream->their_src != pad)
continue;
stream->their_src = NULL;
if (stream->state != STREAM_INACTIVE)
stream->state = STREAM_INACTIVE;
- }
+}
+static void source_all_streams(GstElement *element, gpointer user) +{
- struct media_source *source = (struct media_source *) user;
- SetEvent(source->all_streams_event);
+}
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source)
{
    GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(

@@ -404,6 +747,7 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
    struct media_source *object = heap_alloc_zero(sizeof(*object));
    HRESULT hr;
    int ret;

    if (!object)
        return E_OUTOFMEMORY;

@@ -412,10 +756,16 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
    object->ref = 1;
    object->byte_stream = bytestream;
    IMFByteStream_AddRef(bytestream);
object->all_streams_event = CreateEventA(NULL, FALSE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
object->container = gst_bin_new(NULL);
object->bus = gst_bus_new();
gst_bus_set_sync_handler(object->bus, watch_source_bus_wrapper, object, NULL);
gst_element_set_bus(object->container, object->bus);
    object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src");
    gst_pad_set_element_private(object->my_src, object);
    gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper);

@@ -423,6 +773,46 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
    gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper);
    gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->decodebin = gst_element_factory_make("decodebin", NULL);
- if (!(object->decodebin))
- {
WARN("Failed to create decodebin for source\n");
hr = E_OUTOFMEMORY;
goto fail;
- }
- /* the appsinks determine the maximum amount of buffering instead, this means that if one stream isn't read, a leak will happen, like on windows */
Very long line, and this comment is rather confusing. What you probably want to do is mention the application and describe the problem that it would run into if the default buffering settings were used.
g_object_set(object->decodebin, "max-size-buffers", 0, NULL);
g_object_set(object->decodebin, "max-size-time", G_GUINT64_CONSTANT(0), NULL);
g_object_set(object->decodebin, "max-size-bytes", 0, NULL);
gst_bin_add(GST_BIN(object->container), object->decodebin);
g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object);
g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object);
g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object);
object->their_sink = gst_element_get_static_pad(object->decodebin, "sink");
if ((ret = gst_pad_link(object->my_src, object->their_sink)) < 0)
{
WARN("Failed to link our bytestream pad to the demuxer input\n");
hr = E_OUTOFMEMORY;
goto fail;
}
object->state = SOURCE_OPENING;
gst_element_set_state(object->container, GST_STATE_PAUSED);
ret = gst_element_get_state(object->container, NULL, NULL, -1);
if (ret == GST_STATE_CHANGE_FAILURE)
{
ERR("Failed to play source.\n");
hr = E_OUTOFMEMORY;
goto fail;
}
WaitForSingleObject(object->all_streams_event, INFINITE);
    object->state = SOURCE_STOPPED;

    *out_media_source = object;

@@ -923,6 +1313,30 @@ void perform_cb_media_source(struct cb_data *cbdata)
            cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event);
            break;
        }
- case WATCH_SOURCE_BUS:
{
struct watch_bus_data *data = &cbdata->u.watch_bus_data;
cbdata->u.watch_bus_data.ret = watch_source_bus(data->bus, data->msg, data->user);
break;
}
- case SOURCE_STREAM_ADDED:
{
struct pad_added_data *data = &cbdata->u.pad_added_data;
source_stream_added(data->element, data->pad, data->user);
break;
}
- case SOURCE_STREAM_REMOVED:
{
struct pad_removed_data *data = &cbdata->u.pad_removed_data;
source_stream_removed(data->element, data->pad, data->user);
break;
}
- case SOURCE_ALL_STREAMS:
{
struct no_more_pads_data *data = &cbdata->u.no_more_pads_data;
source_all_streams(data->element, data->user);
break;
        }
        default:
        {
            ERR("Wrong callback forwarder called\n");
On 9/10/20 11:28 AM, Derek Lesho wrote:
On 9/9/20 6:28 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
 dlls/winegstreamer/gst_cbs.c      |  45 ++++
 dlls/winegstreamer/gst_cbs.h      |   8 +
 dlls/winegstreamer/media_source.c | 416 +++++++++++++++++++++++++++++-
 3 files changed, 468 insertions(+), 1 deletion(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c
index 8f48368c96a..4755f5b42f1 100644
--- a/dlls/winegstreamer/gst_cbs.c
+++ b/dlls/winegstreamer/gst_cbs.c
@@ -359,3 +359,48 @@ gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, Gs
return cbdata.u.event_src_data.ret;
}
+GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) +{
- struct cb_data cbdata = { WATCH_SOURCE_BUS };
- cbdata.u.watch_bus_data.bus = bus;
- cbdata.u.watch_bus_data.msg = message;
- cbdata.u.watch_bus_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.watch_bus_data.ret;
+}
+void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_ADDED };
- cbdata.u.pad_added_data.element = bin;
- cbdata.u.pad_added_data.pad = pad;
- cbdata.u.pad_added_data.user = user;
- call_cb(&cbdata);
+}
The function naming is a bit inconsistent. I'd recommend standardizing on the "source_stream_added" style (i.e. object first, then verb), on the principle of narrowing scope. Same thing for 1/3, really.
(In general I wouldn't be too attached to the gstreamer function names; they're not always the best.)
+void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) +{
- struct cb_data cbdata = { SOURCE_STREAM_REMOVED };
- cbdata.u.pad_removed_data.element = element;
- cbdata.u.pad_removed_data.pad = pad;
- cbdata.u.pad_removed_data.user = user;
- call_cb(&cbdata);
+}
+void source_all_streams_wrapper(GstElement *element, gpointer user) +{
- struct cb_data cbdata = { SOURCE_ALL_STREAMS };
- cbdata.u.no_more_pads_data.element = element;
- cbdata.u.no_more_pads_data.user = user;
- call_cb(&cbdata);
+}
"all_streams" is a bit confusing at first glance; any reason not to use the GStreamer name "no_more_pads"?
no_more_pads_wrapper is already taken for gstdemux.c
I think the solution to that is to add a namespace discriminator, e.g. "mfplat_source_no_more_pads" (and ideally to both, but renaming all of the quartz callbacks can probably wait for another day), rather than trying to use something nearly synonymous.
[To expand on that a bit, it's probably not a bad idea to be more specific with all of our callback names, and even function names in general. "mfplat_source" or "mf_source" seems better than "source"; I guess quartz should probably get a prefix like "quartz_parser" or "quartz_demuxer", not sure. Naming is hard in winegstreamer.]
diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h
index 10e999feea7..d87cc8c21e9 100644
--- a/dlls/winegstreamer/gst_cbs.h
+++ b/dlls/winegstreamer/gst_cbs.h
@@ -48,6 +48,10 @@ enum CB_TYPE {
    QUERY_BYTESTREAM,
    ACTIVATE_BYTESTREAM_PAD_MODE,
    PROCESS_BYTESTREAM_PAD_EVENT,
    WATCH_SOURCE_BUS,
    SOURCE_STREAM_ADDED,
    SOURCE_STREAM_REMOVED,
    SOURCE_ALL_STREAMS,
    MEDIA_SOURCE_MAX,
};

@@ -164,5 +168,9 @@ GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint
gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN;
gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN;
gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN;
GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpointer user) DECLSPEC_HIDDEN;
void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN;
void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN;
void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN;

#endif

diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c
index 6b3bd4a7869..29af2b72def 100644
--- a/dlls/winegstreamer/media_source.c
+++ b/dlls/winegstreamer/media_source.c
@@ -24,6 +24,7 @@
#include "gst_private.h"
#include "gst_cbs.h"

#include <assert.h>
#include <stdarg.h>

#define COBJMACROS

@@ -40,21 +41,48 @@
WINE_DEFAULT_DEBUG_CHANNEL(mfplat);
+struct media_stream +{
- IMFMediaStream IMFMediaStream_iface;
- LONG ref;
- struct media_source *parent_source;
- IMFMediaEventQueue *event_queue;
- GstElement *appsink;
- GstPad *their_src, *my_sink;
- enum
- {
STREAM_STUB,
STREAM_INACTIVE,
These two values are set, but not (really) used in this patch.
STREAM_SHUTDOWN,
- } state;
+};
So, just to clarify for my understanding, the reason we want to buffer is because the downstream renderer will request samples basically as soon as presentation time hits (through some weird roundabout process) and we want to minimize the latency there?
Nope, the appsink will always buffer, and max-buffers just specifies where to put the limit. 5 is just a random value that felt reasonable, and specifying 1 would probably not cause any problems.
I got the impression from some past communication that the reason for using appsink in the first place (instead of just a solitary sink pad) is so that we can buffer, or is there some other reason?
I don't know what "stream_id" is supposed to be used for, but depending on the pad name seems wrong.
Yeah, I'm not sure. Theoretically we could have an input file with media streams which don't last throughout the whole duration. In this case, the approach taken by quartz may not work, if say, for instance, an application seeked halfway through the source, and we started conflating stream-added events with the wrong cached media_stream objects.
I get the vague impression that this is for the identifier returned by IMFStreamDescriptor::GetStreamIdentifier(), in which case I suspect what we want to do is use a source-specific counter, i.e. handle it all on the MF side.
I don't know how Media Foundation handles things like dynamically added streams (frankly; I can't recall offhand what kinds of media files those even show up in, except chained oggs maybe?), so I guess I'd need more context on that to offer any advice there. Of course, if we never need to worry about stream reconnection the way quartz does, it may be a moot point.
[As an aside, I really don't like the way quartz/winegstreamer reconnects its streams; it's very fragile. I haven't looked very deeply into how that could be improved, though.]
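(To make the counter idea concrete, a hypothetical fragment; new_media_stream() is the helper from the patch, and the descriptor call in the comment is not part of this series yet.)

    /* hand out identifiers on the MF side instead of parsing the pad name */
    DWORD stream_id = source->stream_count;

    if (FAILED(new_media_stream(source, pad, stream_id, &stream)))
        return;

    /* later, when the stream descriptors are built:
       MFCreateStreamDescriptor(stream_id, type_count, types, &descriptor); */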
On 9/10/20 6:43 PM, Zebediah Figura wrote:
I got the impression from some past communication that the reason for using appsink in the first place (instead of just a solitary sink pad) is so that we can buffer, or is there some other reason?
Well, buffering is necessary, as media streams operate in a sort of pull/push mode where we can only send them samples if they have requested one with IMFMediaStream::RequestSample. Also, appsink is just more convenient than manually setting up a pad, I've also considered changing the source pad to appsrc, but if it isn't broken....
On 9/11/20 10:18 AM, Derek Lesho wrote:
On 9/10/20 6:43 PM, Zebediah Figura wrote:
I got the impression from some past communication that the reason for using appsink in the first place (instead of just a solitary sink pad) is so that we can buffer, or is there some other reason?
Well, buffering is necessary, as media streams operate in a sort of pull/push mode where we can only send them samples if they have requested one with IMFMediaStream::RequestSample.
Sure, but it also seems potentially simple enough to just wait on a semaphore in the chain function. If it makes sense (for latency reasons) to buffer more than that, then there's a potential argument for appsink (but also, perhaps, just an argument for appending a queue element after each stream). I don't know/remember the relevant details for Media Foundation timing.
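(Roughly what that would look like; a sketch only. stream_sink_chain, sample_request, sample_ready and pending_buffer are invented names, and a flush would have to signal the same wait, as discussed further down.)

static GstFlowReturn stream_sink_chain(GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
    struct media_stream *stream = gst_pad_get_element_private(pad);

    /* block until IMFMediaStream::RequestSample has signalled demand */
    WaitForSingleObject(stream->sample_request, INFINITE);

    /* hand the buffer to whatever builds the IMFSample */
    stream->pending_buffer = buffer;
    SetEvent(stream->sample_ready);

    return GST_FLOW_OK;
}

/* at pad creation time: */
gst_pad_set_chain_function(stream->my_sink, stream_sink_chain);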
Also, appsink is just more convenient than manually setting up a pad, I've also considered changing the source pad to appsrc, but if it isn't broken....
Is it, though? I don't know what all the things you need to hook up are, but it looks to me like you essentially have to set up the same callbacks, just in a different form. In the case of caps it makes things more than a little uglier, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
On 9/11/20 11:24 AM, Zebediah Figura wrote:
Sure, but it also seems potentially simple enough to just wait on a semaphore in the chain function.
It's definitely more complicated for a system like that (I used to have it that way). With appsink, we just insert the request sample call into a command queue, and pull the buffer from the appsink in the async command. The command queue is completely synchronous so we don't have to worry about stuff like seeking or stopping interfering. With the semaphore solution, I'm not sure how much we have to worry about those cases, but I remember having all sorts of strange bugs.
If it makes sense (for latency reasons) to buffer more than that, then there's a potential argument for appsink (but also, perhaps, just an argument for appending a queue element after each stream). I don't know/remember the relevant details for Media Foundation timing.
Also, appsink is just more convenient than manually setting up a pad, I've also considered changing the source pad to appsrc, but if it isn't broken....
Is it, though? I don't know what all the things you need to hook up are, but it looks to me like you essentially have to set up the same callbacks
We don't set up any callbacks for appsink, just pull a buffer when we need one. And given how much boilerplate and how many naming problems callbacks cause in winegstreamer, it's probably better this way.
https://github.com/Guy1524/wine/commit/366946876d7a53f80d91749f290f511294415...
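(Concretely, the RequestSample path boils down to something like this; a sketch assuming gst/app/gstappsink.h, with the IMFSample conversion elided.)

    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(stream->appsink));

    if (sample)
    {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        /* wrap buffer in an IMFSample/IMFMediaBuffer and queue MEMediaSample */
        gst_sample_unref(sample);
    }

The same thing can also be done without linking against libgstapp by emitting the "pull-sample" action signal on the element instead.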
, just in a different form. In the case of caps it makes things more than a little uglier
What does, appsink or a custom pad? With appsink all you need to do is set the desired caps property.
, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
I'm not sure what you're referring to here. All we need to do in terms of end of stream is read the "eos" property on the appsink during the RequestSample-derived command, and dispatch the relevant events.
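(For reference, that check is just a property read; the event dispatch shown here is illustrative, not something in the patch.)

    gboolean eos = FALSE;

    g_object_get(stream->appsink, "eos", &eos, NULL);
    if (eos)
        IMFMediaEventQueue_QueueEventParamVar(stream->event_queue, MEEndOfStream, &GUID_NULL, S_OK, NULL);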
On 9/11/20 12:23 PM, Derek Lesho wrote:
It's definitely more complicated for a system like that (I used to have it that way). With appsink, we just insert the request sample call into a command queue, and pull the buffer from the appsink in the async command. The command queue is completely synchronous so we don't have to worry about stuff like seeking or stopping interfering. With the semaphore solution, I'm not sure how much we have to worry about those cases, but I remember having all sorts of strange bugs.
Could you please describe in more detail the problems you encountered? To be sure you'd need an extra event in there to handle flushes, but that doesn't seem like a deal-breaker to me either.
If it makes sense (for latency reasons) to buffer more than that, then there's a potential argument for appsink (but also, perhaps, just an argument for appending a queue element after each stream). I don't know/remember the relevant details for Media Foundation timing.
Also, appsink is just more convenient than manually setting up a pad, I've also considered changing the source pad to appsrc, but if it isn't broken....
Is it, though? I don't know what all the things you need to hook up are, but it looks to me like you essentially have to set up the same callbacks
We don't set up any callbacks for appsink, just pull a buffer when we need one.
Well, you do, actually, to get current caps (and, eventually, for caps negotiation). appsink does save you a chain function, but I don't immediately see anything else that it saves you.
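(A sketch of such a probe, catching the CAPS event on the stream's sink pad; caps_probe is an invented name, the fields are the ones from the patch.)

static GstPadProbeReturn caps_probe(GstPad *pad, GstPadProbeInfo *info, gpointer user)
{
    struct media_stream *stream = user;
    GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);

    if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS)
    {
        GstCaps *caps;
        gst_event_parse_caps(event, &caps);
        /* translate caps into IMFMediaType(s) for stream's media type handler */
    }
    return GST_PAD_PROBE_OK;
}

/* at stream creation: */
gst_pad_add_probe(stream->my_sink, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_probe, stream, NULL);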
And given how much boilerplate and how many naming problems callbacks cause in winegstreamer, it's probably better this way.
Wiring up GStreamer callbacks is annoying, yes, but that annoyance may be worthwhile. In particular adding another callback doesn't actually increase code complexity; it's just copying an existing pattern.
winegstreamer has some bad names, as I've complained in this thread, but that's a preëxisting problem that wouldn't be exacerbated by adding a chain function.
https://github.com/Guy1524/wine/commit/366946876d7a53f80d91749f290f511294415...
, just in a different form. In the case of caps it makes things more than a little uglier
What does, appsink or a custom pad?Β With appsink all you need to do is set the desired caps property.
, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
I'm not sure what you're referring to here.Β All we need to do in terms of end of stream is read the "eos" property on the appsink during the RequestSample-derived command, and dispatch the relevant events.
Yes, my point is that it's not really any different for GstPad; quartz waits for GST_EVENT_EOS but that's only because it matches quartz's threading model much better.
To be quite clear, I'm not trying to argue that appsink shouldn't be used here; I'm just trying to understand all the potential factors behind the decision, and essentially stress-test it. So far as I see appsink provides a built-in buffer that matches mfplat's model nicely, which is good, but it also forces the use of probes and thereby some tricky parameter passing, which is not good.
On 9/14/20 4:21 PM, Zebediah Figura wrote:
Could you please describe in more detail the problems you encountered?
To be honest, I don't remember them too clearly, but back then I had a system where the appsink was accessed by both RequestSample and the new-sample callback. When there were outstanding requests, the new-sample callback would dispatch MEMediaSample, and when there were buffered samples, RequestSample would pop one and dispatch MEMediaSample. The problems occurred around the state of pending samples being invalidated upon actions performed on the gstreamer pipeline. Of course, if we redid it now, I'm sure we could avoid this, but it just seems like it would be more tricky.
To be sure you'd need an extra event in there to handle flushes, but that doesn't seem like a deal-breaker to me either.
Flushing is what caused me the most trouble earlier on, yeah.
If it makes sense (for latency reasons) to buffer more than that, then there's a potential argument for appsink (but also, perhaps, just an argument for appending a queue element after each stream). I don't know/remember the relevant details for Media Foundation timing.
Also, appsink is just more convenient than manually setting up a pad, I've also considered changing the source pad to appsrc, but if it isn't broken....
Is it, though? I don't know what all the things you need to hook up are, but it looks to me like you essentially have to set up the same callbacks
We don't set up any callbacks for appsink, just pull a buffer when we need one.
Well, you do, actually, to get current caps (and, eventually, for caps negotiation). appsink does save you a chain function, but I don't immediately see anything else that it saves you.
https://source.winehq.org/git/wine.git/blob/HEAD:/dlls/winegstreamer/gstdemu...
Seems like it saves us from needing an event and query function as well, going off the quartz equivalent. And it looks to me like there's a significant enough amount of boilerplate in them for it to be undesirable.
And given how much boilerplate and how many naming problems callbacks cause in winegstreamer, it's probably better this way.
Wiring up GStreamer callbacks is annoying, yes, but that annoyance may be worthwhile. In particular adding another callback doesn't actually increase code complexity; it's just copying an existing pattern.
winegstreamer has some bad names, as I've complained in this thread, but that's a preëxisting problem that wouldn't be exacerbated by adding a chain function.
https://github.com/Guy1524/wine/commit/366946876d7a53f80d91749f290f511294415...
, just in a different form. In the case of caps it makes things more than a little uglier
What does, appsink or a custom pad?Β With appsink all you need to do is set the desired caps property.
, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
I'm not sure what you're referring to here.Β All we need to do in terms of end of stream is read the "eos" property on the appsink during the RequestSample-derived command, and dispatch the relevant events.
Yes, my point is that it's not really any different for GstPad; quartz waits for GST_EVENT_EOS but that's only because it matches quartz's threading model much better.
To be quite clear, I'm not trying to argue that appsink shouldn't be used here; I'm just trying to understand all the potential factors behind the decision, and essentially stress-test it. So far as I see appsink provides a built-in buffer that matches mfplat's model nicely, which is good, but it also forces the use of probes and thereby some tricky parameter passing, which is not good.
Well, the probe isn't strictly necessary, another option is to wait on the paused state, then read the caps from the pads, but the pad probe way would be useful in a case where we want to enumerate all the fixed types to expose on the stream's media type handler.
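(i.e. something along these lines; a sketch with error handling trimmed, reusing the container/their_src fields from the patch.)

    GstCaps *caps;

    /* wait for the bin to finish pre-rolling in PAUSED */
    if (gst_element_get_state(source->container, NULL, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE)
        return E_FAIL;

    if ((caps = gst_pad_get_current_caps(stream->their_src)))
    {
        /* build the stream's IMFMediaType(s) from the negotiated caps */
        gst_caps_unref(caps);
    }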
On 9/15/20 8:47 AM, Derek Lesho wrote:
Seems like it saves us from needing an event and query function as well, going off the quartz equivalent.
Not really, since you need to add a probe function instead. Note that you also don't need to handle most of the events that quartz does.
And it looks to me like there's a significant enough amount of boilerplate in them for it to be undesirable.
I'm not sure what in those functions qualifies as boilerplate?
And given how much boilerplate and how many naming problems callbacks cause in winegstreamer, it's probably better this way.
Wiring up GStreamer callbacks is annoying, yes, but that annoyance may be worthwhile. In particular adding another callback doesn't actually increase code complexity; it's just copying an existing pattern.
winegstreamer has some bad names, as I've complained in this thread, but that's a preëxisting problem that wouldn't be exacerbated by adding a chain function.
https://github.com/Guy1524/wine/commit/366946876d7a53f80d91749f290f511294415...
, just in a different form. In the case of caps it makes things more than a little uglier
What does, appsink or a custom pad?Β With appsink all you need to do is set the desired caps property.
, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
I'm not sure what you're referring to here.Β All we need to do in terms of end of stream is read the "eos" property on the appsink during the RequestSample-derived command, and dispatch the relevant events.
Yes, my point is that it's not really any different for GstPad; quartz waits for GST_EVENT_EOS but that's only because it matches quartz's threading model much better.
To be quite clear, I'm not trying to argue that appsink shouldn't be used here; I'm just trying to understand all the potential factors behind the decision, and essentially stress-test it. So far as I see appsink provides a built-in buffer that matches mfplat's model nicely, which is good, but it also forces the use of probes and thereby some tricky parameter passing, which is not good.
Well, the probe isn't strictly necessary, another option is to wait on the paused state, then read the caps from the pads, but the pad probe way would be useful in a case where we want to enumerate all the fixed types to expose on the stream's media type handler.
Wait for what, exactly?
On 9/15/20 10:07 AM, Zebediah Figura wrote:
I'm not sure what in those functions qualifies as boilerplate?
I just took a quick look at it, and saw the gstreamer bug workaround, but that's it. So I take back what I said, it seems like it's just a case of quartz pins aligning closer to gstreamer pads.
And given how much boilerplate and how many naming problems callbacks cause in winegstreamer, it's probably better this way.
Wiring up GStreamer callbacks is annoying, yes, but that annoyance may be worthwhile. In particular adding another callback doesn't actually increase code complexity; it's just copying an existing pattern.
winegstreamer has some bad names, as I've complained in this thread, but that's a preëxisting problem that wouldn't be exacerbated by adding a chain function.
https://github.com/Guy1524/wine/commit/366946876d7a53f80d91749f290f511294415...
, just in a different form. In the case of caps it makes things more than a little uglier
What does, appsink or a custom pad?Β With appsink all you need to do is set the desired caps property.
, especially if you later end up doing more complicated caps negotiation. Note also that GstPad has default handling of EOS events and GST_PAD_IS_EOS(), which I'm inclined to understand is more useful for MF than handling GST_EVENT_EOS directly.
I'm not sure what you're referring to here. All we need to do in terms of end of stream is read the "eos" property on the appsink during the RequestSample-derived command, and dispatch the relevant events.
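(A sketch of that check, assuming the stream struct from the patch; exactly which events the source dispatches on end of stream is stated here as an assumption about the usual mfplat signalling, not something this patch implements.)

    gboolean eos = FALSE;

    g_object_get(stream->appsink, "eos", &eos, NULL);
    if (eos)
    {
        /* report end of stream for this stream */
        IMFMediaEventQueue_QueueEventParamVar(stream->event_queue, MEEndOfStream, &GUID_NULL, S_OK, NULL);
        /* the source would also send MEEndOfPresentation once every stream has ended */
    }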
Yes, my point is that it's not really any different for GstPad; quartz waits for GST_EVENT_EOS but that's only because it matches quartz's threading model much better.
To be quite clear, I'm not trying to argue that appsink shouldn't be used here; I'm just trying to understand all the potential factors behind the decision, and essentially stress-test it. So far as I see appsink provides a built-in buffer that matches mfplat's model nicely, which is good, but it also forces the use of probes and thereby some tricky parameter passing, which is not good.
Well, the probe isn't strictly necessary; another option is to wait on the paused state and then read the caps from the pads. But the pad probe way would be useful in a case where we want to enumerate all the fixed types to expose on the stream's media type handler.
Wait for what, exactly?
Well, at first, my thought was that decodebin may output a wide array of possible formats, and I wanted to expose them in the stream's media type handler. I can't just go off the caps exposed in pad-added, as they are just templates. As it happened, though, it seemed like the only things which would vary are the video formats, so instead I decided to just take the first caps decodebin exposes, then duplicate the type for every output format videoconvert exposes. I kept the pad probe method for getting caps, but I'm not currently aware of any circumstance where we'd still need to retain the ability to loop through every caps possibility. If you don't think this is something we'll rub up against in the future, I can get rid of the probe and have 0 sink callbacks :-)
What you are (were) looking for is an (upstream) GST_QUERY_CAPS, or gst_pad_query_caps() for short.
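(That is, something along these lines:)

    GstCaps *caps = gst_pad_query_caps(pad, NULL);
    /* enumerate the structures in caps to build the list of supported types */
    gst_caps_unref(caps);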
You can't get rid of the probe in any case, because you need to wait for caps to actually be negotiated before you can query them at all. That is, you need to wait for GST_EVENT_CAPS in some form or another. As far as I can tell there's no API-level guarantee that the caps will be fixed by the time a ready -> paused transition completes.
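(This is essentially the shape of the caps_listener probe in the patch, condensed here for reference.)

    static GstPadProbeReturn caps_probe(GstPad *pad, GstPadProbeInfo *info, gpointer user)
    {
        struct media_stream *stream = user;
        GstEvent *event = gst_pad_probe_info_get_event(info);

        if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS)
        {
            GstCaps *caps;

            gst_event_parse_caps(event, &caps);
            stream->their_caps = gst_caps_copy(caps);
            SetEvent(stream->caps_event); /* wake the thread building the descriptor */
            return GST_PAD_PROBE_REMOVE;
        }
        return GST_PAD_PROBE_OK;
    }

    /* installed with: */
    gst_pad_add_probe(sink_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_probe, stream, NULL);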
While exposing every format that GStreamer exposes is a sound choice, it's also not the only one, or clearly the best. In particular, when I ran into bug 47642 (or variants thereof) in quartz, it seemed clear that we'd need videoconvert and audioconvert plugins to convert from formats that DirectShow can't express. In that case it was just as simple to ask videoconvert to do e.g. color space conversion to a fixed list of formats that DirectShow sinks would likely be able to accept. In that case trying to expose all of the formats natively supported by a sink is more work for probably no benefit. mfplat looks like it has about the same limitations (no way to express RGB video, for example) with respect to format, so this may also be the course that we want to take there.
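(A sketch of that direction, assuming a decodebin ! videoconvert ! appsink layout and an illustrative format list; neither is what the patch below actually does.)

    GstElement *vconv = gst_element_factory_make("videoconvert", NULL);
    GstCaps *allowed = gst_caps_from_string("video/x-raw, format=(string){ NV12, YV12, YUY2, BGRA }");

    gst_bin_add(GST_BIN(source->container), vconv);
    g_object_set(stream->appsink, "caps", allowed, NULL); /* appsink only accepts the fixed list */
    gst_caps_unref(allowed);
    gst_element_link(vconv, stream->appsink);
    /* the decodebin src pad is then linked to vconv's sink pad in pad-added */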
In that case, I could just wait on pull-preroll instead.
Makes sense, so should I remove the caps listener and use pull-preroll, or keep it as is?
Well, for that matter, you don't exactly need to use gst_app_sink_pull_preroll(); gst_app_sink_pull_sample() also blocks. But I'm led to believe that mfplat requires you to know the caps before you start queuing samples.
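(For reference, the preroll variant would look roughly like this:)

    GstSample *preroll = gst_app_sink_pull_preroll(GST_APP_SINK(stream->appsink));

    if (preroll)
    {
        GstCaps *caps = gst_sample_get_caps(preroll); /* owned by the sample, no extra ref */

        /* build the stream descriptor / IMFMediaType from caps here */
        gst_sample_unref(preroll);
    }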
Well, as of right now, our pipeline is fully set up in the stream-added callback for decodebin, so it's not like we have to modify the pipeline elements once we know our caps. Also, this is completely transparent to Media Foundation; pulling from the preroll queue will not affect the main queue. Of course, in the case where the media source user decides to use a different type (which pretty much means using videoconvert or audioconvert), we just update the appsink "caps" property and the caps are renegotiated.
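(Roughly the shape of that renegotiation path; caps_from_mf_media_type() is a hypothetical inverse of the patch's mf_media_type_from_caps(), not an existing function.)

    static HRESULT media_stream_set_type(struct media_stream *stream, IMFMediaType *type)
    {
        GstCaps *caps = caps_from_mf_media_type(type); /* hypothetical helper */

        if (!caps)
            return MF_E_INVALIDMEDIATYPE;

        /* updating the property makes the appsink advertise the new caps and
         * forces renegotiation upstream (through videoconvert/audioconvert) */
        g_object_set(stream->appsink, "caps", caps, NULL);
        gst_caps_unref(caps);
        return S_OK;
    }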
That seems about right, actually. (I think you also need to restart the pipeline, but that's true no matter what.) With this in mind I'm inclined to think that appsink + static caps renegotiation + gst_app_sink_pull_preroll() is the best fit for mfplat.
I actually thought that GstPad had similar support for static caps, but it doesn't, really; there's only support for setting the caps at creation time, which is usable but at that point not actually better than GstAppSink.
Signed-off-by: Derek Lesho dlesho@codeweavers.com --- dlls/winegstreamer/gst_cbs.c | 13 ++ dlls/winegstreamer/gst_cbs.h | 8 ++ dlls/winegstreamer/gst_private.h | 5 + dlls/winegstreamer/media_source.c | 137 +++++++++++++++++++- dlls/winegstreamer/mfplat.c | 208 ++++++++++++++++++++++++++++++ 5 files changed, 368 insertions(+), 3 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 4755f5b42f1..e56c987eb38 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -404,3 +404,16 @@ void source_all_streams_wrapper(GstElement *element, gpointer user)
call_cb(&cbdata); } + +GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{ + struct cb_data cbdata = { STREAM_PAD_EVENT }; + + cbdata.u.pad_probe_data.pad = pad; + cbdata.u.pad_probe_data.info = info; + cbdata.u.pad_probe_data.user = user; + + call_cb(&cbdata); + + return cbdata.u.pad_probe_data.ret; +} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index d87cc8c21e9..7173c09746e 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -52,6 +52,7 @@ enum CB_TYPE { SOURCE_STREAM_ADDED, SOURCE_STREAM_REMOVED, SOURCE_ALL_STREAMS, + STREAM_PAD_EVENT, MEDIA_SOURCE_MAX, };
@@ -137,6 +138,12 @@ struct cb_data { GstQuery *query; gboolean ret; } query_sink_data; + struct pad_probe_data { + GstPad *pad; + GstPadProbeInfo *info; + gpointer user; + GstPadProbeReturn ret; + } pad_probe_data; } u;
int finished; @@ -172,5 +179,6 @@ GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpoin void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN; +GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user);
#endif diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index ef07d3591e7..86392eea4e3 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -36,6 +36,7 @@ #include "winuser.h" #include "dshow.h" #include "strmif.h" +#include "mfobjects.h" #include "wine/heap.h" #include "wine/strmbase.h"
@@ -54,6 +55,10 @@ void start_dispatch_thread(void) DECLSPEC_HIDDEN;
extern HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj) DECLSPEC_HIDDEN;
+HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN; +GstCaps *make_mf_compatible_caps(GstCaps *caps); +IMFMediaType *mf_media_type_from_caps(GstCaps *caps); + HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN;
#endif /* __GST_PRIVATE_INCLUDED__ */ diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 29af2b72def..345b1fe4528 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -47,14 +47,19 @@ struct media_stream LONG ref; struct media_source *parent_source; IMFMediaEventQueue *event_queue; + IMFStreamDescriptor *descriptor; GstElement *appsink; GstPad *their_src, *my_sink; + GstCaps *their_caps; enum { STREAM_STUB, STREAM_INACTIVE, STREAM_SHUTDOWN, } state; + /* used when in STUB state: */ + DWORD stream_id; + HANDLE caps_event; };
struct media_source @@ -312,6 +317,8 @@ static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) { if (stream->my_sink) gst_object_unref(GST_OBJECT(stream->my_sink)); + if (stream->descriptor) + IMFStreamDescriptor_Release(stream->descriptor); if (stream->event_queue) IMFMediaEventQueue_Release(stream->event_queue); if (stream->parent_source) @@ -393,7 +400,10 @@ static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IM if (stream->state == STREAM_SHUTDOWN) return MF_E_SHUTDOWN;
- return E_NOTIMPL; + IMFStreamDescriptor_AddRef(stream->descriptor); + *descriptor = stream->descriptor; + + return S_OK; }
static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token) @@ -436,9 +446,12 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD IMFMediaSource_AddRef(&source->IMFMediaSource_iface); object->parent_source = source; object->their_src = pad; + object->stream_id = stream_id;
object->state = STREAM_STUB;
+ object->caps_event = CreateEventA(NULL, TRUE, FALSE, NULL); + if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
@@ -455,10 +468,10 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
object->my_sink = gst_element_get_static_pad(object->appsink, "sink"); - gst_pad_set_element_private(object->my_sink, object); - gst_pad_link(object->their_src, object->my_sink);
+ gst_pad_add_probe(object->my_sink, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_listener_wrapper, object, NULL); + gst_element_sync_state_with_parent(object->appsink);
TRACE("->(%p)\n", object); @@ -473,6 +486,43 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD return hr; }
+static HRESULT media_stream_init_desc(struct media_stream *stream) +{ + HRESULT hr; + IMFMediaTypeHandler *type_handler; + IMFMediaType *stream_type = NULL; + + stream->their_caps = gst_caps_fixate(stream->their_caps); + + stream_type = mf_media_type_from_caps(stream->their_caps); + gst_caps_unref(stream->their_caps); + if (!stream_type) + { + hr = E_FAIL; + goto fail; + } + + if (FAILED(hr = MFCreateStreamDescriptor(stream->stream_id, 1, &stream_type, &stream->descriptor))) + goto fail; + + if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(stream->descriptor, &type_handler))) + goto fail; + + if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(type_handler, stream_type))) + goto fail; + + IMFMediaTypeHandler_Release(type_handler); + + stream->state = STREAM_INACTIVE; + + return S_OK; + fail: + ERR("media stream initialization failed with %x\n", hr); + if (type_handler) + IMFMediaTypeHandler_Release(type_handler); + return hr; +} + static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -680,6 +730,23 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+/* If this callback is extended to use any significant win32 APIs, a wrapper function + should be added */ +gboolean stream_found(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) +{ + GstCaps *target_caps; + + /* if the stream can be converted into an MF compatible type, we'll go that route + otherwise, we'll rely on decodebin for the whole process */ + + if ((target_caps = make_mf_compatible_caps(caps))) + { + gst_caps_unref(target_caps); + return FALSE; + } + return TRUE; +} + static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) { struct media_source *source = (struct media_source *) user; @@ -697,6 +764,36 @@ static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
TRACE("stream-id: %u\n", stream_id);
+ /* This codepath is currently never triggered, as we don't need to ever restart the gstreamer pipeline. It is retained in + case this becomes necessary in the future, for example in a case where different media types require different + post-processing elements. */ + for (unsigned int i = 0; i < source->stream_count; i++) + { + DWORD existing_stream_id; + IMFStreamDescriptor *descriptor = source->streams[i]->descriptor; + + if (source->streams[i]->state == STREAM_STUB) + continue; + + if (FAILED(IMFStreamDescriptor_GetStreamIdentifier(descriptor, &existing_stream_id))) + goto leave; + + if (existing_stream_id == stream_id) + { + struct media_stream *existing_stream = source->streams[i]; + GstPadLinkReturn ret; + + TRACE("Found existing stream %p\n", existing_stream); + + existing_stream->their_src = pad; + + if ((ret = gst_pad_link(existing_stream->their_src, existing_stream->my_sink)) != GST_PAD_LINK_OK) + ERR("Error linking decodebin pad to stream %p, err = %d\n", existing_stream, ret); + + goto leave; + } + } + if (FAILED(new_media_stream(source, pad, stream_id, &stream))) { goto leave; @@ -737,6 +834,26 @@ static void source_all_streams(GstElement *element, gpointer user) SetEvent(source->all_streams_event); }
+static GstPadProbeReturn caps_listener(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{ + struct media_stream *stream = (struct media_stream *) user; + GstEvent *event = gst_pad_probe_info_get_event(info); + + if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) + { + GstCaps *caps; + TRACE("got caps for stream %p\n", stream); + + gst_event_parse_caps(event, &caps); + stream->their_caps = gst_caps_copy(caps); + SetEvent(stream->caps_event); + + return GST_PAD_PROBE_REMOVE; + } + + return GST_PAD_PROBE_OK; +} + static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( @@ -787,6 +904,8 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
gst_bin_add(GST_BIN(object->container), object->decodebin);
+ if(!GetEnvironmentVariableA("MF_DECODE_IN_SOURCE", NULL, 0)) + g_signal_connect(object->decodebin, "autoplug-continue", G_CALLBACK(stream_found), object); g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object); g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object); g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object); @@ -812,6 +931,12 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ }
WaitForSingleObject(object->all_streams_event, INFINITE); + for (unsigned int i = 0; i < object->stream_count; i++) + { + WaitForSingleObject(object->streams[i]->caps_event, INFINITE); + if (FAILED(hr = media_stream_init_desc(object->streams[i]))) + goto fail; + }
object->state = SOURCE_STOPPED;
@@ -1337,6 +1462,12 @@ void perform_cb_media_source(struct cb_data *cbdata) source_all_streams(data->element, data->user); break; } + case STREAM_PAD_EVENT: + { + struct pad_probe_data *data = &cbdata->u.pad_probe_data; + cbdata->u.pad_probe_data.ret = caps_listener(data->pad, data->info, data->user); + break; + } default: { ERR("Wrong callback forwarder called\n"); diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c index c996f06211e..3667bc3cc38 100644 --- a/dlls/winegstreamer/mfplat.c +++ b/dlls/winegstreamer/mfplat.c @@ -16,6 +16,11 @@ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA */
+#include "config.h" +#include <gst/gst.h> + +#include "gst_private.h" + #include <stdarg.h>
#include "gst_private.h" @@ -436,3 +441,206 @@ HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj)
return CLASS_E_CLASSNOTAVAILABLE; } + +const static struct +{ + const GUID *subtype; + GstVideoFormat format; +} +uncompressed_formats[] = +{ + {&MFVideoFormat_ARGB32, GST_VIDEO_FORMAT_BGRA}, + {&MFVideoFormat_RGB32, GST_VIDEO_FORMAT_BGRx}, + {&MFVideoFormat_RGB24, GST_VIDEO_FORMAT_BGR}, + {&MFVideoFormat_RGB565, GST_VIDEO_FORMAT_BGR16}, + {&MFVideoFormat_RGB555, GST_VIDEO_FORMAT_BGR15}, +}; + +/* caps will be modified to represent the exact type needed for the format */ +static IMFMediaType* transform_to_media_type(GstCaps *caps) +{ + IMFMediaType *media_type; + GstStructure *info; + const char *mime_type; + + if (TRACE_ON(mfplat)) + { + gchar *human_readable = gst_caps_to_string(caps); + TRACE("caps = %s\n", debugstr_a(human_readable)); + g_free(human_readable); + } + + if (FAILED(MFCreateMediaType(&media_type))) + { + return NULL; + } + + info = gst_caps_get_structure(caps, 0); + mime_type = gst_structure_get_name(info); + + if (!(strncmp(mime_type, "video", 5))) + { + GstVideoInfo video_info; + + if (!(gst_video_info_from_caps(&video_info, caps))) + { + return NULL; + } + + IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + + IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, ((UINT64)video_info.width << 32) | video_info.height); + + IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_RATE, ((UINT64)video_info.fps_n << 32) | video_info.fps_d); + + if (!(strcmp(mime_type, "video/x-raw"))) + { + GUID fourcc_subtype = MFVideoFormat_Base; + unsigned int i; + + IMFMediaType_SetUINT32(media_type, &MF_MT_COMPRESSED, FALSE); + + /* First try FOURCC */ + if ((fourcc_subtype.Data1 = gst_video_format_to_fourcc(video_info.finfo->format))) + { + IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &fourcc_subtype); + } + else + { + for (i = 0; i < ARRAY_SIZE(uncompressed_formats); i++) + { + if (uncompressed_formats[i].format == video_info.finfo->format) + { + IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, uncompressed_formats[i].subtype); + break; + } + } + if (i == ARRAY_SIZE(uncompressed_formats)) + FIXME("Unrecognized format.\n"); + } + } + else + { + FIXME("Unrecognized video format %s\n", mime_type); + IMFMediaType_Release(media_type); + return NULL; + } + } + else if (!(strncmp(mime_type, "audio", 5))) + { + gint rate, channels, bitrate; + guint64 channel_mask; + IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + + if (gst_structure_get_int(info, "rate", &rate)) + IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, rate); + + if (gst_structure_get_int(info, "channels", &channels)) + IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, channels); + + if (gst_structure_get(info, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL)) + IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_CHANNEL_MASK, (DWORD)channel_mask); + + if (gst_structure_get_int(info, "bitrate", &bitrate)) + IMFMediaType_SetUINT32(media_type, &MF_MT_AVG_BITRATE, bitrate); + + if (!(strcmp(mime_type, "audio/x-raw"))) + { + const char *format; + if ((format = gst_structure_get_string(info, "format"))) + { + char type; + unsigned int bits_per_sample; + char endian[2]; + char new_format[6]; + if ((strlen(format) > 5) || (sscanf(format, "%c%u%2c", &type, &bits_per_sample, endian) < 2)) + { + FIXME("Unhandled audio format %s\n", format); + IMFMediaType_Release(media_type); + return NULL; + } + + if (type == 'F') + { + IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float); + } + else if (type == 'U' || type == 'S') + { + 
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM); + if (bits_per_sample == 8) + type = 'U'; + else + type = 'S'; + } + else + { + FIXME("Unrecognized audio format: %s\n", format); + IMFMediaType_Release(media_type); + return NULL; + } + + IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample); + + if (endian[0] == 'B') + endian[0] = 'L'; + + sprintf(new_format, "%c%u%.2s", type, bits_per_sample, bits_per_sample > 8 ? endian : 0); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, new_format, NULL); + } + else + { + ERR("Failed to get audio format\n"); + } + } + else + { + FIXME("Unrecognized audio format %s\n", mime_type); + IMFMediaType_Release(media_type); + return NULL; + } + } + else + { + IMFMediaType_Release(media_type); + return NULL; + } + + return media_type; +} + +/* returns NULL if doesn't match exactly */ +IMFMediaType *mf_media_type_from_caps(GstCaps *caps) +{ + GstCaps *writeable_caps; + IMFMediaType *ret; + + writeable_caps = gst_caps_copy(caps); + ret = transform_to_media_type(writeable_caps); + + if (!(gst_caps_is_equal(caps, writeable_caps))) + { + IMFMediaType_Release(ret); + ret = NULL; + } + gst_caps_unref(writeable_caps); + return ret; +} + +GstCaps *make_mf_compatible_caps(GstCaps *caps) +{ + GstCaps *ret; + IMFMediaType *media_type; + + ret = gst_caps_copy(caps); + + if ((media_type = transform_to_media_type(ret))) + IMFMediaType_Release(media_type); + + if (!media_type) + { + gst_caps_unref(ret); + return NULL; + } + + return ret; +}
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 13 ++ dlls/winegstreamer/gst_cbs.h | 8 ++ dlls/winegstreamer/gst_private.h | 5 + dlls/winegstreamer/media_source.c | 137 +++++++++++++++++++- dlls/winegstreamer/mfplat.c | 208 ++++++++++++++++++++++++++++++ 5 files changed, 368 insertions(+), 3 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 4755f5b42f1..e56c987eb38 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -404,3 +404,16 @@ void source_all_streams_wrapper(GstElement *element, gpointer user)
call_cb(&cbdata);
}
+GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct cb_data cbdata = { STREAM_PAD_EVENT };
- cbdata.u.pad_probe_data.pad = pad;
- cbdata.u.pad_probe_data.info = info;
- cbdata.u.pad_probe_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.pad_probe_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index d87cc8c21e9..7173c09746e 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -52,6 +52,7 @@ enum CB_TYPE { SOURCE_STREAM_ADDED, SOURCE_STREAM_REMOVED, SOURCE_ALL_STREAMS,
- STREAM_PAD_EVENT, MEDIA_SOURCE_MAX,
};
@@ -137,6 +138,12 @@ struct cb_data { GstQuery *query; gboolean ret; } query_sink_data;
struct pad_probe_data {
GstPad *pad;
GstPadProbeInfo *info;
gpointer user;
GstPadProbeReturn ret;
} pad_probe_data;
} u;
int finished;
@@ -172,5 +179,6 @@ GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpoin void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN; +GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user);
#endif diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index ef07d3591e7..86392eea4e3 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -36,6 +36,7 @@ #include "winuser.h" #include "dshow.h" #include "strmif.h" +#include "mfobjects.h" #include "wine/heap.h" #include "wine/strmbase.h"
@@ -54,6 +55,10 @@ void start_dispatch_thread(void) DECLSPEC_HIDDEN;
extern HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj) DECLSPEC_HIDDEN;
+HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN; +GstCaps *make_mf_compatible_caps(GstCaps *caps); +IMFMediaType *mf_media_type_from_caps(GstCaps *caps);
Missing DECLSPEC_HIDDEN.
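(That is:)

    GstCaps *make_mf_compatible_caps(GstCaps *caps) DECLSPEC_HIDDEN;
    IMFMediaType *mf_media_type_from_caps(GstCaps *caps) DECLSPEC_HIDDEN;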
HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN;
#endif /* __GST_PRIVATE_INCLUDED__ */ diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 29af2b72def..345b1fe4528 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -47,14 +47,19 @@ struct media_stream LONG ref; struct media_source *parent_source; IMFMediaEventQueue *event_queue;
- IMFStreamDescriptor *descriptor; GstElement *appsink; GstPad *their_src, *my_sink;
- GstCaps *their_caps; enum { STREAM_STUB, STREAM_INACTIVE, STREAM_SHUTDOWN, } state;
- /* used when in STUB state: */
- DWORD stream_id;
- HANDLE caps_event;
};
struct media_source @@ -312,6 +317,8 @@ static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) { if (stream->my_sink) gst_object_unref(GST_OBJECT(stream->my_sink));
if (stream->descriptor)
IMFStreamDescriptor_Release(stream->descriptor); if (stream->event_queue) IMFMediaEventQueue_Release(stream->event_queue); if (stream->parent_source)
@@ -393,7 +400,10 @@ static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IM if (stream->state == STREAM_SHUTDOWN) return MF_E_SHUTDOWN;
- return E_NOTIMPL;
- IMFStreamDescriptor_AddRef(stream->descriptor);
- *descriptor = stream->descriptor;
- return S_OK;
}
static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token) @@ -436,9 +446,12 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD IMFMediaSource_AddRef(&source->IMFMediaSource_iface); object->parent_source = source; object->their_src = pad;
object->stream_id = stream_id;
object->state = STREAM_STUB;
object->caps_event = CreateEventA(NULL, TRUE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
@@ -455,10 +468,10 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
object->my_sink = gst_element_get_static_pad(object->appsink, "sink");
- gst_pad_set_element_private(object->my_sink, object);
- gst_pad_link(object->their_src, object->my_sink);
gst_pad_add_probe(object->my_sink, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_listener_wrapper, object, NULL);
gst_element_sync_state_with_parent(object->appsink);
TRACE("->(%p)\n", object);
@@ -473,6 +486,43 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD return hr; }
+static HRESULT media_stream_init_desc(struct media_stream *stream) +{
- HRESULT hr;
- IMFMediaTypeHandler *type_handler;
- IMFMediaType *stream_type = NULL;
- stream->their_caps = gst_caps_fixate(stream->their_caps);
I think this is unnecessary; the caps should already be fixed.
- stream_type = mf_media_type_from_caps(stream->their_caps);
- gst_caps_unref(stream->their_caps);
This seems a bit error-prone. I'd suggest getting rid of the field and instead calling gst_caps_copy(gst_pad_get_current_caps(...))
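(A sketch of that suggestion, not the committed code; it assumes the caps have already been negotiated, so gst_pad_get_current_caps() does not return NULL here.)

    GstCaps *current = gst_pad_get_current_caps(stream->my_sink); /* negotiated caps, new ref */
    GstCaps *caps = gst_caps_copy(current); /* writable copy, since it may be adjusted */

    gst_caps_unref(current);
    stream_type = mf_media_type_from_caps(caps);
    gst_caps_unref(caps);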
- if (!stream_type)
- {
hr = E_FAIL;
goto fail;
- }
- if (FAILED(hr = MFCreateStreamDescriptor(stream->stream_id, 1, &stream_type, &stream->descriptor)))
goto fail;
- if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(stream->descriptor, &type_handler)))
goto fail;
- if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(type_handler, stream_type)))
goto fail;
- IMFMediaTypeHandler_Release(type_handler);
- stream->state = STREAM_INACTIVE;
- return S_OK;
- fail:
- ERR("media stream initialization failed with %x\n", hr);
While this does tell you that one of the mfplat functions failed, it doesn't actually tell you which one. There's similar patterns elsewhere in this series.
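(For example, reporting the specific failing call:)

    if (FAILED(hr = MFCreateStreamDescriptor(stream->stream_id, 1, &stream_type, &stream->descriptor)))
    {
        ERR("Failed to create stream descriptor, hr %#x.\n", hr);
        goto fail;
    }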
- if (type_handler)
IMFMediaTypeHandler_Release(type_handler);
- return hr;
+}
static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -680,6 +730,23 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+/* If this callback is extended to use any significant win32 APIs, a wrapper function
- should be added */
+gboolean stream_found(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) +{
- GstCaps *target_caps;
- /* if the stream can be converted into an MF compatible type, we'll go that route
otherwise, we'll rely on decodebin for the whole process */
I think we should leave this part out for now.
- if ((target_caps = make_mf_compatible_caps(caps)))
- {
gst_caps_unref(target_caps);
return FALSE;
- }
- return TRUE;
+}
static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) { struct media_source *source = (struct media_source *) user; @@ -697,6 +764,36 @@ static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
TRACE("stream-id: %u\n", stream_id);
- /* This codepath is currently never triggered, as we don't need to ever restart the gstreamer pipeline. It is retained in
case this becomes necessary in the future, for example in a case where different media types require different
post-processing elements. */
In general, if a code path is dead, it shouldn't be introduced (yet).
- for (unsigned int i = 0; i < source->stream_count; i++)
- {
DWORD existing_stream_id;
IMFStreamDescriptor *descriptor = source->streams[i]->descriptor;
if (source->streams[i]->state == STREAM_STUB)
continue;
if (FAILED(IMFStreamDescriptor_GetStreamIdentifier(descriptor, &existing_stream_id)))
goto leave;
if (existing_stream_id == stream_id)
{
struct media_stream *existing_stream = source->streams[i];
GstPadLinkReturn ret;
TRACE("Found existing stream %p\n", existing_stream);
existing_stream->their_src = pad;
if ((ret = gst_pad_link(existing_stream->their_src, existing_stream->my_sink)) != GST_PAD_LINK_OK)
ERR("Error linking decodebin pad to stream %p, err = %d\n", existing_stream, ret);
goto leave;
}
- }
- if (FAILED(new_media_stream(source, pad, stream_id, &stream))) { goto leave;
@@ -737,6 +834,26 @@ static void source_all_streams(GstElement *element, gpointer user) SetEvent(source->all_streams_event); }
+static GstPadProbeReturn caps_listener(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct media_stream *stream = (struct media_stream *) user;
The cast is unnecessary, here and elsewhere.
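I.e., since gpointer is plain void *, C converts it implicitly and the declaration can just be:

    struct media_stream *stream = user;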
- GstEvent *event = gst_pad_probe_info_get_event(info);
- if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS)
- {
GstCaps *caps;
TRACE("got caps for stream %p\n", stream);
gst_event_parse_caps(event, &caps);
stream->their_caps = gst_caps_copy(caps);
SetEvent(stream->caps_event);
return GST_PAD_PROBE_REMOVE;
- }
- return GST_PAD_PROBE_OK;
+}
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( @@ -787,6 +904,8 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
gst_bin_add(GST_BIN(object->container), object->decodebin);
- if(!GetEnvironmentVariableA("MF_DECODE_IN_SOURCE", NULL, 0))
Debugging code?
g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object); g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object); g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object);g_signal_connect(object->decodebin, "autoplug-continue", G_CALLBACK(stream_found), object);
@@ -812,6 +931,12 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ }
WaitForSingleObject(object->all_streams_event, INFINITE);
- for (unsigned int i = 0; i < object->stream_count; i++)
Misplaced declaration.
{
WaitForSingleObject(object->streams[i]->caps_event, INFINITE);
if (FAILED(hr = media_stream_init_desc(object->streams[i])))
goto fail;
}
object->state = SOURCE_STOPPED;
@@ -1337,6 +1462,12 @@ void perform_cb_media_source(struct cb_data *cbdata) source_all_streams(data->element, data->user); break; }
- case STREAM_PAD_EVENT:
{
struct pad_probe_data *data = &cbdata->u.pad_probe_data;
cbdata->u.pad_probe_data.ret = caps_listener(data->pad, data->info, data->user);
break;
default: { ERR("Wrong callback forwarder called\n");}
diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c index c996f06211e..3667bc3cc38 100644 --- a/dlls/winegstreamer/mfplat.c +++ b/dlls/winegstreamer/mfplat.c @@ -16,6 +16,11 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h" +#include <gst/gst.h>
+#include "gst_private.h"
#include <stdarg.h>
#include "gst_private.h" @@ -436,3 +441,206 @@ HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj)
return CLASS_E_CLASSNOTAVAILABLE;
}
+const static struct
"static const"
+{
- const GUID *subtype;
- GstVideoFormat format;
+} +uncompressed_formats[] =
"uncompressed_video_formats", probably.
+{
- {&MFVideoFormat_ARGB32, GST_VIDEO_FORMAT_BGRA},
- {&MFVideoFormat_RGB32, GST_VIDEO_FORMAT_BGRx},
- {&MFVideoFormat_RGB24, GST_VIDEO_FORMAT_BGR},
- {&MFVideoFormat_RGB565, GST_VIDEO_FORMAT_BGR16},
- {&MFVideoFormat_RGB555, GST_VIDEO_FORMAT_BGR15},
+};
+/* caps will be modified to represent the exact type needed for the format */
This seems wrong in general.
+static IMFMediaType* transform_to_media_type(GstCaps *caps) +{
- IMFMediaType *media_type;
- GstStructure *info;
- const char *mime_type;
- if (TRACE_ON(mfplat))
- {
gchar *human_readable = gst_caps_to_string(caps);
TRACE("caps = %s\n", debugstr_a(human_readable));
g_free(human_readable);
- }
- if (FAILED(MFCreateMediaType(&media_type)))
- {
return NULL;
- }
- info = gst_caps_get_structure(caps, 0);
- mime_type = gst_structure_get_name(info);
- if (!(strncmp(mime_type, "video", 5)))
There are a lot of superfluous parentheses after logical not operators in this series; this one seems especially superfluous.
- {
GstVideoInfo video_info;
if (!(gst_video_info_from_caps(&video_info, caps)))
{
return NULL;
}
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, ((UINT64)video_info.width << 32) | video_info.height);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_RATE, ((UINT64)video_info.fps_n << 32) | video_info.fps_d);
if (!(strcmp(mime_type, "video/x-raw")))
{
GUID fourcc_subtype = MFVideoFormat_Base;
unsigned int i;
IMFMediaType_SetUINT32(media_type, &MF_MT_COMPRESSED, FALSE);
/* First try FOURCC */
if ((fourcc_subtype.Data1 = gst_video_format_to_fourcc(video_info.finfo->format)))
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &fourcc_subtype);
}
else
{
for (i = 0; i < ARRAY_SIZE(uncompressed_formats); i++)
{
if (uncompressed_formats[i].format == video_info.finfo->format)
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, uncompressed_formats[i].subtype);
break;
}
}
if (i == ARRAY_SIZE(uncompressed_formats))
FIXME("Unrecognized format.\n");
In which case the function should probably fail.
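I.e. something along these lines, so the caller gets NULL instead of a half-initialized type (sketch, using the names from this patch):

    if (i == ARRAY_SIZE(uncompressed_formats))
    {
        FIXME("Unrecognized format.\n");
        IMFMediaType_Release(media_type);
        return NULL;
    }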
}
}
else
{
FIXME("Unrecognized video format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else if (!(strncmp(mime_type, "audio", 5)))
- {
gint rate, channels, bitrate;
guint64 channel_mask;
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio);
if (gst_structure_get_int(info, "rate", &rate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, rate);
if (gst_structure_get_int(info, "channels", &channels))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, channels);
if (gst_structure_get(info, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_CHANNEL_MASK, (DWORD)channel_mask);
Superfluous cast.
if (gst_structure_get_int(info, "bitrate", &bitrate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AVG_BITRATE, bitrate);
if (!(strcmp(mime_type, "audio/x-raw")))
{
const char *format;
if ((format = gst_structure_get_string(info, "format")))
You may find it easier to use gst_audio_info_from_caps() here.
{
char type;
unsigned int bits_per_sample;
char endian[2];
char new_format[6];
if ((strlen(format) > 5) || (sscanf(format, "%c%u%2c", &type, &bits_per_sample, endian) < 2))
{
FIXME("Unhandled audio format %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
if (type == 'F')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float);
}
else if (type == 'U' || type == 'S')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM);
if (bits_per_sample == 8)
type = 'U';
else
type = 'S';
}
else
{
FIXME("Unrecognized audio format: %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample);
if (endian[0] == 'B')
endian[0] = 'L';
sprintf(new_format, "%c%u%.2s", type, bits_per_sample, bits_per_sample > 8 ? endian : 0);
gst_caps_set_simple(caps, "format", G_TYPE_STRING, new_format, NULL);
}
else
{
ERR("Failed to get audio format\n");
}
}
else
{
FIXME("Unrecognized audio format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else
- {
IMFMediaType_Release(media_type);
return NULL;
- }
- return media_type;
+}
+/* returns NULL if doesn't match exactly */ +IMFMediaType *mf_media_type_from_caps(GstCaps *caps) +{
- GstCaps *writeable_caps;
- IMFMediaType *ret;
- writeable_caps = gst_caps_copy(caps);
- ret = transform_to_media_type(writeable_caps);
- if (!(gst_caps_is_equal(caps, writeable_caps)))
- {
IMFMediaType_Release(ret);
ret = NULL;
- }
- gst_caps_unref(writeable_caps);
- return ret;
+}
+GstCaps *make_mf_compatible_caps(GstCaps *caps) +{
- GstCaps *ret;
- IMFMediaType *media_type;
- ret = gst_caps_copy(caps);
- if ((media_type = transform_to_media_type(ret)))
IMFMediaType_Release(media_type);
- if (!media_type)
- {
gst_caps_unref(ret);
return NULL;
- }
- return ret;
+}
On 9/9/20 7:00 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 13 ++ dlls/winegstreamer/gst_cbs.h | 8 ++ dlls/winegstreamer/gst_private.h | 5 + dlls/winegstreamer/media_source.c | 137 +++++++++++++++++++- dlls/winegstreamer/mfplat.c | 208 ++++++++++++++++++++++++++++++ 5 files changed, 368 insertions(+), 3 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 4755f5b42f1..e56c987eb38 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -404,3 +404,16 @@ void source_all_streams_wrapper(GstElement *element, gpointer user)
call_cb(&cbdata);
}
+GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct cb_data cbdata = { STREAM_PAD_EVENT };
- cbdata.u.pad_probe_data.pad = pad;
- cbdata.u.pad_probe_data.info = info;
- cbdata.u.pad_probe_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.pad_probe_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index d87cc8c21e9..7173c09746e 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -52,6 +52,7 @@ enum CB_TYPE { SOURCE_STREAM_ADDED, SOURCE_STREAM_REMOVED, SOURCE_ALL_STREAMS,
- STREAM_PAD_EVENT, MEDIA_SOURCE_MAX, };
@@ -137,6 +138,12 @@ struct cb_data { GstQuery *query; gboolean ret; } query_sink_data;
struct pad_probe_data {
GstPad *pad;
GstPadProbeInfo *info;
gpointer user;
GstPadProbeReturn ret;
} pad_probe_data; } u; int finished;
@@ -172,5 +179,6 @@ GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpoin void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN; +GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user);
#endif diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index ef07d3591e7..86392eea4e3 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -36,6 +36,7 @@ #include "winuser.h" #include "dshow.h" #include "strmif.h" +#include "mfobjects.h" #include "wine/heap.h" #include "wine/strmbase.h"
@@ -54,6 +55,10 @@ void start_dispatch_thread(void) DECLSPEC_HIDDEN;
extern HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj) DECLSPEC_HIDDEN;
+HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN; +GstCaps *make_mf_compatible_caps(GstCaps *caps); +IMFMediaType *mf_media_type_from_caps(GstCaps *caps);
Missing DECLSPEC_HIDDEN.
Ack.
HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN;
#endif /* __GST_PRIVATE_INCLUDED__ */
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 29af2b72def..345b1fe4528 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -47,14 +47,19 @@ struct media_stream LONG ref; struct media_source *parent_source; IMFMediaEventQueue *event_queue;
IMFStreamDescriptor *descriptor; GstElement *appsink; GstPad *their_src, *my_sink;
GstCaps *their_caps; enum { STREAM_STUB, STREAM_INACTIVE, STREAM_SHUTDOWN, } state;
/* used when in STUB state: */
DWORD stream_id;
HANDLE caps_event; };
struct media_source
@@ -312,6 +317,8 @@ static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) { if (stream->my_sink) gst_object_unref(GST_OBJECT(stream->my_sink));
if (stream->descriptor)
IMFStreamDescriptor_Release(stream->descriptor); if (stream->event_queue) IMFMediaEventQueue_Release(stream->event_queue); if (stream->parent_source)
@@ -393,7 +400,10 @@ static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IM if (stream->state == STREAM_SHUTDOWN) return MF_E_SHUTDOWN;
- return E_NOTIMPL;
IMFStreamDescriptor_AddRef(stream->descriptor);
*descriptor = stream->descriptor;
return S_OK; }
static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token)
@@ -436,9 +446,12 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD IMFMediaSource_AddRef(&source->IMFMediaSource_iface); object->parent_source = source; object->their_src = pad;
object->stream_id = stream_id;
object->state = STREAM_STUB;
object->caps_event = CreateEventA(NULL, TRUE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
@@ -455,10 +468,10 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
object->my_sink = gst_element_get_static_pad(object->appsink, "sink");
- gst_pad_set_element_private(object->my_sink, object);
gst_pad_link(object->their_src, object->my_sink);
gst_pad_add_probe(object->my_sink, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_listener_wrapper, object, NULL);
gst_element_sync_state_with_parent(object->appsink); TRACE("->(%p)\n", object);
@@ -473,6 +486,43 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD return hr; }
+static HRESULT media_stream_init_desc(struct media_stream *stream) +{
- HRESULT hr;
- IMFMediaTypeHandler *type_handler;
- IMFMediaType *stream_type = NULL;
- stream->their_caps = gst_caps_fixate(stream->their_caps);
I think this is unnecessary; the caps should already be fixed.
Ack.
- stream_type = mf_media_type_from_caps(stream->their_caps);
- gst_caps_unref(stream->their_caps);
This seems a bit error-prone. I'd suggest getting rid of the field and instead calling gst_caps_copy(gst_pad_get_current_caps(...))
Can we be sure that the caps will be set after the pad probe callback is complete?
- if (!stream_type)
- {
hr = E_FAIL;
goto fail;
- }
- if (FAILED(hr = MFCreateStreamDescriptor(stream->stream_id, 1, &stream_type, &stream->descriptor)))
goto fail;
- if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(stream->descriptor, &type_handler)))
goto fail;
- if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(type_handler, stream_type)))
goto fail;
- IMFMediaTypeHandler_Release(type_handler);
- stream->state = STREAM_INACTIVE;
- return S_OK;
- fail:
- ERR("media stream initialization failed with %x\n", hr);
While this does tell you that one of the mfplat functions failed, it doesn't actually tell you which one. There are similar patterns elsewhere in this series.
Is that a problem? The likelihood of one of these functions failing is very slim, and in that case you could use +mfplat to see the last function to run before the error. If conversion to a media type failed, the relevant conversion function will also emit an error.
- if (type_handler)
IMFMediaTypeHandler_Release(type_handler);
- return hr;
+}
- static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface);
@@ -680,6 +730,23 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+/* If this callback is extended to use any significant win32 APIs, a wrapper function
- should be added */
+gboolean stream_found(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) +{
- GstCaps *target_caps;
- /* if the stream can be converted into an MF compatible type, we'll go that route
otherwise, we'll rely on decodebin for the whole process */
I think we should leave this part out for now.
I disagree; without it, a lot of the compressed media type conversion code becomes dead, and the codepath this triggers is closer to the Windows one.
- if ((target_caps = make_mf_compatible_caps(caps)))
- {
gst_caps_unref(target_caps);
return FALSE;
- }
- return TRUE;
+}
- static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) { struct media_source *source = (struct media_source *) user;
@@ -697,6 +764,36 @@ static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
TRACE("stream-id: %u\n", stream_id);
- /* This codepath is currently never triggered, as we don't need to ever restart the gstreamer pipeline. It is retained in
case this becomes necessary in the future, for example in a case where different media types require different
post-processing elements. */
In general, if a code path is dead, it shouldn't be introduced (yet).
Ack.
- for (unsigned int i = 0; i < source->stream_count; i++)
- {
DWORD existing_stream_id;
IMFStreamDescriptor *descriptor = source->streams[i]->descriptor;
if (source->streams[i]->state == STREAM_STUB)
continue;
if (FAILED(IMFStreamDescriptor_GetStreamIdentifier(descriptor, &existing_stream_id)))
goto leave;
if (existing_stream_id == stream_id)
{
struct media_stream *existing_stream = source->streams[i];
GstPadLinkReturn ret;
TRACE("Found existing stream %p\n", existing_stream);
existing_stream->their_src = pad;
if ((ret = gst_pad_link(existing_stream->their_src, existing_stream->my_sink)) != GST_PAD_LINK_OK)
ERR("Error linking decodebin pad to stream %p, err = %d\n", existing_stream, ret);
goto leave;
}
- }
if (FAILED(new_media_stream(source, pad, stream_id, &stream))) { goto leave;
@@ -737,6 +834,26 @@ static void source_all_streams(GstElement *element, gpointer user) SetEvent(source->all_streams_event); }
+static GstPadProbeReturn caps_listener(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct media_stream *stream = (struct media_stream *) user;
The cast is unnecessary, here and elsewhere.
Ack.
- GstEvent *event = gst_pad_probe_info_get_event(info);
- if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS)
- {
GstCaps *caps;
TRACE("got caps for stream %p\n", stream);
gst_event_parse_caps(event, &caps);
stream->their_caps = gst_caps_copy(caps);
SetEvent(stream->caps_event);
return GST_PAD_PROBE_REMOVE;
- }
- return GST_PAD_PROBE_OK;
+}
- static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
@@ -787,6 +904,8 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
gst_bin_add(GST_BIN(object->container), object->decodebin);
- if(!GetEnvironmentVariableA("MF_DECODE_IN_SOURCE", NULL, 0))
Debugging code?
Yeah, it's useful for finding out whether an issue is caused by the real decoder MFT path or by the topology loader (as it skips all that).
g_signal_connect(object->decodebin, "autoplug-continue", G_CALLBACK(stream_found), object); g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object); g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object); g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object);
@@ -812,6 +931,12 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ }
WaitForSingleObject(object->all_streams_event, INFINITE);
- for (unsigned int i = 0; i < object->stream_count; i++)
Misplaced declaration.
Ack.
{
WaitForSingleObject(object->streams[i]->caps_event, INFINITE);
if (FAILED(hr = media_stream_init_desc(object->streams[i])))
goto fail;
}
object->state = SOURCE_STOPPED;
@@ -1337,6 +1462,12 @@ void perform_cb_media_source(struct cb_data *cbdata) source_all_streams(data->element, data->user); break; }
- case STREAM_PAD_EVENT:
{
struct pad_probe_data *data = &cbdata->u.pad_probe_data;
cbdata->u.pad_probe_data.ret = caps_listener(data->pad, data->info, data->user);
break;
} default: { ERR("Wrong callback forwarder called\n");
diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c index c996f06211e..3667bc3cc38 100644 --- a/dlls/winegstreamer/mfplat.c +++ b/dlls/winegstreamer/mfplat.c @@ -16,6 +16,11 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h" +#include <gst/gst.h>
+#include "gst_private.h"
#include <stdarg.h>
#include "gst_private.h"
@@ -436,3 +441,206 @@ HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj)
return CLASS_E_CLASSNOTAVAILABLE;
}
+const static struct
"static const"
Ack.
+{
- const GUID *subtype;
- GstVideoFormat format;
+} +uncompressed_formats[] =
"uncompressed_video_formats", probably.
Ack.
+{
- {&MFVideoFormat_ARGB32, GST_VIDEO_FORMAT_BGRA},
- {&MFVideoFormat_RGB32, GST_VIDEO_FORMAT_BGRx},
- {&MFVideoFormat_RGB24, GST_VIDEO_FORMAT_BGR},
- {&MFVideoFormat_RGB565, GST_VIDEO_FORMAT_BGR16},
- {&MFVideoFormat_RGB555, GST_VIDEO_FORMAT_BGR15},
+};
+/* caps will be modified to represent the exact type needed for the format */
This seems wrong in general.
Why?
+static IMFMediaType* transform_to_media_type(GstCaps *caps) +{
- IMFMediaType *media_type;
- GstStructure *info;
- const char *mime_type;
- if (TRACE_ON(mfplat))
- {
gchar *human_readable = gst_caps_to_string(caps);
TRACE("caps = %s\n", debugstr_a(human_readable));
g_free(human_readable);
- }
- if (FAILED(MFCreateMediaType(&media_type)))
- {
return NULL;
- }
- info = gst_caps_get_structure(caps, 0);
- mime_type = gst_structure_get_name(info);
- if (!(strncmp(mime_type, "video", 5)))
There are a lot of superfluous parentheses after logical not operators in this series; this one seems especially superfluous.
Ack.
- {
GstVideoInfo video_info;
if (!(gst_video_info_from_caps(&video_info, caps)))
{
return NULL;
}
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, ((UINT64)video_info.width << 32) | video_info.height);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_RATE, ((UINT64)video_info.fps_n << 32) | video_info.fps_d);
if (!(strcmp(mime_type, "video/x-raw")))
{
GUID fourcc_subtype = MFVideoFormat_Base;
unsigned int i;
IMFMediaType_SetUINT32(media_type, &MF_MT_COMPRESSED, FALSE);
/* First try FOURCC */
if ((fourcc_subtype.Data1 = gst_video_format_to_fourcc(video_info.finfo->format)))
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &fourcc_subtype);
}
else
{
for (i = 0; i < ARRAY_SIZE(uncompressed_formats); i++)
{
if (uncompressed_formats[i].format == video_info.finfo->format)
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, uncompressed_formats[i].subtype);
break;
}
}
if (i == ARRAY_SIZE(uncompressed_formats))
FIXME("Unrecognized format.\n");
In which case the function should probably fail.
Ack.
}
}
else
{
FIXME("Unrecognized video format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else if (!(strncmp(mime_type, "audio", 5)))
- {
gint rate, channels, bitrate;
guint64 channel_mask;
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio);
if (gst_structure_get_int(info, "rate", &rate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, rate);
if (gst_structure_get_int(info, "channels", &channels))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, channels);
if (gst_structure_get(info, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_CHANNEL_MASK, (DWORD)channel_mask);
Superfluous cast.
Ack.
if (gst_structure_get_int(info, "bitrate", &bitrate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AVG_BITRATE, bitrate);
if (!(strcmp(mime_type, "audio/x-raw")))
{
const char *format;
if ((format = gst_structure_get_string(info, "format")))
You may find it easier to use gst_audio_info_from_caps() here.
Does this work on compressed audio types as well?
{
char type;
unsigned int bits_per_sample;
char endian[2];
char new_format[6];
if ((strlen(format) > 5) || (sscanf(format, "%c%u%2c", &type, &bits_per_sample, endian) < 2))
{
FIXME("Unhandled audio format %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
if (type == 'F')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float);
}
else if (type == 'U' || type == 'S')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM);
if (bits_per_sample == 8)
type = 'U';
else
type = 'S';
}
else
{
FIXME("Unrecognized audio format: %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample);
if (endian[0] == 'B')
endian[0] = 'L';
sprintf(new_format, "%c%u%.2s", type, bits_per_sample, bits_per_sample > 8 ? endian : 0);
gst_caps_set_simple(caps, "format", G_TYPE_STRING, new_format, NULL);
}
else
{
ERR("Failed to get audio format\n");
}
}
else
{
FIXME("Unrecognized audio format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else
- {
IMFMediaType_Release(media_type);
return NULL;
- }
- return media_type;
+}
+/* returns NULL if doesn't match exactly */ +IMFMediaType *mf_media_type_from_caps(GstCaps *caps) +{
- GstCaps *writeable_caps;
- IMFMediaType *ret;
- writeable_caps = gst_caps_copy(caps);
- ret = transform_to_media_type(writeable_caps);
- if (!(gst_caps_is_equal(caps, writeable_caps)))
- {
IMFMediaType_Release(ret);
ret = NULL;
- }
- gst_caps_unref(writeable_caps);
- return ret;
+}
+GstCaps *make_mf_compatible_caps(GstCaps *caps) +{
- GstCaps *ret;
- IMFMediaType *media_type;
- ret = gst_caps_copy(caps);
- if ((media_type = transform_to_media_type(ret)))
IMFMediaType_Release(media_type);
- if (!media_type)
- {
gst_caps_unref(ret);
return NULL;
- }
- return ret;
+}
On 9/10/20 12:02 PM, Derek Lesho wrote:
On 9/9/20 7:00 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 13 ++ dlls/winegstreamer/gst_cbs.h | 8 ++ dlls/winegstreamer/gst_private.h | 5 + dlls/winegstreamer/media_source.c | 137 +++++++++++++++++++- dlls/winegstreamer/mfplat.c | 208 ++++++++++++++++++++++++++++++ 5 files changed, 368 insertions(+), 3 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index 4755f5b42f1..e56c987eb38 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -404,3 +404,16 @@ void source_all_streams_wrapper(GstElement *element, gpointer user)
call_cb(&cbdata);
}
+GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct cb_data cbdata = { STREAM_PAD_EVENT };
- cbdata.u.pad_probe_data.pad = pad;
- cbdata.u.pad_probe_data.info = info;
- cbdata.u.pad_probe_data.user = user;
- call_cb(&cbdata);
- return cbdata.u.pad_probe_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index d87cc8c21e9..7173c09746e 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -52,6 +52,7 @@ enum CB_TYPE { SOURCE_STREAM_ADDED, SOURCE_STREAM_REMOVED, SOURCE_ALL_STREAMS,
- STREAM_PAD_EVENT, MEDIA_SOURCE_MAX, };
@@ -137,6 +138,12 @@ struct cb_data { GstQuery *query; gboolean ret; } query_sink_data;
struct pad_probe_data {
GstPad *pad;
GstPadProbeInfo *info;
gpointer user;
GstPadProbeReturn ret;
} pad_probe_data; } u; int finished;
@@ -172,5 +179,6 @@ GstBusSyncReply watch_source_bus_wrapper(GstBus *bus, GstMessage *message, gpoin void source_stream_added_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_stream_removed_wrapper(GstElement *element, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; void source_all_streams_wrapper(GstElement *element, gpointer user) DECLSPEC_HIDDEN; +GstPadProbeReturn caps_listener_wrapper(GstPad *pad, GstPadProbeInfo *info, gpointer user);
#endif diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index ef07d3591e7..86392eea4e3 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -36,6 +36,7 @@ #include "winuser.h" #include "dshow.h" #include "strmif.h" +#include "mfobjects.h" #include "wine/heap.h" #include "wine/strmbase.h"
@@ -54,6 +55,10 @@ void start_dispatch_thread(void) DECLSPEC_HIDDEN;
extern HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj) DECLSPEC_HIDDEN;
+HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN; +GstCaps *make_mf_compatible_caps(GstCaps *caps); +IMFMediaType *mf_media_type_from_caps(GstCaps *caps);
Missing DECLSPEC_HIDDEN.
Ack.
HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj) DECLSPEC_HIDDEN;
#endif /* __GST_PRIVATE_INCLUDED__ */
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 29af2b72def..345b1fe4528 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -47,14 +47,19 @@ struct media_stream LONG ref; struct media_source *parent_source; IMFMediaEventQueue *event_queue;
IMFStreamDescriptor *descriptor; GstElement *appsink; GstPad *their_src, *my_sink;
GstCaps *their_caps; enum { STREAM_STUB, STREAM_INACTIVE, STREAM_SHUTDOWN, } state;
/* used when in STUB state: */
DWORD stream_id;
HANDLE caps_event; };
struct media_source
@@ -312,6 +317,8 @@ static ULONG WINAPI media_stream_Release(IMFMediaStream *iface) { if (stream->my_sink) gst_object_unref(GST_OBJECT(stream->my_sink));
if (stream->descriptor)
IMFStreamDescriptor_Release(stream->descriptor); if (stream->event_queue) IMFMediaEventQueue_Release(stream->event_queue); if (stream->parent_source)
@@ -393,7 +400,10 @@ static HRESULT WINAPI media_stream_GetStreamDescriptor(IMFMediaStream* iface, IM if (stream->state == STREAM_SHUTDOWN) return MF_E_SHUTDOWN;
- return E_NOTIMPL;
IMFStreamDescriptor_AddRef(stream->descriptor);
*descriptor = stream->descriptor;
return S_OK; }
static HRESULT WINAPI media_stream_RequestSample(IMFMediaStream *iface, IUnknown *token)
@@ -436,9 +446,12 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD IMFMediaSource_AddRef(&source->IMFMediaSource_iface); object->parent_source = source; object->their_src = pad;
object->stream_id = stream_id;
object->state = STREAM_STUB;
object->caps_event = CreateEventA(NULL, TRUE, FALSE, NULL);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
@@ -455,10 +468,10 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD g_object_set(object->appsink, "wait-on-eos", FALSE, NULL);
object->my_sink = gst_element_get_static_pad(object->appsink, "sink");
- gst_pad_set_element_private(object->my_sink, object);
gst_pad_link(object->their_src, object->my_sink);
gst_pad_add_probe(object->my_sink, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, caps_listener_wrapper, object, NULL);
gst_element_sync_state_with_parent(object->appsink); TRACE("->(%p)\n", object);
@@ -473,6 +486,43 @@ static HRESULT new_media_stream(struct media_source *source, GstPad *pad, DWORD return hr; }
+static HRESULT media_stream_init_desc(struct media_stream *stream) +{
- HRESULT hr;
- IMFMediaTypeHandler *type_handler;
- IMFMediaType *stream_type = NULL;
- stream->their_caps = gst_caps_fixate(stream->their_caps);
I think this is unnecessary; the caps should already be fixed.
Ack.
- stream_type = mf_media_type_from_caps(stream->their_caps);
- gst_caps_unref(stream->their_caps);
This seems a bit error-prone. I'd suggest getting rid of the field and instead calling gst_caps_copy(gst_pad_get_current_caps(...))
Can we be sure that the caps will be set after the pad probe callback is complete?
No, I guess we can't. In that case you might want to add a comment to the definition of "their_caps" (and maybe also set it to NULL?), lest someone assume that it's always valid.
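Rough sketch of what I mean (field and function names taken from the patch):

    struct media_stream
    {
        /* ... */
        /* Only valid between the CAPS event hitting the sink-pad probe and
         * media_stream_init_desc() consuming it; NULL at all other times. */
        GstCaps *their_caps;
        /* ... */
    };

    /* and in media_stream_init_desc(): */
    stream_type = mf_media_type_from_caps(stream->their_caps);
    gst_caps_unref(stream->their_caps);
    stream->their_caps = NULL;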
- if (!stream_type)
- {
hr = E_FAIL;
goto fail;
- }
- if (FAILED(hr = MFCreateStreamDescriptor(stream->stream_id, 1, &stream_type, &stream->descriptor)))
goto fail;
- if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(stream->descriptor, &type_handler)))
goto fail;
- if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(type_handler, stream_type)))
goto fail;
- IMFMediaTypeHandler_Release(type_handler);
- stream->state = STREAM_INACTIVE;
- return S_OK;
- fail:
- ERR("media stream initialization failed with %x\n", hr);
While this does tell you that one of the mfplat functions failed, it doesn't actually tell you which one. There are similar patterns elsewhere in this series.
Is that a problem? The likelihood of one of these functions failing is very slim, and in that case you could use +mfplat to see the last function to run before the error. If conversion to a media type failed, the relevant conversion function will also emit an error.
It's not necessarily a problem, but it strikes me as unidiomatic. The likelihood is indeed slim, which is kind of why I wouldn't necessarily bother with the trace in the first place. If I did and didn't want to emit more than one trace, though, I'd probably put it in the caller of this function.
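E.g. in media_source_constructor(), something like this (sketch, mirroring the loop from the patch, with "i" declared at the top of the function):

    for (i = 0; i < object->stream_count; i++)
    {
        WaitForSingleObject(object->streams[i]->caps_event, INFINITE);
        if (FAILED(hr = media_stream_init_desc(object->streams[i])))
        {
            ERR("Failed to initialize stream %u, hr %#x.\n", i, hr);
            goto fail;
        }
    }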
- if (type_handler)
IMFMediaTypeHandler_Release(type_handler);
- return hr;
+}
- static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface);
@@ -680,6 +730,23 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = media_source_Shutdown, };
+/* If this callback is extended to use any significant win32 APIs, a wrapper function
- should be added */
+gboolean stream_found(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) +{
- GstCaps *target_caps;
- /* if the stream can be converted into an MF compatible type, we'll go that route
otherwise, we'll rely on decodebin for the whole process */
I think we should leave this part out for now.
I disagree; without it, a lot of the compressed media type conversion code becomes dead, and the codepath this triggers is closer to the Windows one.
I don't think a need for such code has been shown. At least it shouldn't be part of this patch.
- if ((target_caps = make_mf_compatible_caps(caps)))
- {
gst_caps_unref(target_caps);
return FALSE;
- }
- return TRUE;
+}
- static void source_stream_added(GstElement *element, GstPad *pad, gpointer user) { struct media_source *source = (struct media_source *) user;
@@ -697,6 +764,36 @@ static void source_stream_added(GstElement *element, GstPad *pad, gpointer user)
TRACE("stream-id: %u\n", stream_id);
- /* This codepath is currently never triggered, as we don't need to ever restart the gstreamer pipeline. It is retained in
case this becomes necessary in the future, for example in a case where different media types require different
post-processing elements. */
In general, if a code path is dead, it shouldn't be introduced (yet).
Ack.
- for (unsigned int i = 0; i < source->stream_count; i++)
- {
DWORD existing_stream_id;
IMFStreamDescriptor *descriptor = source->streams[i]->descriptor;
if (source->streams[i]->state == STREAM_STUB)
continue;
if (FAILED(IMFStreamDescriptor_GetStreamIdentifier(descriptor, &existing_stream_id)))
goto leave;
if (existing_stream_id == stream_id)
{
struct media_stream *existing_stream = source->streams[i];
GstPadLinkReturn ret;
TRACE("Found existing stream %p\n", existing_stream);
existing_stream->their_src = pad;
if ((ret = gst_pad_link(existing_stream->their_src, existing_stream->my_sink)) != GST_PAD_LINK_OK)
ERR("Error linking decodebin pad to stream %p, err = %d\n", existing_stream, ret);
goto leave;
}
- }
if (FAILED(new_media_stream(source, pad, stream_id, &stream))) { goto leave;
@@ -737,6 +834,26 @@ static void source_all_streams(GstElement *element, gpointer user) SetEvent(source->all_streams_event); }
+static GstPadProbeReturn caps_listener(GstPad *pad, GstPadProbeInfo *info, gpointer user) +{
- struct media_stream *stream = (struct media_stream *) user;
The cast is unnecessary, here and elsewhere.
Ack.
- GstEvent *event = gst_pad_probe_info_get_event(info);
- if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS)
- {
GstCaps *caps;
TRACE("got caps for stream %p\n", stream);
gst_event_parse_caps(event, &caps);
stream->their_caps = gst_caps_copy(caps);
SetEvent(stream->caps_event);
return GST_PAD_PROBE_REMOVE;
- }
- return GST_PAD_PROBE_OK;
+}
- static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) { GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
@@ -787,6 +904,8 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_
gst_bin_add(GST_BIN(object->container), object->decodebin);
- if(!GetEnvironmentVariableA("MF_DECODE_IN_SOURCE", NULL, 0))
Debugging code?
Yeah, it's useful for finding out whether an issue is caused by the real decoder MFT path or by the topology loader (as it skips all that).
I mean, you probably didn't mean to include this here. In general I think we try to avoid such debugging switches, and it's not clear to me this warrants an exception.
g_signal_connect(object->decodebin, "autoplug-continue", G_CALLBACK(stream_found), object); g_signal_connect(object->decodebin, "pad-added", G_CALLBACK(source_stream_added_wrapper), object); g_signal_connect(object->decodebin, "pad-removed", G_CALLBACK(source_stream_removed_wrapper), object); g_signal_connect(object->decodebin, "no-more-pads", G_CALLBACK(source_all_streams_wrapper), object);
@@ -812,6 +931,12 @@ static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_ }
WaitForSingleObject(object->all_streams_event, INFINITE);
- for (unsigned int i = 0; i < object->stream_count; i++)
Misplaced declaration.
Ack.
{
WaitForSingleObject(object->streams[i]->caps_event, INFINITE);
if (FAILED(hr = media_stream_init_desc(object->streams[i])))
goto fail;
}
object->state = SOURCE_STOPPED;
@@ -1337,6 +1462,12 @@ void perform_cb_media_source(struct cb_data *cbdata) source_all_streams(data->element, data->user); break; }
- case STREAM_PAD_EVENT:
{
struct pad_probe_data *data = &cbdata->u.pad_probe_data;
cbdata->u.pad_probe_data.ret = caps_listener(data->pad, data->info, data->user);
break;
} default: { ERR("Wrong callback forwarder called\n");
diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c index c996f06211e..3667bc3cc38 100644 --- a/dlls/winegstreamer/mfplat.c +++ b/dlls/winegstreamer/mfplat.c @@ -16,6 +16,11 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h" +#include <gst/gst.h>
+#include "gst_private.h"
#include <stdarg.h>
#include "gst_private.h"
@@ -436,3 +441,206 @@ HRESULT mfplat_get_class_object(REFCLSID rclsid, REFIID riid, void **obj)
return CLASS_E_CLASSNOTAVAILABLE;
}
+const static struct
"static const"
Ack.
+{
- const GUID *subtype;
- GstVideoFormat format;
+} +uncompressed_formats[] =
"uncompressed_video_formats", probably.
Ack.
+{
- {&MFVideoFormat_ARGB32, GST_VIDEO_FORMAT_BGRA},
- {&MFVideoFormat_RGB32, GST_VIDEO_FORMAT_BGRx},
- {&MFVideoFormat_RGB24, GST_VIDEO_FORMAT_BGR},
- {&MFVideoFormat_RGB565, GST_VIDEO_FORMAT_BGR16},
- {&MFVideoFormat_RGB555, GST_VIDEO_FORMAT_BGR15},
+};
+/* caps will be modified to represent the exact type needed for the format */
This seems wrong in general.
Why?
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
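I.e. make_mf_compatible_caps() would become a pure caps-to-caps fix-up and mf_media_type_from_caps() a pure conversion, roughly like this (sketch; the S16BE rewrite is just an example of the kind of adjustment that would live there):

    /* Return a copy of the caps adjusted to something mfplat can express;
       the input caps are left untouched. */
    GstCaps *make_mf_compatible_caps(const GstCaps *caps)
    {
        GstCaps *copy = gst_caps_copy(caps);
        GstStructure *structure = gst_caps_get_structure(copy, 0);
        const char *format = gst_structure_get_string(structure, "format");

        /* example fix-up: mfplat has no big-endian PCM subtype */
        if (format && !strcmp(format, "S16BE"))
            gst_caps_set_simple(copy, "format", G_TYPE_STRING, "S16LE", NULL);

        return copy;
    }

    /* ...and at the call site: */
    GstCaps *compatible = make_mf_compatible_caps(their_caps);
    IMFMediaType *type = compatible ? mf_media_type_from_caps(compatible) : NULL;
    if (compatible)
        gst_caps_unref(compatible);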
+static IMFMediaType* transform_to_media_type(GstCaps *caps) +{
- IMFMediaType *media_type;
- GstStructure *info;
- const char *mime_type;
- if (TRACE_ON(mfplat))
- {
gchar *human_readable = gst_caps_to_string(caps);
TRACE("caps = %s\n", debugstr_a(human_readable));
g_free(human_readable);
- }
- if (FAILED(MFCreateMediaType(&media_type)))
- {
return NULL;
- }
- info = gst_caps_get_structure(caps, 0);
- mime_type = gst_structure_get_name(info);
- if (!(strncmp(mime_type, "video", 5)))
There are a lot of superfluous parentheses after logical not operators in this series; this one seems especially superfluous.
Ack.
- {
GstVideoInfo video_info;
if (!(gst_video_info_from_caps(&video_info, caps)))
{
return NULL;
}
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, ((UINT64)video_info.width << 32) | video_info.height);
IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_RATE, ((UINT64)video_info.fps_n << 32) | video_info.fps_d);
if (!(strcmp(mime_type, "video/x-raw")))
{
GUID fourcc_subtype = MFVideoFormat_Base;
unsigned int i;
IMFMediaType_SetUINT32(media_type, &MF_MT_COMPRESSED, FALSE);
/* First try FOURCC */
if ((fourcc_subtype.Data1 = gst_video_format_to_fourcc(video_info.finfo->format)))
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &fourcc_subtype);
}
else
{
for (i = 0; i < ARRAY_SIZE(uncompressed_formats); i++)
{
if (uncompressed_formats[i].format == video_info.finfo->format)
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, uncompressed_formats[i].subtype);
break;
}
}
if (i == ARRAY_SIZE(uncompressed_formats))
FIXME("Unrecognized format.\n");
In which case the function should probably fail.
Ack.
}
}
else
{
FIXME("Unrecognized video format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else if (!(strncmp(mime_type, "audio", 5)))
- {
gint rate, channels, bitrate;
guint64 channel_mask;
IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio);
if (gst_structure_get_int(info, "rate", &rate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, rate);
if (gst_structure_get_int(info, "channels", &channels))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, channels);
if (gst_structure_get(info, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL))
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_CHANNEL_MASK, (DWORD)channel_mask);
Superfluous cast.
Ack.
if (gst_structure_get_int(info, "bitrate", &bitrate))
IMFMediaType_SetUINT32(media_type, &MF_MT_AVG_BITRATE, bitrate);
if (!(strcmp(mime_type, "audio/x-raw")))
{
const char *format;
if ((format = gst_structure_get_string(info, "format")))
You may find it easier to use gst_audio_info_from_caps() here.
Does this work on compressed audio types as well?
Yes (it yields GST_AUDIO_FORMAT_ENCODED), but it doesn't matter since you're already dealing with raw audio here.
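For the raw path that would look something like this (sketch; needs gst/audio/audio.h and the GstAudio helper library):

    GstAudioInfo audio_info;

    if (gst_audio_info_from_caps(&audio_info, caps))
    {
        if (GST_AUDIO_FORMAT_INFO_IS_FLOAT(audio_info.finfo))
            IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float);
        else
            IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM);

        IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, GST_AUDIO_INFO_RATE(&audio_info));
        IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, GST_AUDIO_INFO_CHANNELS(&audio_info));
        IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, GST_AUDIO_INFO_DEPTH(&audio_info));
    }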
{
char type;
unsigned int bits_per_sample;
char endian[2];
char new_format[6];
if ((strlen(format) > 5) || (sscanf(format, "%c%u%2c", &type, &bits_per_sample, endian) < 2))
{
FIXME("Unhandled audio format %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
if (type == 'F')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float);
}
else if (type == 'U' || type == 'S')
{
IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM);
if (bits_per_sample == 8)
type = 'U';
else
type = 'S';
}
else
{
FIXME("Unrecognized audio format: %s\n", format);
IMFMediaType_Release(media_type);
return NULL;
}
IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, bits_per_sample);
if (endian[0] == 'B')
endian[0] = 'L';
sprintf(new_format, "%c%u%.2s", type, bits_per_sample, bits_per_sample > 8 ? endian : 0);
gst_caps_set_simple(caps, "format", G_TYPE_STRING, new_format, NULL);
}
else
{
ERR("Failed to get audio format\n");
}
}
else
{
FIXME("Unrecognized audio format %s\n", mime_type);
IMFMediaType_Release(media_type);
return NULL;
}
- }
- else
- {
IMFMediaType_Release(media_type);
return NULL;
- }
- return media_type;
+}
+/* returns NULL if doesn't match exactly */ +IMFMediaType *mf_media_type_from_caps(GstCaps *caps) +{
- GstCaps *writeable_caps;
- IMFMediaType *ret;
- writeable_caps = gst_caps_copy(caps);
- ret = transform_to_media_type(writeable_caps);
- if (!(gst_caps_is_equal(caps, writeable_caps)))
- {
IMFMediaType_Release(ret);
ret = NULL;
- }
- gst_caps_unref(writeable_caps);
- return ret;
+}
+GstCaps *make_mf_compatible_caps(GstCaps *caps) +{
- GstCaps *ret;
- IMFMediaType *media_type;
- ret = gst_caps_copy(caps);
- if ((media_type = transform_to_media_type(ret)))
IMFMediaType_Release(media_type);
- if (!media_type)
- {
gst_caps_unref(ret);
return NULL;
- }
- return ret;
+}
On 9/10/20 7:54 PM, Zebediah Figura wrote:
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
This doesn't strike me as a concern during source initialization. We won't be calling this function while media is playing.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
This is what I currently do[1]; it's just that internally I prefer to wrap this into a single function, to keep the conversion code and the correction code next to each other.
I'm about to send v2, and everything but this comment should be addressed.
1: https://github.com/Guy1524/wine/blob/mfplat_cleanup/dlls/winegstreamer/media...
On 9/14/20 2:28 PM, Derek Lesho wrote:
On 9/10/20 7:54 PM, Zebediah Figura wrote:
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
This doesn't strike me as a concern during source initialization. We won't be calling this function while media is playing.
Sure, though startup time shouldn't exactly be ignored, either. [For instance, quartz + gstreamer currently can take multiple hundreds of milliseconds to start up, which is actually kind of abhorrent.]
My concern is more about idiomatic structure than anything else, in any case. The function, for instance, is doing multiple things at once, and it's not how I'd expect a conversion function to behave. Wrapping it in a different function helps abstract that away to a more idiomatic interface, but the implementation remains harder to read simply because it's not (maximally) modularized. The fact that those tasks are (at least as of this patch) entirely orthogonal doesn't help either.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
This is what I currently do[1]; it's just that internally I prefer to wrap this into a single function, to keep the conversion code and the correction code next to each other.
I'm about to send v2, and everything but this comment should be addressed.
1: https://github.com/Guy1524/wine/blob/mfplat_cleanup/dlls/winegstreamer/media...
On 9/14/20 3:49 PM, Zebediah Figura wrote:
On 9/14/20 2:28 PM, Derek Lesho wrote:
On 9/10/20 7:54 PM, Zebediah Figura wrote:
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
This doesn't strike me as a concern during source initialization. We won't be calling this function while media is playing.
Sure, though startup time shouldn't exactly be ignored, either. [For instance, quartz + gstreamer currently can take multiple hundreds of milliseconds to start up, which is actually kind of abhorrent.]
Oof, have you profiled this? I doubt that excessive caps copying would be the reason. Maybe the issue is with gstreamer?
My concern is more about idiomatic structure than anything else, in any case. The function, for instance, is doing multiple things at once, and it's not how I'd expect a conversion function to behave. Wrapping it in a different function helps abstract that away to a more idiomatic interface, but the implementation remains harder to read simply because it's not (maximally) modularized.
Maybe the function seems simpler to read for me than it actually is, since I wrote it. Would adding a better descriptive comment to `transform_to_media_type` help?
The fact that those tasks are (at least as of this patch) entirely orthogonal doesn't help either.
Can you elaborate on what you mean by the tasks being orthogonal? As I said in my last email, the two functions based on `transform_to_media_type` are used hand-in-hand in the main parser path, as linked there. I do now realize, though, that `make_mf_compatible_caps` is dead code in this patch and should be moved up.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
This is what I currently do[1]; it's just that internally I prefer to wrap this into a single function, to keep the conversion code and the correction code next to each other.
I'm about to send v2, and everything but this comment should be addressed.
1: https://github.com/Guy1524/wine/blob/mfplat_cleanup/dlls/winegstreamer/media...
On 9/15/20 9:06 AM, Derek Lesho wrote:
On 9/14/20 3:49 PM, Zebediah Figura wrote:
On 9/14/20 2:28 PM, Derek Lesho wrote:
On 9/10/20 7:54 PM, Zebediah Figura wrote:
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
This doesn't strike me as a concern during source initialization. We won't be calling this function while media is playing.
Sure, though startup time shouldn't exactly be ignored, either. [For instance, quartz + gstreamer currently can take multiple hundreds of milliseconds to start up, which is actually kind of abhorrent.]
Oof, have you profiled this? I doubt that excessive caps copying would be the reason. Maybe the issue is with gstreamer?
I haven't profiled it. I don't think that caps copying is exactly cheap, though; from what I've seen gstreamer makes efforts to avoid it.
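Caps are refcounted, so when nothing needs to be modified a plain reference is enough, and gst_caps_make_writable() only copies when the caps are actually shared. E.g.:

    GstCaps *ref = gst_caps_ref(caps);                              /* O(1), shares the structures */
    GstCaps *writable = gst_caps_make_writable(gst_caps_ref(caps)); /* copies only if another ref exists */

    gst_caps_unref(ref);
    gst_caps_unref(writable);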
My concern is more about idiomatic structure than anything else, in any case. The function, for instance, is doing multiple things at once, and it's not how I'd expect a conversion function to behave. Wrapping it in a different function helps abstract that away to a more idiomatic interface, but the implementation remains harder to read simply because it's not (maximally) modularized.
Maybe the function seems simpler to read for me than it actually is, since I wrote it. Would adding a better descriptive comment to `transform_to_media_type` help?
In general it's better to improve the code structure rather than to document bad code structure.
The fact that those tasks are (at least as of this patch) entirely orthogonal doesn't help either.
Can you elaborate on what you mean by the tasks being orthogonal? As I said in my last email, the two functions based on `transform_to_media_type` are used hand-in-hand in the main parser path, as linked there. I do now realize, though, that `make_mf_compatible_caps` is dead code in this patch and should be moved up.
By which I mostly mean that the modifications made are orthogonal to the type conversion done. I.e. it might make more sense if you did catch-all handling of video format types that can't be converted (though returning failure and watching for that may likely be better structure), but in this case doing both things in the same function isn't really saving you anything.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
This is what I currently do[1]; it's just that internally I prefer to wrap this into a single function, to keep the conversion code and the correction code next to each other.
I'm about to send v2, and everything but this comment should be addressed.
1: https://github.com/Guy1524/wine/blob/mfplat_cleanup/dlls/winegstreamer/media...
On 9/15/20 10:16 AM, Zebediah Figura wrote:
On 9/15/20 9:06 AM, Derek Lesho wrote:
On 9/14/20 3:49 PM, Zebediah Figura wrote:
On 9/14/20 2:28 PM, Derek Lesho wrote:
On 9/10/20 7:54 PM, Zebediah Figura wrote:
It's not really idiomatic code structure. More concretely, it also means you're copying the caps when you don't need to.
This doesn't strike me as a concern during source initialization. We won't be calling this function while media is playing.
Sure, though startup time shouldn't exactly be ignored, either. [For instance, quartz + gstreamer currently can take multiple hundreds of milliseconds to start up, which is actually kind of abhorrent.]
Oof, have you profiled this? I doubt that excessive caps copying would be the reason. Maybe the issue is with gstreamer?
I haven't profiled it. I don't think that caps copying is exactly cheap, though; from what I've seen gstreamer makes efforts to avoid it.
My concern is more about idiomatic structure than anything else, in any case. The function, for instance, is doing multiple things at once, and it's not how I'd expect a conversion function to behave. Wrapping it in a different function helps abstract that away to a more idiomatic interface, but the implementation remains harder to read simply because it's not (maximally) modularized.
Maybe the function seems simpler to read for me than it actually is, since I wrote it. Would adding a better descriptive comment to `transform_to_media_type` help?
In general it's better to improve the code structure rather than to document bad code structure.
The fact that those tasks are (at least as of this patch) entirely orthogonal doesn't help either.
Can you elaborate on what you mean by the tasks being orthogonal? As I said in my last email, the two functions based on `transform_to_media_type` are used hand-in-hand in the main parser path, as linked there. I do now realize, though, that `make_mf_compatible_caps` is dead code in this patch and should be moved up.
By which I mostly mean that the modifications made are orthogonal to the type conversion done. I.e. it might make more sense if you did catch-all handling of video format types that can't be converted (though returning failure and watching for that may likely be better structure), but in this case doing both things in the same function isn't really saving you anything.
Ok then, I'll factor out the function in v3.
If you really need to do something like this, you probably want a separate function that takes a GstCaps and returns a new GstCaps that is compatible with mfplat, then you feed the output of that through mf_media_type_from_caps().
This is what I currently do[1]; it's just that internally I prefer to wrap this into a single function, to keep the conversion code and the correction code next to each other.
I'm about to send v2, and everything but this comment should be addressed.
1: https://github.com/Guy1524/wine/blob/mfplat_cleanup/dlls/winegstreamer/media...
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 58 ++++++++ dlls/winegstreamer/gst_cbs.h | 12 +- dlls/winegstreamer/main.c | 3 + dlls/winegstreamer/media_source.c | 219 +++++++++++++++++++++++++++++- 4 files changed, 288 insertions(+), 4 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index bf7103b1606..8f48368c96a 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -49,6 +49,8 @@ static void CALLBACK perform_cb(TP_CALLBACK_INSTANCE *instance, void *user)
if (cbdata->type < GSTDEMUX_MAX) perform_cb_gstdemux(cbdata);
else if (cbdata->type < MEDIA_SOURCE_MAX)
perform_cb_media_source(cbdata);
pthread_mutex_lock(&cbdata->lock); cbdata->finished = 1;
@@ -301,3 +303,59 @@ gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query)
return cbdata.u.query_sink_data.ret;
}
+GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct cb_data cbdata = { PULL_FROM_BYTESTREAM };
- cbdata.u.getrange_data.pad = pad;
- cbdata.u.getrange_data.parent = parent;
- cbdata.u.getrange_data.ofs = ofs;
- cbdata.u.getrange_data.len = len;
- cbdata.u.getrange_data.buf = buf;
- call_cb(&cbdata);
- return cbdata.u.getrange_data.ret;
+}
+gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct cb_data cbdata = { QUERY_BYTESTREAM };
- cbdata.u.query_function_data.pad = pad;
- cbdata.u.query_function_data.parent = parent;
- cbdata.u.query_function_data.query = query;
- call_cb(&cbdata);
- return cbdata.u.query_function_data.ret;
+}
+gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct cb_data cbdata = { ACTIVATE_BYTESTREAM_PAD_MODE };
- cbdata.u.activate_mode_data.pad = pad;
- cbdata.u.activate_mode_data.parent = parent;
- cbdata.u.activate_mode_data.mode = mode;
- cbdata.u.activate_mode_data.activate = activate;
- call_cb(&cbdata);
- return cbdata.u.activate_mode_data.ret;
+}
+gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct cb_data cbdata = { PROCESS_BYTESTREAM_PAD_EVENT };
- cbdata.u.event_src_data.pad = pad;
- cbdata.u.event_src_data.parent = parent;
- cbdata.u.event_src_data.event = event;
- call_cb(&cbdata);
- return cbdata.u.event_src_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 4725f23ad1a..10e999feea7 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -43,7 +43,12 @@ enum CB_TYPE { AUTOPLUG_BLACKLIST, UNKNOWN_TYPE, QUERY_SINK,
- GSTDEMUX_MAX
- GSTDEMUX_MAX,
- PULL_FROM_BYTESTREAM,
- QUERY_BYTESTREAM,
- ACTIVATE_BYTESTREAM_PAD_MODE,
- PROCESS_BYTESTREAM_PAD_EVENT,
- MEDIA_SOURCE_MAX,
};
struct cb_data { @@ -138,6 +143,7 @@ struct cb_data {
void mark_wine_thread(void) DECLSPEC_HIDDEN; void perform_cb_gstdemux(struct cb_data *data) DECLSPEC_HIDDEN; +void perform_cb_media_source(struct cb_data *data) DECLSPEC_HIDDEN;
GstBusSyncReply watch_bus_wrapper(GstBus *bus, GstMessage *msg, gpointer user) DECLSPEC_HIDDEN; void existing_new_pad_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; @@ -154,5 +160,9 @@ GstAutoplugSelectResult autoplug_blacklist_wrapper(GstElement *bin, GstPad *pad, void unknown_type_wrapper(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) DECLSPEC_HIDDEN; void Gstreamer_transform_pad_added_wrapper(GstElement *filter, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len, GstBuffer **buf) DECLSPEC_HIDDEN; +gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; +gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/main.c b/dlls/winegstreamer/main.c index 2872710b3e2..4ca371d58bd 100644 --- a/dlls/winegstreamer/main.c +++ b/dlls/winegstreamer/main.c @@ -146,6 +146,9 @@ HRESULT WINAPI DllGetClassObject(REFCLSID clsid, REFIID iid, void **out)
TRACE("clsid %s, iid %s, out %p.\n", debugstr_guid(clsid), debugstr_guid(iid), out);
- if (!init_gstreamer())
return CLASS_E_CLASSNOTAVAILABLE;
- if (SUCCEEDED(hr = mfplat_get_class_object(clsid, iid, out))) return hr;
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 84ecf305d4c..6b3bd4a7869 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -17,7 +17,12 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h"
+#include <gst/gst.h>
#include "gst_private.h" +#include "gst_cbs.h"
#include <stdarg.h>
@@ -27,6 +32,7 @@ #include "mfapi.h" #include "mferror.h" #include "mfidl.h" +#include "mfobjects.h"
#include "wine/debug.h" #include "wine/heap.h" @@ -39,6 +45,8 @@ struct media_source IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue;
- IMFByteStream *byte_stream;
- GstPad *my_src; enum { SOURCE_OPENING,
@@ -52,6 +60,154 @@ static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *ifac return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); }
+GstFlowReturn pull_from_bytestream(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct media_source *source = gst_pad_get_element_private(pad);
- IMFByteStream *byte_stream = source->byte_stream;
- ULONG bytes_read;
- GstMapInfo info;
- BOOL is_eof;
- HRESULT hr;
- TRACE("gstreamer requesting %u bytes at %s from source %p into buffer %p\n", len, wine_dbgstr_longlong(ofs), source, buf);
A bit of a long line, especially since the next longest line above is wrapped. (The word GStreamer seems a bit redundant here, also; same thing below.)
I'd probably recommend tracing *buf instead of buf; the double pointer isn't very interesting.
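Something along these lines, for example:

    TRACE("requesting %u bytes at %s from source %p into buffer %p.\n",
            len, wine_dbgstr_longlong(ofs), source, *buf);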
- if (ofs != GST_BUFFER_OFFSET_NONE)
- {
if (FAILED(IMFByteStream_SetCurrentPosition(byte_stream, ofs)))
return GST_FLOW_ERROR;
- }
- if (FAILED(IMFByteStream_IsEndOfStream(byte_stream, &is_eof)))
return GST_FLOW_ERROR;
- if (is_eof)
return GST_FLOW_EOS;
- if (!(*buf))
*buf = gst_buffer_new_and_alloc(len);
- gst_buffer_map(*buf, &info, GST_MAP_WRITE);
- hr = IMFByteStream_Read(byte_stream, info.data, len, &bytes_read);
- gst_buffer_unmap(*buf, &info);
- gst_buffer_set_size(*buf, bytes_read);
- if (FAILED(hr))
- {
return GST_FLOW_ERROR;
- }
Inconsistent use of braces.
- GST_BUFFER_OFFSET(*buf) = ofs;
This will set it to GST_BUFFER_OFFSET_NONE if ofs was -1. That's valid according to the GStreamer API, but may not be what you intended to do.
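If the offset is worth keeping at all, a guard would at least make the intent explicit, e.g.:

    if (ofs != GST_BUFFER_OFFSET_NONE)
        GST_BUFFER_OFFSET(*buf) = ofs;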
- return GST_FLOW_OK;
+}
+static gboolean query_bytestream(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- GstFormat format;
- QWORD bytestream_len;
- TRACE("GStreamer queries source %p for %s\n", source, GST_QUERY_TYPE_NAME(query));
- if (FAILED(IMFByteStream_GetLength(source->byte_stream, &bytestream_len)))
return FALSE;
- switch (GST_QUERY_TYPE(query))
- {
case GST_QUERY_DURATION:
{
gst_query_parse_duration (query, &format, NULL);
if (format == GST_FORMAT_PERCENT) {
gst_query_set_duration (query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX);
return TRUE;
}
else if (format == GST_FORMAT_BYTES)
{
QWORD length;
IMFByteStream_GetLength(source->byte_stream, &length);
gst_query_set_duration (query, GST_FORMAT_BYTES, length);
return TRUE;
}
Inconsistent braces, and inconsistent spacing between function name and left parenthesis.
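For example, reflowed with a single brace and spacing style, and reusing the bytestream_len already queried above:

    case GST_QUERY_DURATION:
    {
        gst_query_parse_duration(query, &format, NULL);
        if (format == GST_FORMAT_PERCENT)
        {
            gst_query_set_duration(query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX);
            return TRUE;
        }
        else if (format == GST_FORMAT_BYTES)
        {
            gst_query_set_duration(query, GST_FORMAT_BYTES, bytestream_len);
            return TRUE;
        }
        return FALSE;
    }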
return FALSE;
}
case GST_QUERY_SEEKING:
{
gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
if (format != GST_FORMAT_BYTES)
{
WARN("Cannot seek using format \"%s\".\n", gst_format_get_name(format));
return FALSE;
}
gst_query_set_seeking(query, GST_FORMAT_BYTES, 1, 0, bytestream_len);
return TRUE;
}
case GST_QUERY_SCHEDULING:
{
gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0);
gst_query_add_scheduling_mode(query, GST_PAD_MODE_PULL);
return TRUE;
}
case GST_QUERY_CAPS:
{
GstStaticCaps any = GST_STATIC_CAPS_ANY;
GstCaps *caps, *filter;
caps = gst_static_caps_get(&any);
gst_query_parse_caps(query, &filter);
if (filter) {
GstCaps* filtered;
filtered = gst_caps_intersect_full(
filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref(caps);
caps = filtered;
}
gst_query_set_caps_result(query, caps);
gst_caps_unref(caps);
return TRUE;
}
What's the handling of GST_QUERY_CAPS for?
If it is indeed necessary, I think it can be simplified by just calling gst_pad_query_default(); that should do about the same thing as here.
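I.e., if the query needs handling at all, roughly:

    case GST_QUERY_CAPS:
        return gst_pad_query_default(pad, parent, query);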
default:
{
WARN("Unhandled query type %s\n", GST_QUERY_TYPE_NAME(query));
return FALSE;
}
- }
+}
+static gboolean activate_bytestream_pad_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("%s source pad for mediasource %p in %s mode.\n",
activate ? "Activating" : "Deactivating", source, gst_pad_mode_get_name(mode));
- switch (mode) {
case GST_PAD_MODE_PULL:
return TRUE;
default:
return FALSE;
- }
- return FALSE;
+}
The switch seems a bit excessive. (Yes, it's copied from quartz, but even there I think it's excessive.) Note also that the braces and spacing are inconsistent.
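For illustration, the whole body could shrink to something like:

    static gboolean activate_bytestream_pad_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate)
    {
        struct media_source *source = gst_pad_get_element_private(pad);

        TRACE("%s source pad for media source %p in %s mode.\n",
                activate ? "Activating" : "Deactivating", source, gst_pad_mode_get_name(mode));

        /* Only pull mode is supported. */
        return mode == GST_PAD_MODE_PULL;
    }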
+static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("source %p, type "%s".\n", source, GST_EVENT_TYPE_NAME(event));
- switch (event->type) {
/* the seek event should fail in pull mode */
case GST_EVENT_SEEK:
return FALSE;
default:
WARN("Ignoring \"%s\" event.\n", GST_EVENT_TYPE_NAME(event));
case GST_EVENT_TAG:
case GST_EVENT_QOS:
case GST_EVENT_RECONFIGURE:
return gst_pad_event_default(pad, parent, event);
- }
- return TRUE;
+}
static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface); @@ -211,8 +367,12 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
if (source->my_src)
gst_object_unref(GST_OBJECT(source->my_src));
if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue);
if (source->byte_stream)
IMFByteStream_Release(source->byte_stream);
return S_OK;
} @@ -236,19 +396,34 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl =
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) {
- GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
"mf_src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
Copied from quartz, I know, but can we please avoid Microsoft-style function/macro invocations?
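I.e. collapse the argument list instead of giving every argument its own aligned line, something like:

    static GstStaticPadTemplate src_template =
            GST_STATIC_PAD_TEMPLATE("mf_src", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS_ANY);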
struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr; if (!object) return E_OUTOFMEMORY;
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
- object->byte_stream = bytestream;
- IMFByteStream_AddRef(bytestream);
- if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
- object->state = SOURCE_STOPPED;
- object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src");
- gst_pad_set_element_private(object->my_src, object);
- gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper);
- gst_pad_set_query_function(object->my_src, query_bytestream_wrapper);
- gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper);
- gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
object->state = SOURCE_STOPPED;
*out_media_source = object; return S_OK;
@@ -717,3 +892,41 @@ HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj)
return hr;
}
+/* helper for callback forwarding */ +void perform_cb_media_source(struct cb_data *cbdata) +{
- switch(cbdata->type)
- {
- case PULL_FROM_BYTESTREAM:
{
struct getrange_data *data = &cbdata->u.getrange_data;
cbdata->u.getrange_data.ret = pull_from_bytestream(data->pad, data->parent,
data->ofs, data->len, data->buf);
break;
}
- case QUERY_BYTESTREAM:
{
struct query_function_data *data = &cbdata->u.query_function_data;
cbdata->u.query_function_data.ret = query_bytestream(data->pad, data->parent, data->query);
break;
}
- case ACTIVATE_BYTESTREAM_PAD_MODE:
{
struct activate_mode_data *data = &cbdata->u.activate_mode_data;
cbdata->u.activate_mode_data.ret = activate_bytestream_pad_mode(data->pad, data->parent, data->mode, data->activate);
break;
}
- case PROCESS_BYTESTREAM_PAD_EVENT:
{
struct event_src_data *data = &cbdata->u.event_src_data;
cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event);
break;
}
- default:
{
ERR("Wrong callback forwarder called\n");
return;
The "return" is superfluous. I'd also recommend assert(), like in perform_cb_gstdemux().
}
- }
+}
On 9/9/20 5:49 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 58 ++++++++ dlls/winegstreamer/gst_cbs.h | 12 +- dlls/winegstreamer/main.c | 3 + dlls/winegstreamer/media_source.c | 219 +++++++++++++++++++++++++++++- 4 files changed, 288 insertions(+), 4 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index bf7103b1606..8f48368c96a 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -49,6 +49,8 @@ static void CALLBACK perform_cb(TP_CALLBACK_INSTANCE *instance, void *user)
if (cbdata->type < GSTDEMUX_MAX) perform_cb_gstdemux(cbdata);
else if (cbdata->type < MEDIA_SOURCE_MAX)
perform_cb_media_source(cbdata); pthread_mutex_lock(&cbdata->lock); cbdata->finished = 1;
@@ -301,3 +303,59 @@ gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query)
return cbdata.u.query_sink_data.ret;
}
+GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct cb_data cbdata = { PULL_FROM_BYTESTREAM };
- cbdata.u.getrange_data.pad = pad;
- cbdata.u.getrange_data.parent = parent;
- cbdata.u.getrange_data.ofs = ofs;
- cbdata.u.getrange_data.len = len;
- cbdata.u.getrange_data.buf = buf;
- call_cb(&cbdata);
- return cbdata.u.getrange_data.ret;
+}
+gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct cb_data cbdata = { QUERY_BYTESTREAM };
- cbdata.u.query_function_data.pad = pad;
- cbdata.u.query_function_data.parent = parent;
- cbdata.u.query_function_data.query = query;
- call_cb(&cbdata);
- return cbdata.u.query_function_data.ret;
+}
+gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct cb_data cbdata = { ACTIVATE_BYTESTREAM_PAD_MODE };
- cbdata.u.activate_mode_data.pad = pad;
- cbdata.u.activate_mode_data.parent = parent;
- cbdata.u.activate_mode_data.mode = mode;
- cbdata.u.activate_mode_data.activate = activate;
- call_cb(&cbdata);
- return cbdata.u.activate_mode_data.ret;
+}
+gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct cb_data cbdata = { PROCESS_BYTESTREAM_PAD_EVENT };
- cbdata.u.event_src_data.pad = pad;
- cbdata.u.event_src_data.parent = parent;
- cbdata.u.event_src_data.event = event;
- call_cb(&cbdata);
- return cbdata.u.event_src_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 4725f23ad1a..10e999feea7 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -43,7 +43,12 @@ enum CB_TYPE { AUTOPLUG_BLACKLIST, UNKNOWN_TYPE, QUERY_SINK,
- GSTDEMUX_MAX
GSTDEMUX_MAX,
PULL_FROM_BYTESTREAM,
QUERY_BYTESTREAM,
ACTIVATE_BYTESTREAM_PAD_MODE,
PROCESS_BYTESTREAM_PAD_EVENT,
MEDIA_SOURCE_MAX, };
struct cb_data {
@@ -138,6 +143,7 @@ struct cb_data {
void mark_wine_thread(void) DECLSPEC_HIDDEN; void perform_cb_gstdemux(struct cb_data *data) DECLSPEC_HIDDEN; +void perform_cb_media_source(struct cb_data *data) DECLSPEC_HIDDEN;
GstBusSyncReply watch_bus_wrapper(GstBus *bus, GstMessage *msg, gpointer user) DECLSPEC_HIDDEN; void existing_new_pad_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; @@ -154,5 +160,9 @@ GstAutoplugSelectResult autoplug_blacklist_wrapper(GstElement *bin, GstPad *pad, void unknown_type_wrapper(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) DECLSPEC_HIDDEN; void Gstreamer_transform_pad_added_wrapper(GstElement *filter, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len, GstBuffer **buf) DECLSPEC_HIDDEN; +gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; +gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/main.c b/dlls/winegstreamer/main.c index 2872710b3e2..4ca371d58bd 100644 --- a/dlls/winegstreamer/main.c +++ b/dlls/winegstreamer/main.c @@ -146,6 +146,9 @@ HRESULT WINAPI DllGetClassObject(REFCLSID clsid, REFIID iid, void **out)
TRACE("clsid %s, iid %s, out %p.\n", debugstr_guid(clsid), debugstr_guid(iid), out);
- if (!init_gstreamer())
return CLASS_E_CLASSNOTAVAILABLE;
if (SUCCEEDED(hr = mfplat_get_class_object(clsid, iid, out))) return hr;
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 84ecf305d4c..6b3bd4a7869 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -17,7 +17,12 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h"
+#include <gst/gst.h>
- #include "gst_private.h"
+#include "gst_cbs.h"
#include <stdarg.h>
@@ -27,6 +32,7 @@ #include "mfapi.h" #include "mferror.h" #include "mfidl.h" +#include "mfobjects.h"
#include "wine/debug.h" #include "wine/heap.h" @@ -39,6 +45,8 @@ struct media_source IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue;
- IMFByteStream *byte_stream;
- GstPad *my_src; enum { SOURCE_OPENING,
@@ -52,6 +60,154 @@ static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *ifac return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); }
+GstFlowReturn pull_from_bytestream(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct media_source *source = gst_pad_get_element_private(pad);
- IMFByteStream *byte_stream = source->byte_stream;
- ULONG bytes_read;
- GstMapInfo info;
- BOOL is_eof;
- HRESULT hr;
- TRACE("gstreamer requesting %u bytes at %s from source %p into buffer %p\n", len, wine_dbgstr_longlong(ofs), source, buf);
A bit of a long line, especially since the next longest line above is wrapped. (The word GStreamer seems a bit redundant here, also; same thing below.)
👍
I'd probably recommend tracing *buf instead of buf; the double pointer isn't very interesting.
👍
- if (ofs != GST_BUFFER_OFFSET_NONE)
- {
if (FAILED(IMFByteStream_SetCurrentPosition(byte_stream, ofs)))
return GST_FLOW_ERROR;
- }
- if (FAILED(IMFByteStream_IsEndOfStream(byte_stream, &is_eof)))
return GST_FLOW_ERROR;
- if (is_eof)
return GST_FLOW_EOS;
- if (!(*buf))
*buf = gst_buffer_new_and_alloc(len);
- gst_buffer_map(*buf, &info, GST_MAP_WRITE);
- hr = IMFByteStream_Read(byte_stream, info.data, len, &bytes_read);
- gst_buffer_unmap(*buf, &info);
- gst_buffer_set_size(*buf, bytes_read);
- if (FAILED(hr))
- {
return GST_FLOW_ERROR;
- }
Inconsistent use of braces.
👍
- GST_BUFFER_OFFSET(*buf) = ofs;
This will set it to GST_BUFFER_OFFSET_NONE if ofs was -1. That's valid according to the GStreamer API, but may not be what you intended to do.
Looks like setting this field isn't necessary; demuxers still function without it. Would you like me to keep it in anyway?
- return GST_FLOW_OK;
+}
+static gboolean query_bytestream(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- GstFormat format;
- QWORD bytestream_len;
- TRACE("GStreamer queries source %p for %s\n", source, GST_QUERY_TYPE_NAME(query));
- if (FAILED(IMFByteStream_GetLength(source->byte_stream, &bytestream_len)))
return FALSE;
- switch (GST_QUERY_TYPE(query))
- {
case GST_QUERY_DURATION:
{
gst_query_parse_duration (query, &format, NULL);
if (format == GST_FORMAT_PERCENT) {
gst_query_set_duration (query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX);
return TRUE;
}
else if (format == GST_FORMAT_BYTES)
{
QWORD length;
IMFByteStream_GetLength(source->byte_stream, &length);
gst_query_set_duration (query, GST_FORMAT_BYTES, length);
return TRUE;
}
Inconsistent braces, and inconsistent spacing between function name and left parenthesis.
👍
return FALSE;
}
case GST_QUERY_SEEKING:
{
gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
if (format != GST_FORMAT_BYTES)
{
WARN("Cannot seek using format \"%s\".\n", gst_format_get_name(format));
return FALSE;
}
gst_query_set_seeking(query, GST_FORMAT_BYTES, 1, 0, bytestream_len);
return TRUE;
}
case GST_QUERY_SCHEDULING:
{
gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0);
gst_query_add_scheduling_mode(query, GST_PAD_MODE_PULL);
return TRUE;
}
case GST_QUERY_CAPS:
{
GstStaticCaps any = GST_STATIC_CAPS_ANY;
GstCaps *caps, *filter;
caps = gst_static_caps_get(&any);
gst_query_parse_caps(query, &filter);
if (filter) {
GstCaps* filtered;
filtered = gst_caps_intersect_full(
filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref(caps);
caps = filtered;
}
gst_query_set_caps_result(query, caps);
gst_caps_unref(caps);
return TRUE;
}
What's the handling of GST_QUERY_CAPS for?
Before the generic bytestream handler solution, each media source type's source pad had specific caps. Since that's no longer a thing, I'll just remove this.
If it is indeed necessary, I think it can be simplified by just calling gst_pad_query_default(); that should do about the same thing as here.
default:
{
WARN("Unhandled query type %s\n", GST_QUERY_TYPE_NAME(query));
return FALSE;
}
- }
+}
+static gboolean activate_bytestream_pad_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("%s source pad for mediasource %p in %s mode.\n",
activate ? "Activating" : "Deactivating", source, gst_pad_mode_get_name(mode));
- switch (mode) {
case GST_PAD_MODE_PULL:
return TRUE;
default:
return FALSE;
- }
- return FALSE;
+}
The switch seems a bit excessive. (Yes, it's copied from quartz, but even there I think it's excessive.) Note also that the braces and spacing are inconsistent.
👍
+static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("source %p, type "%s".\n", source, GST_EVENT_TYPE_NAME(event));
- switch (event->type) {
/* the seek event should fail in pull mode */
case GST_EVENT_SEEK:
return FALSE;
default:
WARN("Ignoring \"%s\" event.\n", GST_EVENT_TYPE_NAME(event));
case GST_EVENT_TAG:
case GST_EVENT_QOS:
case GST_EVENT_RECONFIGURE:
return gst_pad_event_default(pad, parent, event);
- }
- return TRUE;
+}
- static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface);
@@ -211,8 +367,12 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
if (source->my_src)
gst_object_unref(GST_OBJECT(source->my_src)); if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue);
if (source->byte_stream)
IMFByteStream_Release(source->byte_stream); return S_OK;
}
@@ -236,19 +396,34 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl =
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) {
- GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
"mf_src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
Copied from quartz, I know, but can we please avoid Microsoft-style function/macro invocations?
👍
struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr; if (!object) return E_OUTOFMEMORY;
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
- object->byte_stream = bytestream;
- IMFByteStream_AddRef(bytestream);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
- object->state = SOURCE_STOPPED;
- object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src");
- gst_pad_set_element_private(object->my_src, object);
- gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper);
- gst_pad_set_query_function(object->my_src, query_bytestream_wrapper);
- gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper);
- gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
object->state = SOURCE_STOPPED;
*out_media_source = object; return S_OK;
@@ -717,3 +892,41 @@ HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj)
return hr;
}
+/* helper for callback forwarding */ +void perform_cb_media_source(struct cb_data *cbdata) +{
- switch(cbdata->type)
- {
- case PULL_FROM_BYTESTREAM:
{
struct getrange_data *data = &cbdata->u.getrange_data;
cbdata->u.getrange_data.ret = pull_from_bytestream(data->pad, data->parent,
data->ofs, data->len, data->buf);
break;
}
- case QUERY_BYTESTREAM:
{
struct query_function_data *data = &cbdata->u.query_function_data;
cbdata->u.query_function_data.ret = query_bytestream(data->pad, data->parent, data->query);
break;
}
- case ACTIVATE_BYTESTREAM_PAD_MODE:
{
struct activate_mode_data *data = &cbdata->u.activate_mode_data;
cbdata->u.activate_mode_data.ret = activate_bytestream_pad_mode(data->pad, data->parent, data->mode, data->activate);
break;
}
- case PROCESS_BYTESTREAM_PAD_EVENT:
{
struct event_src_data *data = &cbdata->u.event_src_data;
cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event);
break;
}
- default:
{
ERR("Wrong callback forwarder called\n");
return;
The "return" is superfluous. I'd also recommend assert(), like in perform_cb_gstdemux().
👍
}
- }
+}
On 9/10/20 10:16 AM, Derek Lesho wrote:
On 9/9/20 5:49 PM, Zebediah Figura wrote:
On 9/8/20 10:47 AM, Derek Lesho wrote:
Signed-off-by: Derek Lesho dlesho@codeweavers.com
dlls/winegstreamer/gst_cbs.c | 58 ++++++++ dlls/winegstreamer/gst_cbs.h | 12 +- dlls/winegstreamer/main.c | 3 + dlls/winegstreamer/media_source.c | 219 +++++++++++++++++++++++++++++- 4 files changed, 288 insertions(+), 4 deletions(-)
diff --git a/dlls/winegstreamer/gst_cbs.c b/dlls/winegstreamer/gst_cbs.c index bf7103b1606..8f48368c96a 100644 --- a/dlls/winegstreamer/gst_cbs.c +++ b/dlls/winegstreamer/gst_cbs.c @@ -49,6 +49,8 @@ static void CALLBACK perform_cb(TP_CALLBACK_INSTANCE *instance, void *user)
if (cbdata->type < GSTDEMUX_MAX) perform_cb_gstdemux(cbdata);
else if (cbdata->type < MEDIA_SOURCE_MAX)
perform_cb_media_source(cbdata); pthread_mutex_lock(&cbdata->lock); cbdata->finished = 1;
@@ -301,3 +303,59 @@ gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query)
return cbdata.u.query_sink_data.ret;
}
+GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct cb_data cbdata = { PULL_FROM_BYTESTREAM };
- cbdata.u.getrange_data.pad = pad;
- cbdata.u.getrange_data.parent = parent;
- cbdata.u.getrange_data.ofs = ofs;
- cbdata.u.getrange_data.len = len;
- cbdata.u.getrange_data.buf = buf;
- call_cb(&cbdata);
- return cbdata.u.getrange_data.ret;
+}
+gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct cb_data cbdata = { QUERY_BYTESTREAM };
- cbdata.u.query_function_data.pad = pad;
- cbdata.u.query_function_data.parent = parent;
- cbdata.u.query_function_data.query = query;
- call_cb(&cbdata);
- return cbdata.u.query_function_data.ret;
+}
+gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct cb_data cbdata = { ACTIVATE_BYTESTREAM_PAD_MODE };
- cbdata.u.activate_mode_data.pad = pad;
- cbdata.u.activate_mode_data.parent = parent;
- cbdata.u.activate_mode_data.mode = mode;
- cbdata.u.activate_mode_data.activate = activate;
- call_cb(&cbdata);
- return cbdata.u.activate_mode_data.ret;
+}
+gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct cb_data cbdata = { PROCESS_BYTESTREAM_PAD_EVENT };
- cbdata.u.event_src_data.pad = pad;
- cbdata.u.event_src_data.parent = parent;
- cbdata.u.event_src_data.event = event;
- call_cb(&cbdata);
- return cbdata.u.event_src_data.ret;
+} diff --git a/dlls/winegstreamer/gst_cbs.h b/dlls/winegstreamer/gst_cbs.h index 4725f23ad1a..10e999feea7 100644 --- a/dlls/winegstreamer/gst_cbs.h +++ b/dlls/winegstreamer/gst_cbs.h @@ -43,7 +43,12 @@ enum CB_TYPE { AUTOPLUG_BLACKLIST, UNKNOWN_TYPE, QUERY_SINK,
- GSTDEMUX_MAX
GSTDEMUX_MAX,
PULL_FROM_BYTESTREAM,
QUERY_BYTESTREAM,
ACTIVATE_BYTESTREAM_PAD_MODE,
PROCESS_BYTESTREAM_PAD_EVENT,
MEDIA_SOURCE_MAX, };
struct cb_data {
@@ -138,6 +143,7 @@ struct cb_data {
void mark_wine_thread(void) DECLSPEC_HIDDEN; void perform_cb_gstdemux(struct cb_data *data) DECLSPEC_HIDDEN; +void perform_cb_media_source(struct cb_data *data) DECLSPEC_HIDDEN;
GstBusSyncReply watch_bus_wrapper(GstBus *bus, GstMessage *msg, gpointer user) DECLSPEC_HIDDEN; void existing_new_pad_wrapper(GstElement *bin, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; @@ -154,5 +160,9 @@ GstAutoplugSelectResult autoplug_blacklist_wrapper(GstElement *bin, GstPad *pad, void unknown_type_wrapper(GstElement *bin, GstPad *pad, GstCaps *caps, gpointer user) DECLSPEC_HIDDEN; void Gstreamer_transform_pad_added_wrapper(GstElement *filter, GstPad *pad, gpointer user) DECLSPEC_HIDDEN; gboolean query_sink_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +GstFlowReturn pull_from_bytestream_wrapper(GstPad *pad, GstObject *parent, guint64 ofs, guint len, GstBuffer **buf) DECLSPEC_HIDDEN; +gboolean query_bytestream_wrapper(GstPad *pad, GstObject *parent, GstQuery *query) DECLSPEC_HIDDEN; +gboolean activate_bytestream_pad_mode_wrapper(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) DECLSPEC_HIDDEN; +gboolean process_bytestream_pad_event_wrapper(GstPad *pad, GstObject *parent, GstEvent *event) DECLSPEC_HIDDEN;
#endif diff --git a/dlls/winegstreamer/main.c b/dlls/winegstreamer/main.c index 2872710b3e2..4ca371d58bd 100644 --- a/dlls/winegstreamer/main.c +++ b/dlls/winegstreamer/main.c @@ -146,6 +146,9 @@ HRESULT WINAPI DllGetClassObject(REFCLSID clsid, REFIID iid, void **out)
TRACE("clsid %s, iid %s, out %p.\n", debugstr_guid(clsid), debugstr_guid(iid), out);
- if (!init_gstreamer())
return CLASS_E_CLASSNOTAVAILABLE;
if (SUCCEEDED(hr = mfplat_get_class_object(clsid, iid, out))) return hr;
diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index 84ecf305d4c..6b3bd4a7869 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -17,7 +17,12 @@
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
+#include "config.h"
+#include <gst/gst.h>
- #include "gst_private.h"
+#include "gst_cbs.h"
#include <stdarg.h>
@@ -27,6 +32,7 @@ #include "mfapi.h" #include "mferror.h" #include "mfidl.h" +#include "mfobjects.h"
#include "wine/debug.h" #include "wine/heap.h" @@ -39,6 +45,8 @@ struct media_source IMFMediaSource IMFMediaSource_iface; LONG ref; IMFMediaEventQueue *event_queue;
- IMFByteStream *byte_stream;
- GstPad *my_src; enum { SOURCE_OPENING,
@@ -52,6 +60,154 @@ static inline struct media_source *impl_from_IMFMediaSource(IMFMediaSource *ifac return CONTAINING_RECORD(iface, struct media_source, IMFMediaSource_iface); }
+GstFlowReturn pull_from_bytestream(GstPad *pad, GstObject *parent, guint64 ofs, guint len,
GstBuffer **buf)
+{
- struct media_source *source = gst_pad_get_element_private(pad);
- IMFByteStream *byte_stream = source->byte_stream;
- ULONG bytes_read;
- GstMapInfo info;
- BOOL is_eof;
- HRESULT hr;
- TRACE("gstreamer requesting %u bytes at %s from source %p into buffer %p\n", len, wine_dbgstr_longlong(ofs), source, buf);
A bit of a long line, especially since the next longest line above is wrapped. (The word GStreamer seems a bit redundant here, also; same thing below.)
👍
I'd probably recommend tracing *buf instead of buf; the double pointer isn't very interesting.
👍
- if (ofs != GST_BUFFER_OFFSET_NONE)
- {
if (FAILED(IMFByteStream_SetCurrentPosition(byte_stream, ofs)))
return GST_FLOW_ERROR;
- }
- if (FAILED(IMFByteStream_IsEndOfStream(byte_stream, &is_eof)))
return GST_FLOW_ERROR;
- if (is_eof)
return GST_FLOW_EOS;
- if (!(*buf))
*buf = gst_buffer_new_and_alloc(len);
- gst_buffer_map(*buf, &info, GST_MAP_WRITE);
- hr = IMFByteStream_Read(byte_stream, info.data, len, &bytes_read);
- gst_buffer_unmap(*buf, &info);
- gst_buffer_set_size(*buf, bytes_read);
- if (FAILED(hr))
- {
return GST_FLOW_ERROR;
- }
Inconsistent use of braces.
👍
- GST_BUFFER_OFFSET(*buf) = ofs;
This will set it to GST_BUFFER_OFFSET_NONE if ofs was -1. That's valid according to the GStreamer API, but may not be what you intended to do.
Looks like setting this field isn't necessary; demuxers still function without it. Would you like me to keep it in anyway?
It's a little dangerous to claim "demuxers still function", unless you've tested or audited all demuxers (and all that remain to be written), but given that the documentation for GstPadGetRangeFunction doesn't say it's required, and the documentation for GstBuffer specifies that NULL is allowed, I think it's safe to forgo setting the offset entirely.
- return GST_FLOW_OK;
+}
+static gboolean query_bytestream(GstPad *pad, GstObject *parent, GstQuery *query) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- GstFormat format;
- QWORD bytestream_len;
- TRACE("GStreamer queries source %p for %s\n", source, GST_QUERY_TYPE_NAME(query));
- if (FAILED(IMFByteStream_GetLength(source->byte_stream, &bytestream_len)))
return FALSE;
- switch (GST_QUERY_TYPE(query))
- {
case GST_QUERY_DURATION:
{
gst_query_parse_duration (query, &format, NULL);
if (format == GST_FORMAT_PERCENT) {
gst_query_set_duration (query, GST_FORMAT_PERCENT, GST_FORMAT_PERCENT_MAX);
return TRUE;
}
else if (format == GST_FORMAT_BYTES)
{
QWORD length;
IMFByteStream_GetLength(source->byte_stream, &length);
gst_query_set_duration (query, GST_FORMAT_BYTES, length);
return TRUE;
}
Inconsistent braces, and inconsistent spacing between function name and left parenthesis.
👍
return FALSE;
}
case GST_QUERY_SEEKING:
{
gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
if (format != GST_FORMAT_BYTES)
{
WARN("Cannot seek using format \"%s\".\n", gst_format_get_name(format));
return FALSE;
}
gst_query_set_seeking(query, GST_FORMAT_BYTES, 1, 0, bytestream_len);
return TRUE;
}
case GST_QUERY_SCHEDULING:
{
gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0);
gst_query_add_scheduling_mode(query, GST_PAD_MODE_PULL);
return TRUE;
}
case GST_QUERY_CAPS:
{
GstStaticCaps any = GST_STATIC_CAPS_ANY;
GstCaps *caps, *filter;
caps = gst_static_caps_get(&any);
gst_query_parse_caps(query, &filter);
if (filter) {
GstCaps* filtered;
filtered = gst_caps_intersect_full(
filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref(caps);
caps = filtered;
}
gst_query_set_caps_result(query, caps);
gst_caps_unref(caps);
return TRUE;
}
What's the handling of GST_QUERY_CAPS for?
Before the generic bytestream handler solution, each media source type's source pad had specific caps. Since that's no longer a thing, I'll just remove this.
If it is indeed necessary, I think it can be simplified by just calling gst_pad_query_default(); that should do about the same thing as here.
default:
{
WARN("Unhandled query type %s\n", GST_QUERY_TYPE_NAME(query));
return FALSE;
}
- }
+}
+static gboolean activate_bytestream_pad_mode(GstPad *pad, GstObject *parent, GstPadMode mode, gboolean activate) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("%s source pad for mediasource %p in %s mode.\n",
activate ? "Activating" : "Deactivating", source, gst_pad_mode_get_name(mode));
- switch (mode) {
case GST_PAD_MODE_PULL:
return TRUE;
default:
return FALSE;
- }
- return FALSE;
+}
The switch seems a bit excessive. (Yes, it's copied from quartz, but even there I think it's excessive.) Note also that the braces and spacing are inconsistent.
👍
+static gboolean process_bytestream_pad_event(GstPad *pad, GstObject *parent, GstEvent *event) +{
- struct media_source *source = gst_pad_get_element_private(pad);
- TRACE("source %p, type "%s".\n", source, GST_EVENT_TYPE_NAME(event));
- switch (event->type) {
/* the seek event should fail in pull mode */
case GST_EVENT_SEEK:
return FALSE;
default:
WARN("Ignoring \"%s\" event.\n", GST_EVENT_TYPE_NAME(event));
case GST_EVENT_TAG:
case GST_EVENT_QOS:
case GST_EVENT_RECONFIGURE:
return gst_pad_event_default(pad, parent, event);
- }
- return TRUE;
+}
- static HRESULT WINAPI media_source_QueryInterface(IMFMediaSource *iface, REFIID riid, void **out) { struct media_source *source = impl_from_IMFMediaSource(iface);
@@ -211,8 +367,12 @@ static HRESULT WINAPI media_source_Shutdown(IMFMediaSource *iface)
source->state = SOURCE_SHUTDOWN;
if (source->my_src)
gst_object_unref(GST_OBJECT(source->my_src)); if (source->event_queue) IMFMediaEventQueue_Shutdown(source->event_queue);
if (source->byte_stream)
IMFByteStream_Release(source->byte_stream); return S_OK;
}
@@ -236,19 +396,34 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl =
static HRESULT media_source_constructor(IMFByteStream *bytestream, struct media_source **out_media_source) {
- GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
"mf_src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
Copied from quartz, I know, but can we please avoid Microsoft-style function/macro invocations?
👍
struct media_source *object = heap_alloc_zero(sizeof(*object)); HRESULT hr; if (!object) return E_OUTOFMEMORY;
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
- object->byte_stream = bytestream;
- IMFByteStream_AddRef(bytestream);
if (FAILED(hr = MFCreateEventQueue(&object->event_queue))) goto fail;
- object->state = SOURCE_STOPPED;
- object->my_src = gst_pad_new_from_static_template(&src_template, "mf-src");
- gst_pad_set_element_private(object->my_src, object);
- gst_pad_set_getrange_function(object->my_src, pull_from_bytestream_wrapper);
- gst_pad_set_query_function(object->my_src, query_bytestream_wrapper);
- gst_pad_set_activatemode_function(object->my_src, activate_bytestream_pad_mode_wrapper);
- gst_pad_set_event_function(object->my_src, process_bytestream_pad_event_wrapper);
- object->IMFMediaSource_iface.lpVtbl = &IMFMediaSource_vtbl;
- object->ref = 1;
object->state = SOURCE_STOPPED;
*out_media_source = object; return S_OK;
@@ -717,3 +892,41 @@ HRESULT winegstreamer_stream_handler_create(REFIID riid, void **obj)
return hr;
}
+/* helper for callback forwarding */ +void perform_cb_media_source(struct cb_data *cbdata) +{
- switch(cbdata->type)
- {
- case PULL_FROM_BYTESTREAM:
{
struct getrange_data *data = &cbdata->u.getrange_data;
cbdata->u.getrange_data.ret = pull_from_bytestream(data->pad, data->parent,
data->ofs, data->len, data->buf);
break;
}
- case QUERY_BYTESTREAM:
{
struct query_function_data *data = &cbdata->u.query_function_data;
cbdata->u.query_function_data.ret = query_bytestream(data->pad, data->parent, data->query);
break;
}
- case ACTIVATE_BYTESTREAM_PAD_MODE:
{
struct activate_mode_data *data = &cbdata->u.activate_mode_data;
cbdata->u.activate_mode_data.ret = activate_bytestream_pad_mode(data->pad, data->parent, data->mode, data->activate);
break;
}
- case PROCESS_BYTESTREAM_PAD_EVENT:
{
struct event_src_data *data = &cbdata->u.event_src_data;
cbdata->u.event_src_data.ret = process_bytestream_pad_event(data->pad, data->parent, data->event);
break;
}
- default:
{
ERR("Wrong callback forwarder called\n");
return;
The "return" is superfluous. I'd also recommend assert(), like in perform_cb_gstdemux().
👍
}
- }
+}