-- v3: winegstreamer: Create extra decodebin to decode compressed stream.
       winegstreamer: Implement wg_format_from_caps_video_wmv.
From: Ziqing Hui <zhui@codeweavers.com>
---
 dlls/winegstreamer/wg_format.c | 57 ++++++++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)

diff --git a/dlls/winegstreamer/wg_format.c b/dlls/winegstreamer/wg_format.c
index ac21b0af94f..1850972cdbc 100644
--- a/dlls/winegstreamer/wg_format.c
+++ b/dlls/winegstreamer/wg_format.c
@@ -232,6 +232,59 @@ static void wg_format_from_caps_video_cinepak(struct wg_format *format, const Gs
     format->u.video_cinepak.fps_d = fps_d;
 }
 
+static void wg_format_from_caps_video_wmv(struct wg_format *format, const GstCaps *caps)
+{
+    const GstStructure *structure = gst_caps_get_structure(caps, 0);
+    gint width, height, fps_n, fps_d, wmv_version = 0;
+    gchar format_buffer[4] = {'W','M','V','0'};
+    enum wg_wmv_video_format wmv_format;
+    const gchar *wmv_format_str = NULL;
+
+    if (!gst_structure_get_int(structure, "width", &width))
+    {
+        GST_WARNING("Missing \"width\" value.");
+        return;
+    }
+    if (!gst_structure_get_int(structure, "height", &height))
+    {
+        GST_WARNING("Missing \"height\" value.");
+        return;
+    }
+
+    if (!(wmv_format_str = gst_structure_get_string(structure, "format")))
+    {
+        if (!gst_structure_get_int(structure, "wmvversion", &wmv_version))
+            GST_WARNING("Unable to get WMV format.");
+        format_buffer[3] += wmv_version;
+        wmv_format_str = format_buffer;
+    }
+    if (!strcmp(wmv_format_str, "WMV1"))
+        wmv_format = WG_WMV_VIDEO_FORMAT_WMV1;
+    else if (!strcmp(wmv_format_str, "WMV2"))
+        wmv_format = WG_WMV_VIDEO_FORMAT_WMV2;
+    else if (!strcmp(wmv_format_str, "WMV3"))
+        wmv_format = WG_WMV_VIDEO_FORMAT_WMV3;
+    else if (!strcmp(wmv_format_str, "WMVA"))
+        wmv_format = WG_WMV_VIDEO_FORMAT_WMVA;
+    else if (!strcmp(wmv_format_str, "WVC1"))
+        wmv_format = WG_WMV_VIDEO_FORMAT_WVC1;
+    else
+        wmv_format = WG_WMV_VIDEO_FORMAT_UNKNOWN;
+
+    if (!gst_structure_get_fraction(structure, "framerate", &fps_n, &fps_d))
+    {
+        fps_n = 0;
+        fps_d = 1;
+    }
+
+    format->major_type = WG_MAJOR_TYPE_VIDEO_WMV;
+    format->u.video_wmv.width = width;
+    format->u.video_wmv.height = height;
+    format->u.video_wmv.format = wmv_format;
+    format->u.video_wmv.fps_n = fps_n;
+    format->u.video_wmv.fps_d = fps_d;
+}
+
 void wg_format_from_caps(struct wg_format *format, const GstCaps *caps)
 {
     const GstStructure *structure = gst_caps_get_structure(caps, 0);
@@ -261,6 +314,10 @@ void wg_format_from_caps(struct wg_format *format, const GstCaps *caps)
     {
         wg_format_from_caps_video_cinepak(format, caps);
     }
+    else if (!strcmp(name, "video/x-wmv"))
+    {
+        wg_format_from_caps_video_wmv(format, caps);
+    }
     else
     {
         gchar *str = gst_caps_to_string(caps);
From: Ziqing Hui <zhui@codeweavers.com>
---
 dlls/winegstreamer/wg_parser.c | 215 ++++++++++++++++++++++++++++-----
 1 file changed, 184 insertions(+), 31 deletions(-)

diff --git a/dlls/winegstreamer/wg_parser.c b/dlls/winegstreamer/wg_parser.c
index a8da149e7be..8dfa53d705f 100644
--- a/dlls/winegstreamer/wg_parser.c
+++ b/dlls/winegstreamer/wg_parser.c
@@ -104,15 +104,15 @@ struct wg_parser_stream
     uint32_t number;
 
     GstPad *their_src, *post_sink, *post_src, *my_sink;
-    GstElement *flip;
+    GstElement *flip, *decodebin;
     GstSegment segment;
-    struct wg_format preferred_format, current_format;
+    struct wg_format preferred_format, current_format, stream_format;
 
     pthread_cond_t event_cond, event_empty_cond;
     GstBuffer *buffer;
     GstMapInfo map_info;
 
-    bool flushing, eos, enabled, has_caps, has_tags, has_buffer;
+    bool flushing, eos, enabled, has_caps, has_tags, has_buffer, no_more_pads;
 
     uint64_t duration;
     gchar *tags[WG_PARSER_TAG_COUNT];
@@ -455,6 +455,33 @@ static NTSTATUS wg_parser_stream_notify_qos(void *args)
     return S_OK;
 }
 
+static bool parser_no_more_pads(struct wg_parser *parser)
+{
+    unsigned int i;
+
+    for (i = 0; i < parser->stream_count; ++i)
+    {
+        if (!parser->streams[i]->no_more_pads)
+            return false;
+    }
+
+    return parser->no_more_pads;
+}
+
+static gboolean autoplug_continue_cb(GstElement * decodebin, GstPad *pad, GstCaps * caps, gpointer user)
+{
+    struct wg_format format;
+
+    wg_format_from_caps(&format, caps);
+
+    if (format.major_type != WG_MAJOR_TYPE_UNKNOWN
+            && format.major_type != WG_MAJOR_TYPE_VIDEO
+            && format.major_type != WG_MAJOR_TYPE_AUDIO)
+        return false;
+
+    return true;
+}
+
 static GstAutoplugSelectResult autoplug_select_cb(GstElement *bin, GstPad *pad, GstCaps *caps,
         GstElementFactory *fact, gpointer user)
 {
@@ -761,6 +788,7 @@ static struct wg_parser_stream *create_stream(struct wg_parser *parser)
 
     stream->parser = parser;
     stream->number = parser->stream_count;
+    stream->no_more_pads = true;
    stream->current_format.major_type = WG_MAJOR_TYPE_UNKNOWN;
     pthread_cond_init(&stream->event_cond, NULL);
     pthread_cond_init(&stream->event_empty_cond, NULL);
@@ -803,24 +831,15 @@ static void free_stream(struct wg_parser_stream *stream)
     free(stream);
 }
 
-static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
+static bool stream_create_post_processing_elements(struct wg_parser_stream *stream)
 {
-    struct wg_parser *parser = user;
-    struct wg_parser_stream *stream;
+    struct wg_parser *parser = stream->parser;
     const char *name;
     GstCaps *caps;
-    int ret;
-
-    GST_LOG("parser %p, element %p, pad %p.", parser, element, pad);
 
-    if (gst_pad_is_linked(pad))
-        return;
-
-    caps = gst_pad_query_caps(pad, NULL);
+    caps = gst_pad_query_caps(stream->their_src, NULL);
     name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
-
-    if (!(stream = create_stream(parser)))
-        goto out;
+    gst_caps_unref(caps);
 
     if (!strcmp(name, "video/x-raw"))
     {
@@ -829,22 +848,22 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
         /* DirectShow can express interlaced video, but downstream filters can't
          * necessarily consume it. In particular, the video renderer can't. */
         if (!(deinterlace = create_element("deinterlace", "good")))
-            goto out;
+            return false;
 
         /* decodebin considers many YUV formats to be "raw", but some quartz
          * filters can't handle those. Also, videoflip can't handle all "raw"
          * formats either. Add a videoconvert to swap color spaces. */
         if (!(vconv = create_element("videoconvert", "base")))
-            goto out;
+            return false;
 
         /* GStreamer outputs RGB video top-down, but DirectShow expects bottom-up. */
         if (!(flip = create_element("videoflip", "good")))
-            goto out;
+            return false;
 
         /* videoflip does not support 15 and 16-bit RGB so add a second videoconvert
          * to do the final conversion. */
         if (!(vconv2 = create_element("videoconvert", "base")))
-            goto out;
+            return false;
 
         /* The bin takes ownership of these elements. */
         gst_bin_add(GST_BIN(parser->container), deinterlace);
@@ -873,7 +892,7 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
          * 64-bit formats either. Add an audioconvert to allow changing bit
          * depth and channel count. */
         if (!(convert = create_element("audioconvert", "base")))
-            goto out;
+            return false;
 
         gst_bin_add(GST_BIN(parser->container), convert);
         gst_element_sync_state_with_parent(convert);
@@ -882,15 +901,22 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
         stream->post_src = gst_element_get_static_pad(convert, "src");
     }
 
+    return true;
+}
+
+static bool stream_link_elements(struct wg_parser_stream *stream)
+{
+    int ret;
+
     if (stream->post_sink)
     {
-        if ((ret = gst_pad_link(pad, stream->post_sink)) < 0)
+        if ((ret = gst_pad_link(stream->their_src, stream->post_sink)) < 0)
        {
             GST_ERROR("Failed to link decodebin source pad to post-processing elements, error %s.",
                     gst_pad_link_get_name(ret));
             gst_object_unref(stream->post_sink);
             stream->post_sink = NULL;
-            goto out;
+            return false;
         }
 
         if ((ret = gst_pad_link(stream->post_src, stream->my_sink)) < 0)
@@ -901,25 +927,131 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
             stream->post_src = NULL;
             gst_object_unref(stream->post_sink);
             stream->post_sink = NULL;
-            goto out;
+            return false;
         }
     }
-    else if ((ret = gst_pad_link(pad, stream->my_sink)) < 0)
+    else if ((ret = gst_pad_link(stream->their_src, stream->my_sink)) < 0)
     {
         GST_ERROR("Failed to link decodebin source pad to our sink pad, error %s.",
                 gst_pad_link_get_name(ret));
-        goto out;
+        return false;
     }
 
-    gst_pad_set_active(stream->my_sink, 1);
+    return true;
+}
+
+static void stream_decodebin_no_more_pads_cb(GstElement *element, gpointer user)
+{
+    struct wg_parser_stream *stream = user;
+    struct wg_parser *parser = stream->parser;
+
+    GST_DEBUG("stream %p, parser %p, element %p.", stream, parser, element);
+
+    pthread_mutex_lock(&parser->mutex);
+    stream->no_more_pads = true;
+    pthread_mutex_unlock(&parser->mutex);
+    pthread_cond_signal(&parser->init_cond);
+}
+
+static void stream_decodebin_pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
+{
+    struct wg_parser_stream *stream = user;
+    struct wg_parser *parser = stream->parser;
+
+    GST_LOG("stream %p, parser %p, element %p, pad %p.", stream, parser, element, pad);
+
+    if (gst_pad_is_linked(pad))
+        return;
+
+    gst_object_ref(stream->their_src = pad);
-out:
+    if (!stream_create_post_processing_elements(stream))
+        return;
+    if (!stream_link_elements(stream))
+        return;
+    gst_pad_set_active(stream->my_sink, 1);
+}
+
+static bool stream_decodebin_create(struct wg_parser_stream *stream)
+{
+    struct wg_parser *parser = stream->parser;
+
+    GST_LOG("stream %p, parser %p.", stream, parser);
+
+    if (!(stream->decodebin = create_element("decodebin", "base")))
+        return false;
+    gst_bin_add(GST_BIN(parser->container), stream->decodebin);
+
+    if (parser->unlimited_buffering)
+    {
+        g_object_set(stream->decodebin, "max-size-buffers", G_MAXUINT, NULL);
+        g_object_set(stream->decodebin, "max-size-time", G_MAXUINT64, NULL);
+        g_object_set(stream->decodebin, "max-size-bytes", G_MAXUINT, NULL);
+    }
+    g_signal_connect(stream->decodebin, "pad-added", G_CALLBACK(stream_decodebin_pad_added_cb), stream);
+    g_signal_connect(stream->decodebin, "autoplug-select", G_CALLBACK(autoplug_select_cb), stream);
+    g_signal_connect(stream->decodebin, "no-more-pads", G_CALLBACK(stream_decodebin_no_more_pads_cb), stream);
+
+    pthread_mutex_lock(&parser->mutex);
+    stream->no_more_pads = false;
+    pthread_mutex_unlock(&parser->mutex);
+    gst_element_sync_state_with_parent(stream->decodebin);
+
+    GST_LOG("Created stream decodebin %p for %u.", stream->decodebin, stream->number);
+
+    return true;
+}
+
+static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user)
+{
+    struct wg_parser_stream *stream;
+    struct wg_parser *parser = user;
+    GstPad *stream_decodebin_sink;
+    GstCaps *caps;
+    int ret;
+
+    GST_LOG("parser %p, element %p, pad %p.", parser, element, pad);
+
+    if (gst_pad_is_linked(pad))
+        return;
+
+    if (!(stream = create_stream(parser)))
+        return;
+
+    caps = gst_pad_query_caps(pad, NULL);
+    wg_format_from_caps(&stream->stream_format, caps);
     gst_caps_unref(caps);
+
+    /* For compressed stream, create an extra decodebin to decode it. */
+    if (stream->stream_format.major_type != WG_MAJOR_TYPE_UNKNOWN
+            && stream->stream_format.major_type != WG_MAJOR_TYPE_VIDEO
+            && stream->stream_format.major_type != WG_MAJOR_TYPE_AUDIO)
+    {
+        if (!stream_decodebin_create(stream))
+        {
+            GST_ERROR("Unable to create decodebin for stream %u.", stream->number);
+            return;
+        }
+
+        stream_decodebin_sink = gst_element_get_static_pad(stream->decodebin, "sink");
+        if ((ret = gst_pad_link(pad, stream_decodebin_sink)) < 0)
+            GST_ERROR("Failed to link pads, error %d.", ret);
+        gst_object_unref(stream_decodebin_sink);
+
+        return;
+    }
+
+    gst_object_ref(stream->their_src = pad);
+    if (!stream_create_post_processing_elements(stream))
+        return;
+    if (!stream_link_elements(stream))
+        return;
+    gst_pad_set_active(stream->my_sink, 1);
 }
 
 static void pad_removed_cb(GstElement *element, GstPad *pad, gpointer user)
 {
     struct wg_parser *parser = user;
+    bool done = false;
     unsigned int i;
     char *name;
 
@@ -928,8 +1060,16 @@ static void pad_removed_cb(GstElement *element, GstPad *pad, gpointer user)
     for (i = 0; i < parser->stream_count; ++i)
     {
         struct wg_parser_stream *stream = parser->streams[i];
+        GstPad *stream_decodebin_sink_peer = NULL;
+        GstPad *stream_decodebin_sink = NULL;
+
+        if (stream->decodebin)
+        {
+            stream_decodebin_sink = gst_element_get_static_pad(stream->decodebin, "sink");
+            stream_decodebin_sink_peer = gst_pad_get_peer(stream_decodebin_sink);
+        }
-        if (stream->their_src == pad)
+        if (stream->their_src == pad || stream_decodebin_sink_peer == pad)
         {
             if (stream->post_sink)
                 gst_pad_unlink(stream->their_src, stream->post_sink);
             else
@@ -937,8 +1077,20 @@ static void pad_removed_cb(GstElement *element, GstPad *pad, gpointer user)
                 gst_pad_unlink(stream->their_src, stream->my_sink);
             gst_object_unref(stream->their_src);
             stream->their_src = NULL;
-            return;
+
+            if (stream_decodebin_sink_peer == pad)
+                gst_pad_unlink(pad, stream_decodebin_sink);
+
+            done = true;
         }
+
+        if (stream_decodebin_sink_peer)
+            gst_object_unref(stream_decodebin_sink_peer);
+        if (stream_decodebin_sink)
+            gst_object_unref(stream_decodebin_sink);
+
+        if (done)
+            return;
     }
 
     name = gst_pad_get_name(pad);
@@ -1377,7 +1529,7 @@ static NTSTATUS wg_parser_connect(void *args)
 
     pthread_mutex_lock(&parser->mutex);
 
-    while (!parser->no_more_pads && !parser->error)
+    while (!parser_no_more_pads(parser) && !parser->error)
         pthread_cond_wait(&parser->init_cond, &parser->mutex);
     if (parser->error)
     {
@@ -1564,6 +1716,7 @@ static BOOL decodebin_parser_init_gst(struct wg_parser *parser)
 
     g_signal_connect(element, "pad-added", G_CALLBACK(pad_added_cb), parser);
     g_signal_connect(element, "pad-removed", G_CALLBACK(pad_removed_cb), parser);
+    g_signal_connect(element, "autoplug-continue", G_CALLBACK(autoplug_continue_cb), parser);
     g_signal_connect(element, "autoplug-select", G_CALLBACK(autoplug_select_cb), parser);
     g_signal_connect(element, "no-more-pads", G_CALLBACK(no_more_pads_cb), parser);
- We record the codec format (aka the stream format) in `struct wg_parser_stream`, but this struct is created in pad_added_cb() by create_stream(). So we can't get the stream struct in autoplug_continue_cb(), because no wg_parser_stream exists yet when autoplug_continue_cb() is called. We should record the codec format when pad_added_cb() is called instead. Am I right?
Yes, that, sorry.
- If 1. is correct, do we still need to call wg_format_from_caps() in autoplug_continue_cb()?
No, I think it's clear that won't work. That suggestion of mine was broken in the first place :-(
Zebediah Figura (@zfigura) commented about dlls/winegstreamer/wg_format.c:
>      format->u.video_cinepak.fps_d = fps_d;
>  }
> 
> +static void wg_format_from_caps_video_wmv(struct wg_format *format, const GstCaps *caps)
> +{
> +    const GstStructure *structure = gst_caps_get_structure(caps, 0);
> +    gint width, height, fps_n, fps_d, wmv_version = 0;
> +    gchar format_buffer[4] = {'W','M','V','0'};
I missed this the first time, but this is missing a null terminator, so strcmp() won't [reliably] work on it.
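For illustration only, a standalone sketch (not the submitted patch) of how sizing the buffer for a terminator makes the strcmp() comparison reliable; the version value here is a made-up example:

```c
#include <stdio.h>
#include <string.h>

int main(void)
{
    /* Sketch: reserve a fifth byte for the null terminator so the buffer is a
     * proper C string by the time it is handed to strcmp(). */
    char format_buffer[5] = {'W', 'M', 'V', '0', 0};
    int wmv_version = 3; /* hypothetical value read from the "wmvversion" field */

    format_buffer[3] += wmv_version;

    /* Prints "WMV3 matches WMV3: 1". */
    printf("%s matches WMV3: %d\n", format_buffer, !strcmp(format_buffer, "WMV3"));
    return 0;
}
```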
Zebediah Figura (@zfigura) commented about dlls/winegstreamer/wg_parser.c:
> +static bool stream_decodebin_create(struct wg_parser_stream *stream)
> +{
> +    struct wg_parser *parser = stream->parser;
> +
> +    GST_LOG("stream %p, parser %p.", stream, parser);
> +
> +    if (!(stream->decodebin = create_element("decodebin", "base")))
> +        return false;
> +    gst_bin_add(GST_BIN(parser->container), stream->decodebin);
> +
> +    if (parser->unlimited_buffering)
> +    {
> +        g_object_set(stream->decodebin, "max-size-buffers", G_MAXUINT, NULL);
> +        g_object_set(stream->decodebin, "max-size-time", G_MAXUINT64, NULL);
> +        g_object_set(stream->decodebin, "max-size-bytes", G_MAXUINT, NULL);
> +    }
decodebin doesn't buffer without a demuxer (and we don't want it to anyway), so we shouldn't need these.
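If those g_object_set() calls are dropped, stream_decodebin_create() might be reduced to something like the following; this is a sketch against the quoted patch, not a tested revision:

```c
static bool stream_decodebin_create(struct wg_parser_stream *stream)
{
    struct wg_parser *parser = stream->parser;

    GST_LOG("stream %p, parser %p.", stream, parser);

    if (!(stream->decodebin = create_element("decodebin", "base")))
        return false;
    gst_bin_add(GST_BIN(parser->container), stream->decodebin);

    /* No max-size-* tweaks: this inner decodebin receives an already-demuxed
     * stream, so per the comment above it should not buffer anyway. */
    g_signal_connect(stream->decodebin, "pad-added", G_CALLBACK(stream_decodebin_pad_added_cb), stream);
    g_signal_connect(stream->decodebin, "autoplug-select", G_CALLBACK(autoplug_select_cb), stream);
    g_signal_connect(stream->decodebin, "no-more-pads", G_CALLBACK(stream_decodebin_no_more_pads_cb), stream);

    pthread_mutex_lock(&parser->mutex);
    stream->no_more_pads = false;
    pthread_mutex_unlock(&parser->mutex);
    gst_element_sync_state_with_parent(stream->decodebin);

    GST_LOG("Created stream decodebin %p for %u.", stream->decodebin, stream->number);

    return true;
}
```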