Performance issues can occur when a video output sample has a 2D buffer, because winegstreamer currently outputs only to a linear buffer, which must then be copied into the 2D buffer. Worse, a linear lock of the 2D buffer requires the current contents to be copied to a linear buffer, even when we intend to overwrite it, because MF linear buffers do not support write-only locking.
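The series addresses this by locking the sample's 2D buffer through IMF2DBuffer2 where possible (see the wg_sample.c changes in the final patch). As a rough, hypothetical sketch of the distinction described above — not part of the patches, and only illustrative (link against mfplat/mfuuid) — a caller that only intends to overwrite a buffer could do something like:

```c
/* Hypothetical sketch: overwrite a sample buffer through the 2D interface when it is
 * available. A write-only 2D lock avoids the read-back copy that a linear
 * IMFMediaBuffer_Lock() performs on a 2D buffer. */
#define COBJMACROS
#include <windows.h>
#include <mfapi.h>
#include <mfobjects.h>
#include <string.h>

static HRESULT overwrite_buffer(IMFMediaBuffer *buffer, const BYTE *src, DWORD src_size)
{
    IMF2DBuffer2 *buffer2d;
    BYTE *scanline0, *data;
    DWORD max_size;
    LONG pitch;
    HRESULT hr;

    if (SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer2, (void **)&buffer2d)))
    {
        /* Write-only 2D lock: the current contents are not copied out first. */
        if (SUCCEEDED(hr = IMF2DBuffer2_Lock2DSize(buffer2d, MF2DBuffer_LockFlags_Write,
                &scanline0, &pitch, &data, &max_size)))
        {
            memcpy(data, src, min(src_size, max_size));
            IMF2DBuffer2_Unlock2D(buffer2d);
        }
        IMF2DBuffer2_Release(buffer2d);
        return hr;
    }

    /* No 2D interface: only a plain linear lock is available, with no write-only flag. */
    if (SUCCEEDED(hr = IMFMediaBuffer_Lock(buffer, &data, &max_size, NULL)))
    {
        memcpy(data, src, min(src_size, max_size));
        hr = IMFMediaBuffer_Unlock(buffer);
    }
    return hr;
}
```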
-- v3:
winegstreamer: Support 2D sample buffer.
mf: Test color convert 2D buffers.
mf: Test WMV decoder 2D buffers.
mf: Test H.264 decoder 2D buffers.
mf: Test sample copier 2D buffers.
mfplat/tests: Test NV12 negative stride in MFCreateMediaBufferFromMediaType().
mf/tests: Remove todo for an H.264 decoder test.
mf/tests: Stop checking samples at the end of the expected array.
From: Conor McCarthy cmccarthy@codeweavers.com
Prevents crashing in test_h264_decoder_concat_streams() when too many samples are emitted. --- dlls/mf/tests/transform.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index f82dfa1b090..d39513752a8 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -1164,7 +1164,8 @@ static void enum_mf_media_buffers(IMFSample *sample, const struct sample_desc *s ok(hr == S_OK, "GetBufferByIndex returned %#lx\n", hr); ok(i < sample_desc->buffer_count, "got unexpected buffer\n");
- callback(buffer, sample_desc->buffers + i, context); + if (i < sample_desc->buffer_count) + callback(buffer, sample_desc->buffers + i, context);
IMFMediaBuffer_Release(buffer); winetest_pop_context(); @@ -1200,6 +1201,8 @@ static void enum_mf_samples(IMFCollection *samples, const struct sample_desc *co
IMFSample_Release(sample); winetest_pop_context(); + if (!state.sample.buffer_count) + break; } ok(hr == E_INVALIDARG, "GetElement returned %#lx\n", hr); }
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mf/tests/transform.c | 1 - 1 file changed, 1 deletion(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index d39513752a8..c70cceea785 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -5051,7 +5051,6 @@ static void test_h264_decoder(void) ok(ref == 1, "Release returned %ld\n", ref);
ret = check_mf_sample_collection(output_samples, &expect_output_sample_i420, L"i420frame.bmp"); - todo_wine /* wg_transform_set_output_format() should convert already processed samples instead of dropping */ ok(ret == 0, "got %lu%% diff\n", ret); IMFCollection_Release(output_samples);
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mfplat/tests/mfplat.c | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+)
diff --git a/dlls/mfplat/tests/mfplat.c b/dlls/mfplat/tests/mfplat.c index 21bcc86c8d1..34b58f942d9 100644 --- a/dlls/mfplat/tests/mfplat.c +++ b/dlls/mfplat/tests/mfplat.c @@ -8167,6 +8167,40 @@ static void test_MFCreateMediaBufferFromMediaType(void)
IMFMediaBuffer_Release(buffer);
+ hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_NV12); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)96 << 32 | 96); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 96); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer, (void **)&buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMF2DBuffer_Lock2D(buffer_2d, &data, &pitch); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(pitch == 128, "got pitch %ld.\n", pitch); + hr = IMF2DBuffer_Unlock2D(buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMF2DBuffer_Release(buffer_2d); + + IMFMediaBuffer_Release(buffer); + + /* A linear buffer is created for YUV if MF_MT_DEFAULT_STRIDE is negative */ + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -128); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer, (void **)&buffer_2d); + todo_wine + ok(hr == E_NOINTERFACE, "Unexpected hr %#lx.\n", hr); + if (SUCCEEDED(hr)) + IMF2DBuffer_Release(buffer_2d); + + IMFMediaBuffer_Release(buffer); + /* MF_MT_FRAME_SIZE doesn't work with compressed formats */ hr = IMFMediaType_DeleteItem(media_type, &MF_MT_FRAME_SIZE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr);
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mf/tests/transform.c | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index c70cceea785..ba221761594 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -1756,7 +1756,7 @@ static BOOL is_sample_copier_available_type(IMFMediaType *type) return IsEqualGUID(&major, &MFMediaType_Video) || IsEqualGUID(&major, &MFMediaType_Audio); }
-static void test_sample_copier(void) +static void test_sample_copier(BOOL use_2d_buffer) { static const struct attribute_desc expect_transform_attributes[] = { @@ -1779,8 +1779,13 @@ static void test_sample_copier(void) win_skip("MFCreateSampleCopierMFT() is not available.\n"); return; } + if (use_2d_buffer && !pMFCreateMediaBufferFromMediaType) + { + win_skip("MFCreateMediaBufferFromMediaType() is unsupported.\n"); + return; + }
- winetest_push_context("copier"); + winetest_push_context("copier %s", use_2d_buffer ? "2d" : "1d");
hr = pMFCreateSampleCopierMFT(&copier); ok(hr == S_OK, "Failed to create sample copier, hr %#lx.\n", hr); @@ -1910,7 +1915,10 @@ static void test_sample_copier(void) ok(!flags, "Unexpected flags %#lx.\n", flags);
/* Pushing samples. */ - hr = MFCreateAlignedMemoryBuffer(output_info.cbSize, output_info.cbAlignment, &media_buffer); + if (use_2d_buffer) + hr = pMFCreateMediaBufferFromMediaType(mediatype, 0, 0, 0, &media_buffer); + else + hr = MFCreateAlignedMemoryBuffer(output_info.cbSize, output_info.cbAlignment, &media_buffer); ok(hr == S_OK, "Failed to create media buffer, hr %#lx.\n", hr);
hr = IMFSample_AddBuffer(sample, media_buffer); @@ -11124,7 +11132,8 @@ START_TEST(transform)
init_functions();
- test_sample_copier(); + test_sample_copier(FALSE); + test_sample_copier(TRUE); test_sample_copier_output_processing(); test_aac_encoder(); test_aac_decoder();
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mf/tests/i420frame-2d.bmp | Bin 0 -> 67638 bytes dlls/mf/tests/resource.rc | 4 ++ dlls/mf/tests/transform.c | 108 +++++++++++++++++++++++++++++---- 3 files changed, 99 insertions(+), 13 deletions(-) create mode 100644 dlls/mf/tests/i420frame-2d.bmp
diff --git a/dlls/mf/tests/i420frame-2d.bmp b/dlls/mf/tests/i420frame-2d.bmp new file mode 100644 index 0000000000000000000000000000000000000000..90889b2dbe858574bc4ab1afe1ace6fccea3bd7a GIT binary patch literal 67638 zcmZ?rH9Non24)Nl3>pj!3=Iqn3<(Sj42%p4U~vYhJcI)wUpF-TAH@R~24~Nn{XdX+ zH0}p3?m=NckZ}*P8{~#jJaB0M6bA!|N8^6r;vN+C0~z-qyFqRk#RHcHKyfgTcr@+@ zF782LKag<`vK!=vQ9N*I02BuUiAUpp;Nl(>_5&IBAiF_s7{vpZ20(Ezka#ri2QKbG zVLy;@53(ENhEY6lX#f-l1BplDe&FIB6!rrd_aM732~7GwiU%$XKyfgTcr@+@F782L zKag<`vK!=vQ9N*I02BuUiAUpp;Nl(>_5&IBAiF_s7{vpZ20(Ezka#ri2QKbGVLy;@ z53(ENhEY6lX#f-l1BplDe&FIB6!rrd_aM7LZWzS_mj*y_FpzjO?guXJL190TaSyT^ z<c3i^aA^P(2Lp*m<9^`c9u)Qi8TTN&S1l9xKZ*w~3_x)(ka#ri2QKbGVLy;@53(EN zhEY6lX#f-l1BplDe&FIB6!rrd_aM7LZWzS_mj*y_FpzjO?guXJL190TaSyT^<c3i^ zaA^P(2Lp*m<9^`c9u)Qi8TTN&L2eku1D6IsaWIg0H0}p3?m=NckZ}*P8{~#jJaB0M z6bA!|N8^6r;vN+C0~z-qyC<3{{U5~x7Y3j>7)U%C_X8LAps*jvxChw{a>FPdxHJHY zgMq}OaX)Zz4+{H%jC+vXAUBNSflC9RI2cGg8utSi_n@#J$hZgD4RXUM9=J3Bii3f~ zqj5iQaSsanfsA{Q-5@uN;(<#8pg0&vJR0`{7x$pBAIP`|*$r~TC?2>p0E&Zw#G`RP zaB&X``+<ymklk6Q?)@Ld0~ZFMI2cGg8utSi_n@#J$hZgD4RXUM9=J3Bii3f~qj5iQ zaSsanfsA{Q-5@uN;(<#8pg0&vJR0`{7x$pBAIP`|*$r~TC?2>p0E&Zw#G`RPaB&X` z`+<ymkli3RjN*Yy1E4q<NIV+%0~hz8uph{{2iXmB!zdoOGysZ&fyAS6KX7pm3j2YK zdyw6MQ6K(~;(-eTP#g>-9*z5fi+fPm4`kef>;}1E6c1b)0L8&T;?cMtxVQ&}{XoV& z$Zn7uM)AO<0Z<$aBp!|Xfs1=k*bijfgX{*mVH6Ks8UV$?K;qH3AGo*&h5bOrJ;-j5 z8%FWKr2$YJ3?v?n`+<vlP}mP-+=J`}xnUF!Tp9qy!9e2CxF5K<2ZjAW#y!Yx%@2A1 zNAbXg0Voaz5|76Hz{Nc%><2RLL3V@OFp38*4S?ccAn|D24_w@X!hRs*9%MJj4WoGA z(f}w91`?0P{lLXNDC`F^?m>2g+%Sp<E)9U<U?A~m+z(vbgTj6w;~r!;$PJ@-;L-pn z4h9mB#{Iy>Jt*u4GVVckgWNER2QCeO;$R^0XxtB6+=Ie?AmbinH^>d6c;M0iC=Lb^ zkH-DL#XTtO2QuzKc7xn7iU%$YfZ|{v@o3x+T-<}gejwu>WH-nSqj=!b04NRy5|76H zz{Nc%><2RLL3V@OFp38*4S?ccAn|D24_w@X!hRs*9%MJj4WoGA(f}w91`?0P{lLXN zDC`F^?m>2g+%Sp<E)9U<U?A~m+z(vbgTj6w;~r%9YZ`;I1OSb#e@(#pv!nLYHUvh) zAAbnYHuPVQrhojwKjQQMB^m<)!GD&<)?XU6AAbmphCgjX0DtJ89ZmnV4gS&mPapxV zqA?&~{aG4YziQNe{2?$J{<IAN{Goq#H2u>y_($_Ufdn{_#(;qJXK8Hx#8Lb4hrnp~ z(>4U~hyK~o^iSL1AI<*+5?~gM0RijJ(%AZ}QTy?Sz-ai>HU#j8{@Ky=Put)h&Hn@v zU?7bF0qf7w*!sXx`|*dsX!z4M1n`Ic+0pb*+u$F~{{#}CCXE3B>(A2Idd*S$@rS@@ z_|rB7@Q41{(ezK-;2+KZ1QH;PgMT#qN5da)2#lsb+J*q$z&|@$|Ijw{H3xn9*FYjb zPfsLjZy42&8Ug|&0#w~+&yuKJU{pV92rMHJpz1z*mPGB#M)jkHfEkGZRrlGmBx*Ms z)sGqir$_{-y3d{^QTwS;{iq=jMIu1eefBJg+M`DGqlUl-5&^32vu8=v{$W%<Y6y&m zA88?g8hVtcztQ$PX(50f{-o({!07)>A`zh5Ponloqxw<7X#a_{5I_w*%KM*6Bm#8% zNz|@1svk83?vV&kb)P*;qV{{E`cXrGwA%l}X!{X0_$Y3F=aC4|tUpVl_PkO3s39;K Pex!u}YUrJ%IQ;<t8njuL
literal 0 HcmV?d00001
diff --git a/dlls/mf/tests/resource.rc b/dlls/mf/tests/resource.rc index dff5719dd3b..2efb52d91ae 100644 --- a/dlls/mf/tests/resource.rc +++ b/dlls/mf/tests/resource.rc @@ -121,6 +121,10 @@ nv12frame-flip-2d.bmp RCDATA nv12frame-flip-2d.bmp /* @makedep: i420frame.bmp */ i420frame.bmp RCDATA i420frame.bmp
+/* Generated from running the tests on Windows */ +/* @makedep: i420frame.bmp */ +i420frame-2d.bmp RCDATA i420frame-2d.bmp + /* Generated from running the tests on Windows */ /* @makedep: rgb32frame.bmp */ rgb32frame.bmp RCDATA rgb32frame.bmp diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index ba221761594..fcee5814bb0 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -4477,7 +4477,7 @@ failed: CoUninitialize(); }
-static void test_h264_decoder(void) +static void test_h264_decoder(BOOL use_2d_buffer) { const GUID *const class_id = &CLSID_MSH264DecoderMFT; const struct transform_info expect_mft_info = @@ -4519,7 +4519,7 @@ static void test_h264_decoder(void) ATTR_UINT32(CODECAPI_AVDecVideoAcceleration_H264, 1), {0}, }; - static const DWORD input_width = 120, input_height = 248; + static const DWORD input_width = 120, input_height = 248, aligned_width_2d = 128; const media_type_desc default_outputs[] = { { @@ -4763,6 +4763,20 @@ static void test_h264_decoder(void) .cbSize = 0x1000, };
+ const struct attribute_desc nv12_default_stride[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12, .required = TRUE), + ATTR_RATIO(MF_MT_FRAME_SIZE, actual_width, actual_height, .required = TRUE), + {0}, + }; + const struct attribute_desc i420_default_stride[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_I420, .required = TRUE), + ATTR_RATIO(MF_MT_FRAME_SIZE, actual_width, actual_height, .required = TRUE), + {0}, + }; const struct attribute_desc output_sample_attributes[] = { ATTR_UINT32(MFSampleExtension_CleanPoint, 1), @@ -4780,6 +4794,25 @@ static void test_h264_decoder(void) .sample_time = 0, .sample_duration = 333667, .buffer_count = 1, .buffers = &output_buffer_desc_nv12, }; + const struct sample_desc output_sample_desc_nv12_1d = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 333667, + .total_length = aligned_width_2d * actual_height * 3 / 2, + .buffer_count = 1, .buffers = &output_buffer_desc_nv12, .todo_length = TRUE, + }; + const struct buffer_desc output_buffer_desc_nv12_2d = + { + .length = aligned_width_2d * actual_height * 3 / 2, + .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_nv12, .size = {.cx = aligned_width_2d, .cy = actual_height}, + }; + const struct sample_desc output_sample_desc_nv12_2d = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 333667, + .buffer_count = 1, .buffers = &output_buffer_desc_nv12_2d, + }; const struct buffer_desc output_buffer_desc_i420 = { .length = actual_width * actual_height * 3 / 2, @@ -4792,9 +4825,29 @@ static void test_h264_decoder(void) .sample_time = 333667, .sample_duration = 333667, .buffer_count = 1, .buffers = &output_buffer_desc_i420, }; + const struct sample_desc output_sample_desc_i420_1d = + { + .attributes = output_sample_attributes, + .sample_time = 333667, .sample_duration = 333667, + .total_length = aligned_width_2d * actual_height * 3 / 2, + .buffer_count = 1, .buffers = &output_buffer_desc_i420, .todo_length = TRUE, + }; + const struct buffer_desc output_buffer_desc_i420_2d = + { + .length = aligned_width_2d * actual_height * 3 / 2, + .compare = compare_i420, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_i420, .size = {.cx = aligned_width_2d, .cy = actual_height}, + }; + const struct sample_desc output_sample_desc_i420_2d = + { + .attributes = output_sample_attributes, + .sample_time = 333667, .sample_duration = 333667, + .buffer_count = 1, .buffers = &output_buffer_desc_i420_2d, + };
MFT_REGISTER_TYPE_INFO input_type = {MFMediaType_Video, MFVideoFormat_H264}; MFT_REGISTER_TYPE_INFO output_type = {MFMediaType_Video, MFVideoFormat_NV12}; + const struct attribute_desc *sample_attr_desc; IMFSample *input_sample, *output_sample; const BYTE *h264_encoded_data; IMFCollection *output_samples; @@ -4807,10 +4860,16 @@ static void test_h264_decoder(void) DWORD flags; HRESULT hr;
+ if (use_2d_buffer && !pMFCreateMediaBufferFromMediaType) + { + win_skip("MFCreateMediaBufferFromMediaType() is unsupported.\n"); + return; + } + hr = CoInitialize(NULL); ok(hr == S_OK, "Failed to initialize, hr %#lx.\n", hr);
- winetest_push_context("h264dec"); + winetest_push_context("h264dec %s", use_2d_buffer ? "2d" : "1d");
if (!check_mft_enum(MFT_CATEGORY_VIDEO_DECODER, &input_type, &output_type, class_id)) goto failed; @@ -4919,10 +4978,11 @@ static void test_h264_decoder(void) ok(output_status == 0, "got output[0].dwStatus %#lx\n", output_status);
i = 0; + sample_attr_desc = use_2d_buffer ? nv12_default_stride : NULL; input_sample = next_h264_sample(&h264_encoded_data, &h264_encoded_data_len); while (1) { - output_sample = create_sample(NULL, output_info.cbSize); + output_sample = create_sample_(NULL, actual_width * actual_height * 3 / 2, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); if (hr != MF_E_TRANSFORM_NEED_MORE_INPUT) break;
@@ -4992,7 +5052,7 @@ static void test_h264_decoder(void) hr = MFCreateCollection(&output_samples); ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr);
- output_sample = create_sample(NULL, output_info.cbSize); + output_sample = create_sample_(NULL, actual_width * actual_height * 3 / 2, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); ok(hr == S_OK, "ProcessOutput returned %#lx\n", hr); ok(output_status == 0, "got output[0].dwStatus %#lx\n", output_status); @@ -5001,8 +5061,18 @@ static void test_h264_decoder(void) ref = IMFSample_Release(output_sample); ok(ref == 1, "Release returned %ld\n", ref);
- ret = check_mf_sample_collection(output_samples, &output_sample_desc_nv12, L"nv12frame.bmp"); - ok(ret == 0, "got %lu%% diff\n", ret); + if (!use_2d_buffer) + { + ret = check_mf_sample_collection(output_samples, &output_sample_desc_nv12, L"nv12frame.bmp"); + ok(ret == 0, "got %lu%% diff\n", ret); + } + else + { + ret = check_mf_sample_collection(output_samples, &output_sample_desc_nv12_1d, L"nv12frame.bmp"); + ok(ret == 0, "got %lu%% diff\n", ret); + ret = check_2d_mf_sample_collection(output_samples, &output_sample_desc_nv12_2d, L"nv12frame-2d.bmp"); + ok(ret == 0, "2d got %lu%% diff\n", ret); + } IMFCollection_Release(output_samples);
/* we can change it, but only with the correct frame size */ @@ -5019,7 +5089,8 @@ static void test_h264_decoder(void)
check_mft_get_output_current_type_(__LINE__, transform, expect_new_output_type_desc, FALSE, TRUE);
- output_sample = create_sample(NULL, actual_width * actual_height * 2); + sample_attr_desc = use_2d_buffer ? i420_default_stride : NULL; + output_sample = create_sample_(NULL, actual_width * actual_height * 3 / 2, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); todo_wine ok(hr == MF_E_TRANSFORM_STREAM_CHANGE, "ProcessOutput returned %#lx\n", hr); @@ -5049,7 +5120,7 @@ static void test_h264_decoder(void) hr = MFCreateCollection(&output_samples); ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr);
- output_sample = create_sample(NULL, actual_width * actual_height * 2); + output_sample = create_sample_(NULL, actual_width * actual_height * 3 / 2, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); ok(hr == S_OK, "ProcessOutput returned %#lx\n", hr); ok(output_status == 0, "got output[0].dwStatus %#lx\n", output_status); @@ -5058,11 +5129,21 @@ static void test_h264_decoder(void) ref = IMFSample_Release(output_sample); ok(ref == 1, "Release returned %ld\n", ref);
- ret = check_mf_sample_collection(output_samples, &expect_output_sample_i420, L"i420frame.bmp"); - ok(ret == 0, "got %lu%% diff\n", ret); + if (!use_2d_buffer) + { + ret = check_mf_sample_collection(output_samples, &expect_output_sample_i420, L"i420frame.bmp"); + ok(ret == 0, "got %lu%% diff\n", ret); + } + else + { + ret = check_mf_sample_collection(output_samples, &output_sample_desc_i420_1d, L"i420frame.bmp"); + ok(ret == 0, "got %lu%% diff\n", ret); + ret = check_2d_mf_sample_collection(output_samples, &output_sample_desc_i420_2d, L"i420frame-2d.bmp"); + ok(ret == 0, "2d got %lu%% diff\n", ret); + } IMFCollection_Release(output_samples);
- output_sample = create_sample(NULL, actual_width * actual_height * 2); + output_sample = create_sample_(NULL, actual_width * actual_height * 3 / 2, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); todo_wine_if(hr == S_OK) /* when VA-API plugin is used */ ok(hr == MF_E_TRANSFORM_NEED_MORE_INPUT, "ProcessOutput returned %#lx\n", hr); @@ -11143,7 +11224,8 @@ START_TEST(transform) test_wma_decoder_dmo_input_type(); test_wma_decoder_dmo_output_type(); test_h264_encoder(); - test_h264_decoder(); + test_h264_decoder(FALSE); + test_h264_decoder(TRUE); test_h264_decoder_timestamps(); test_wmv_encoder(); test_wmv_decoder();
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mf/tests/transform.c | 75 ++++++++++++++++++++++++++++++++++----- 1 file changed, 66 insertions(+), 9 deletions(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index fcee5814bb0..c24a54044be 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -6654,7 +6654,7 @@ failed: CoUninitialize(); }
-static void test_wmv_decoder(void) +static void test_wmv_decoder(BOOL use_2d_buffer) { const GUID *const class_id = &CLSID_CWMVDecMediaObject; const struct transform_info expect_mft_info = @@ -6737,7 +6737,7 @@ static void test_wmv_decoder(void) {ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_VC1S)}, }; static const MFVideoArea actual_aperture = {.Area={96,96}}; - static const DWORD actual_width = 96, actual_height = 96; + static const DWORD actual_width = 96, actual_height = 96, nv12_aligned_width = 128; const struct attribute_desc expect_output_attributes[] = { ATTR_BLOB(MF_MT_GEOMETRIC_APERTURE, &actual_aperture, sizeof(actual_aperture)), @@ -6984,6 +6984,12 @@ static void test_wmv_decoder(void) .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, .dump = dump_nv12, .size = {.cx = actual_width, .cy = actual_height}, }; + const struct buffer_desc output_buffer_desc_nv12_2d = + { + .length = nv12_aligned_width * actual_height * 3 / 2, + .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_nv12, .size = {.cx = nv12_aligned_width, .cy = actual_height}, + }; const struct buffer_desc output_buffer_desc_rgb = { .length = actual_width * actual_height * 4, @@ -6996,6 +7002,12 @@ static void test_wmv_decoder(void) .sample_time = 0, .sample_duration = 333333, .buffer_count = 1, .buffers = &output_buffer_desc_nv12, }; + const struct sample_desc output_sample_desc_nv12_2d = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 333333, + .buffer_count = 1, .buffers = &output_buffer_desc_nv12_2d, + }; const struct sample_desc output_sample_desc_rgb = { .attributes = output_sample_attributes, @@ -7010,8 +7022,11 @@ static void test_wmv_decoder(void) const MFT_INPUT_STREAM_INFO *expect_input_info; const MFT_OUTPUT_STREAM_INFO *expect_output_info; const struct sample_desc *output_sample_desc; + const struct sample_desc *output_sample_desc_2d; const WCHAR *result_bitmap; + const WCHAR *result_bitmap_2d; ULONG delta; + BOOL skip_; BOOL new_transform; BOOL todo; } @@ -7024,7 +7039,9 @@ static void test_wmv_decoder(void) .expect_input_info = &expect_input_info, .expect_output_info = &expect_output_info, .output_sample_desc = &output_sample_desc_nv12, + .output_sample_desc_2d = &output_sample_desc_nv12_2d, .result_bitmap = L"nv12frame.bmp", + .result_bitmap_2d = L"nv12frame-2d.bmp", .delta = 0, },
@@ -7037,6 +7054,19 @@ static void test_wmv_decoder(void) .output_sample_desc = &output_sample_desc_nv12, .result_bitmap = L"nv12frame.bmp", .delta = 0, + .skip_ = use_2d_buffer, /* negative stride is invalid for 2D YUV */ + }, + + { + /* WMV1 -> RGB (negative stride) instead of YUV for 2D */ + .output_type_desc = output_type_desc_rgb_negative_stride, + .expect_output_type_desc = expect_output_type_desc_rgb_negative_stride, + .expect_input_info = &expect_input_info_rgb, + .expect_output_info = &expect_output_info_rgb, + .output_sample_desc = &output_sample_desc_rgb, + .result_bitmap = L"rgb32frame-flip.bmp", + .delta = 5, + .skip_ = !use_2d_buffer, },
{ @@ -7046,8 +7076,9 @@ static void test_wmv_decoder(void) .expect_input_info = &expect_input_info_rgb, .expect_output_info = &expect_output_info_rgb, .output_sample_desc = &output_sample_desc_rgb, - .result_bitmap = L"rgb32frame-flip.bmp", + .result_bitmap = use_2d_buffer ? L"rgb32frame.bmp" : L"rgb32frame-flip.bmp", .delta = 5, + .todo = use_2d_buffer, },
{ @@ -7068,8 +7099,9 @@ static void test_wmv_decoder(void) .expect_input_info = &expect_input_info_rgb, .expect_output_info = &expect_output_info_rgb, .output_sample_desc = &output_sample_desc_rgb, - .result_bitmap = L"rgb32frame-flip.bmp", + .result_bitmap = use_2d_buffer ? L"rgb32frame.bmp" : L"rgb32frame-flip.bmp", .delta = 5, + .todo = use_2d_buffer, },
{ @@ -7091,8 +7123,9 @@ static void test_wmv_decoder(void) .expect_input_info = &expect_input_info_rgb, .expect_output_info = &expect_output_info_rgb, .output_sample_desc = &output_sample_desc_rgb, - .result_bitmap = L"rgb32frame.bmp", + .result_bitmap = use_2d_buffer ? L"rgb32frame-flip.bmp" : L"rgb32frame.bmp", .delta = 5, + .todo = use_2d_buffer, },
{ @@ -7122,6 +7155,7 @@ static void test_wmv_decoder(void)
MFT_REGISTER_TYPE_INFO output_type = {MFMediaType_Video, MFVideoFormat_NV12}; MFT_REGISTER_TYPE_INFO input_type = {MFMediaType_Video, MFVideoFormat_WMV1}; + const struct attribute_desc *sample_attr_desc; IMFSample *input_sample, *output_sample; MFT_OUTPUT_STREAM_INFO output_info; MFT_INPUT_STREAM_INFO input_info; @@ -7134,10 +7168,16 @@ static void test_wmv_decoder(void) ULONG i, j, ret, ref; HRESULT hr;
+ if (use_2d_buffer && !pMFCreateMediaBufferFromMediaType) + { + win_skip("MFCreateMediaBufferFromMediaType() is unsupported.\n"); + return; + } + hr = CoInitialize(NULL); ok(hr == S_OK, "Failed to initialize, hr %#lx.\n", hr);
- winetest_push_context("wmvdec"); + winetest_push_context("wmvdec %s", use_2d_buffer ? "2d" : "1d");
if (!has_video_processor) { @@ -7240,6 +7280,12 @@ static void test_wmv_decoder(void) check_mft_set_input_type(transform, input_type_desc, S_OK); }
+ if (transform_tests[j].skip_) + { + winetest_pop_context(); + continue; + } + check_mft_set_output_type_required(transform, transform_tests[j].output_type_desc); check_mft_set_output_type(transform, transform_tests[j].output_type_desc, S_OK); check_mft_get_output_current_type_(__LINE__, transform, transform_tests[j].expect_output_type_desc, FALSE, TRUE); @@ -7282,7 +7328,8 @@ static void test_wmv_decoder(void) hr = MFCreateCollection(&output_samples); ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr);
- output_sample = create_sample(NULL, transform_tests[j].expect_output_info->cbSize); + sample_attr_desc = use_2d_buffer ? transform_tests[j].output_type_desc : NULL; + output_sample = create_sample_(NULL, transform_tests[j].expect_output_info->cbSize, sample_attr_desc); for (i = 0; SUCCEEDED(hr = check_mft_process_output(transform, output_sample, &output_status)); i++) { winetest_push_context("%lu", i); @@ -7291,7 +7338,7 @@ static void test_wmv_decoder(void) ok(hr == S_OK, "AddElement returned %#lx\n", hr); ref = IMFSample_Release(output_sample); ok(ref == 1, "Release returned %ld\n", ref); - output_sample = create_sample(NULL, transform_tests[j].expect_output_info->cbSize); + output_sample = create_sample_(NULL, transform_tests[j].expect_output_info->cbSize, sample_attr_desc); winetest_pop_context(); } ok(hr == MF_E_TRANSFORM_NEED_MORE_INPUT, "ProcessOutput returned %#lx\n", hr); @@ -7303,6 +7350,15 @@ static void test_wmv_decoder(void) transform_tests[j].result_bitmap); todo_wine_if(transform_tests[j].todo) ok(ret <= transform_tests[j].delta, "got %lu%% diff\n", ret); + if (use_2d_buffer) + { + ret = check_2d_mf_sample_collection(output_samples, transform_tests[j].output_sample_desc_2d + ? transform_tests[j].output_sample_desc_2d : transform_tests[j].output_sample_desc, + transform_tests[j].result_bitmap_2d ? transform_tests[j].result_bitmap_2d + : transform_tests[j].result_bitmap); + todo_wine_if(transform_tests[j].todo) + ok(ret <= transform_tests[j].delta, "got %lu%% diff\n", ret); + } IMFCollection_Release(output_samples);
hr = IMFTransform_SetOutputType(transform, 0, NULL, 0); @@ -11228,7 +11284,8 @@ START_TEST(transform) test_h264_decoder(TRUE); test_h264_decoder_timestamps(); test_wmv_encoder(); - test_wmv_decoder(); + test_wmv_decoder(FALSE); + test_wmv_decoder(TRUE); test_wmv_decoder_timestamps(); test_wmv_decoder_dmo_input_type(); test_wmv_decoder_dmo_output_type();
From: Conor McCarthy cmccarthy@codeweavers.com
--- dlls/mf/tests/transform.c | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index c24a54044be..6a16332286f 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -8408,7 +8408,7 @@ static void test_wmv_decoder_media_object(void) winetest_pop_context(); }
-static void test_color_convert(void) +static void test_color_convert(BOOL use_2d_buffer) { const GUID *const class_id = &CLSID_CColorConvertDMO; const struct transform_info expect_mft_info = @@ -8691,6 +8691,7 @@ static void test_color_convert(void)
MFT_REGISTER_TYPE_INFO output_type = {MFMediaType_Video, MFVideoFormat_NV12}; MFT_REGISTER_TYPE_INFO input_type = {MFMediaType_Video, MFVideoFormat_I420}; + const struct attribute_desc *sample_attr_desc; IMFSample *input_sample, *output_sample; IMFCollection *output_samples; DWORD length, output_status; @@ -8701,10 +8702,16 @@ static void test_color_convert(void) ULONG i, ret, ref; HRESULT hr;
+ if (use_2d_buffer && !pMFCreateMediaBufferFromMediaType) + { + win_skip("MFCreateMediaBufferFromMediaType() is unsupported.\n"); + return; + } + hr = CoInitialize(NULL); ok(hr == S_OK, "Failed to initialize, hr %#lx.\n", hr);
- winetest_push_context("colorconv"); + winetest_push_context("colorconv %s", use_2d_buffer ? "2d" : "1d");
if (!check_mft_enum(MFT_CATEGORY_VIDEO_EFFECT, &input_type, &output_type, class_id)) goto failed; @@ -8795,7 +8802,8 @@ static void test_color_convert(void) hr = MFCreateCollection(&output_samples); ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr);
- output_sample = create_sample(NULL, output_info.cbSize); + sample_attr_desc = use_2d_buffer ? color_conversion_tests[i].output_type_desc : NULL; + output_sample = create_sample_(NULL, output_info.cbSize, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); ok(hr == S_OK, "ProcessOutput returned %#lx\n", hr); ok(output_status == 0, "got output[0].dwStatus %#lx\n", output_status); @@ -8806,15 +8814,20 @@ static void test_color_convert(void)
ret = check_mf_sample_collection(output_samples, &output_sample_desc, color_conversion_tests[i].result_bitmap); ok(ret <= color_conversion_tests[i].delta, "got %lu%% diff\n", ret); + if (use_2d_buffer) + { + ret = check_2d_mf_sample_collection(output_samples, &output_sample_desc, color_conversion_tests[i].result_bitmap); + ok(ret <= color_conversion_tests[i].delta, "got %lu%% diff\n", ret); + } IMFCollection_Release(output_samples);
- output_sample = create_sample(NULL, output_info.cbSize); + output_sample = create_sample_(NULL, output_info.cbSize, sample_attr_desc); hr = check_mft_process_output(transform, output_sample, &output_status); ok(hr == MF_E_TRANSFORM_NEED_MORE_INPUT, "ProcessOutput returned %#lx\n", hr); ok(output_status == 0, "got output[0].dwStatus %#lx\n", output_status); hr = IMFSample_GetTotalLength(output_sample, &length); ok(hr == S_OK, "GetTotalLength returned %#lx\n", hr); - ok(length == 0, "got length %lu\n", length); + ok(length == 0 || broken(use_2d_buffer && length == output_info.cbSize), "got length %lu\n", length); ret = IMFSample_Release(output_sample); ok(ret == 0, "Release returned %lu\n", ret); winetest_pop_context(); @@ -11292,7 +11305,8 @@ START_TEST(transform) test_wmv_decoder_dmo_get_size_info(); test_wmv_decoder_media_object(); test_audio_convert(); - test_color_convert(); + test_color_convert(FALSE); + test_color_convert(TRUE); test_video_processor(FALSE); test_video_processor(TRUE); test_mp3_decoder();
From: Conor McCarthy cmccarthy@codeweavers.com
Performance issues can occur when a video output sample has a 2D buffer, because winegstreamer currently outputs only to a linear buffer, which must then be copied into the 2D buffer. Worse, a linear lock of the 2D buffer requires the current contents to be copied to a linear buffer, even when we intend to overwrite it, because MF linear buffers do not support write-only locking. --- dlls/mf/tests/transform.c | 2 +- dlls/winegstreamer/aac_decoder.c | 2 +- dlls/winegstreamer/color_convert.c | 7 ++- dlls/winegstreamer/gst_private.h | 3 +- dlls/winegstreamer/resampler.c | 2 +- dlls/winegstreamer/unixlib.h | 2 + dlls/winegstreamer/video_decoder.c | 3 +- dlls/winegstreamer/video_encoder.c | 2 +- dlls/winegstreamer/video_processor.c | 5 ++- dlls/winegstreamer/wg_sample.c | 66 +++++++++++++++++++++++++--- dlls/winegstreamer/wg_transform.c | 42 +++++++++++++++--- dlls/winegstreamer/wma_decoder.c | 2 +- 12 files changed, 115 insertions(+), 23 deletions(-)
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index 6a16332286f..53ed83bb617 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -4799,7 +4799,7 @@ static void test_h264_decoder(BOOL use_2d_buffer) .attributes = output_sample_attributes, .sample_time = 0, .sample_duration = 333667, .total_length = aligned_width_2d * actual_height * 3 / 2, - .buffer_count = 1, .buffers = &output_buffer_desc_nv12, .todo_length = TRUE, + .buffer_count = 1, .buffers = &output_buffer_desc_nv12, }; const struct buffer_desc output_buffer_desc_nv12_2d = { diff --git a/dlls/winegstreamer/aac_decoder.c b/dlls/winegstreamer/aac_decoder.c index 9e6c5c20cb2..b78102bc1b0 100644 --- a/dlls/winegstreamer/aac_decoder.c +++ b/dlls/winegstreamer/aac_decoder.c @@ -558,7 +558,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, if (!samples->pSample) return E_INVALIDARG;
- if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, &samples->dwStatus, NULL))) + if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, 0, &samples->dwStatus, NULL))) wg_sample_queue_flush(decoder->wg_sample_queue, false); else samples->dwStatus = MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE; diff --git a/dlls/winegstreamer/color_convert.c b/dlls/winegstreamer/color_convert.c index 938d8b7b6f2..c5c7141f4ce 100644 --- a/dlls/winegstreamer/color_convert.c +++ b/dlls/winegstreamer/color_convert.c @@ -651,6 +651,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) { struct color_convert *impl = impl_from_IMFTransform(iface); + MFT_OUTPUT_STREAM_INFO info; HRESULT hr;
TRACE("iface %p, flags %#lx, count %lu, samples %p, status %p.\n", iface, flags, count, samples, status); @@ -665,7 +666,11 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, if (!samples->pSample) return E_INVALIDARG;
- if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, &samples->dwStatus, NULL))) + if (FAILED(hr = IMFTransform_GetOutputStreamInfo(iface, 0, &info))) + return hr; + + if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, + info.cbSize, &samples->dwStatus, NULL))) wg_sample_queue_flush(impl->wg_sample_queue, false);
return hr; diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index d50c9fa63fb..213a0a15f3a 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -166,7 +166,8 @@ HRESULT wg_transform_push_quartz(wg_transform_t transform, struct wg_sample *sam struct wg_sample_queue *queue); HRESULT wg_transform_push_dmo(wg_transform_t transform, IMediaBuffer *media_buffer, DWORD flags, REFERENCE_TIME time_stamp, REFERENCE_TIME time_length, struct wg_sample_queue *queue); -HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, DWORD *flags, bool *preserve_timestamps); +HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, + DWORD plane_size, DWORD *flags, bool *preserve_timestamps); HRESULT wg_transform_read_quartz(wg_transform_t transform, struct wg_sample *sample); HRESULT wg_transform_read_dmo(wg_transform_t transform, DMO_OUTPUT_DATA_BUFFER *buffer);
diff --git a/dlls/winegstreamer/resampler.c b/dlls/winegstreamer/resampler.c index 910d109c2c6..287d02a8015 100644 --- a/dlls/winegstreamer/resampler.c +++ b/dlls/winegstreamer/resampler.c @@ -536,7 +536,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return MF_E_TRANSFORM_NEED_MORE_INPUT; }
- if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, &samples->dwStatus, NULL))) + if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, 0, &samples->dwStatus, NULL))) wg_sample_queue_flush(impl->wg_sample_queue, false);
return hr; diff --git a/dlls/winegstreamer/unixlib.h b/dlls/winegstreamer/unixlib.h index 179f15f78f7..5d11934fac2 100644 --- a/dlls/winegstreamer/unixlib.h +++ b/dlls/winegstreamer/unixlib.h @@ -192,6 +192,7 @@ struct wg_sample UINT32 flags; UINT32 max_size; UINT32 size; + UINT32 stride; UINT64 data; /* pointer to user memory */ };
@@ -334,6 +335,7 @@ struct wg_parser_stream_seek_params struct wg_transform_attrs { UINT32 output_plane_align; + UINT32 output_plane_stride; UINT32 input_queue_length; BOOL allow_format_change; BOOL low_latency; diff --git a/dlls/winegstreamer/video_decoder.c b/dlls/winegstreamer/video_decoder.c index becf148aeef..e56f7b9baf7 100644 --- a/dlls/winegstreamer/video_decoder.c +++ b/dlls/winegstreamer/video_decoder.c @@ -994,7 +994,8 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, } }
- if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, sample, &samples->dwStatus, &preserve_timestamps))) + if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, sample, + sample_size, &samples->dwStatus, &preserve_timestamps))) { wg_sample_queue_flush(decoder->wg_sample_queue, false);
diff --git a/dlls/winegstreamer/video_encoder.c b/dlls/winegstreamer/video_encoder.c index 41291928660..f26bec66c84 100644 --- a/dlls/winegstreamer/video_encoder.c +++ b/dlls/winegstreamer/video_encoder.c @@ -541,7 +541,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, if (!samples->pSample) return E_INVALIDARG;
- if (SUCCEEDED(hr = wg_transform_read_mf(encoder->wg_transform, samples->pSample, &samples->dwStatus, NULL))) + if (SUCCEEDED(hr = wg_transform_read_mf(encoder->wg_transform, samples->pSample, 0, &samples->dwStatus, NULL))) wg_sample_queue_flush(encoder->wg_sample_queue, false);
return hr; diff --git a/dlls/winegstreamer/video_processor.c b/dlls/winegstreamer/video_processor.c index 587be98c98b..3e327228e65 100644 --- a/dlls/winegstreamer/video_processor.c +++ b/dlls/winegstreamer/video_processor.c @@ -695,6 +695,7 @@ static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD f MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) { struct video_processor *impl = impl_from_IMFTransform(iface); + MFT_OUTPUT_STREAM_INFO info; IMFSample *output_sample; HRESULT hr; BOOL playback_mode, provide_samples; @@ -708,6 +709,8 @@ static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD f return MF_E_TRANSFORM_TYPE_NOT_SET;
samples->dwStatus = 0; + if (FAILED(hr = IMFTransform_GetOutputStreamInfo(iface, 0, &info))) + return hr;
if (FAILED(IMFAttributes_GetUINT32(impl->attributes, &MF_XVP_PLAYBACK_MODE, (UINT32 *) &playback_mode))) playback_mode = FALSE; @@ -728,7 +731,7 @@ static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD f IMFSample_AddRef(output_sample); }
- if (FAILED(hr = wg_transform_read_mf(impl->wg_transform, output_sample, &samples->dwStatus, NULL))) + if (FAILED(hr = wg_transform_read_mf(impl->wg_transform, output_sample, info.cbSize, &samples->dwStatus, NULL))) goto done; wg_sample_queue_flush(impl->wg_sample_queue, false);
diff --git a/dlls/winegstreamer/wg_sample.c b/dlls/winegstreamer/wg_sample.c index 2fc2679337f..94dfe483fb6 100644 --- a/dlls/winegstreamer/wg_sample.c +++ b/dlls/winegstreamer/wg_sample.c @@ -52,6 +52,7 @@ struct sample { IMFSample *sample; IMFMediaBuffer *buffer; + IMF2DBuffer2 *buffer2d; } mf; struct { @@ -79,8 +80,16 @@ static void mf_sample_destroy(struct wg_sample *wg_sample)
TRACE_(mfplat)("wg_sample %p.\n", wg_sample);
- IMFMediaBuffer_Unlock(sample->u.mf.buffer); - IMFMediaBuffer_Release(sample->u.mf.buffer); + if (sample->u.mf.buffer2d) + { + IMF2DBuffer2_Unlock2D(sample->u.mf.buffer2d); + IMF2DBuffer2_Release(sample->u.mf.buffer2d); + } + else + { + IMFMediaBuffer_Unlock(sample->u.mf.buffer); + IMFMediaBuffer_Release(sample->u.mf.buffer); + } IMFSample_Release(sample->u.mf.sample); }
@@ -92,21 +101,49 @@ static const struct wg_sample_ops mf_sample_ops = HRESULT wg_sample_create_mf(IMFSample *mf_sample, struct wg_sample **out) { DWORD current_length, max_length; + BYTE *scanline0, *buffer = NULL; + IMF2DBuffer2 *buffer2d; struct sample *sample; - BYTE *buffer; - HRESULT hr; + HRESULT hr = S_OK; + LONG pitch = 0;
if (!(sample = calloc(1, sizeof(*sample)))) return E_OUTOFMEMORY; if (FAILED(hr = IMFSample_ConvertToContiguousBuffer(mf_sample, &sample->u.mf.buffer))) goto fail; - if (FAILED(hr = IMFMediaBuffer_Lock(sample->u.mf.buffer, &buffer, &max_length, ¤t_length))) + if (SUCCEEDED(IMFMediaBuffer_QueryInterface(sample->u.mf.buffer, &IID_IMF2DBuffer2, (void **)&buffer2d))) + { + /* The result of ConvertToContiguousBuffer() can be an existing 2D buffer, which does + * not need to be contiguous by the definition of 'contiguous' for buffers. For 2D + * buffers, call Lock2DSize() and set up GStreamer to output with the correct stride. + * This avoids Lock() copying the entire contents into a linear buffer even though the + * current contents are about to be overwritten, and avoids Unlock() copying the new + * contents. Resolves performance issues on lower spec hardware. */ + if (SUCCEEDED(hr = IMF2DBuffer2_Lock2DSize(buffer2d, MF2DBuffer_LockFlags_ReadWrite, &scanline0, &pitch, &buffer, &max_length))) + { + IMFMediaBuffer_Release(sample->u.mf.buffer); + sample->u.mf.buffer = NULL; + sample->u.mf.buffer2d = buffer2d; + IMF2DBuffer2_GetContiguousLength(buffer2d, ¤t_length); + if (pitch < 0) + pitch = -pitch; + } + else + { + IMF2DBuffer2_Release(buffer2d); + } + if (FAILED(hr)) + goto fail; + } + + if (!buffer && FAILED(hr = IMFMediaBuffer_Lock(sample->u.mf.buffer, &buffer, &max_length, ¤t_length))) goto fail;
IMFSample_AddRef((sample->u.mf.sample = mf_sample)); sample->wg_sample.data = (UINT_PTR)buffer; sample->wg_sample.size = current_length; sample->wg_sample.max_size = max_length; + sample->wg_sample.stride = pitch; sample->ops = &mf_sample_ops;
*out = &sample->wg_sample; @@ -338,10 +375,12 @@ HRESULT wg_transform_push_mf(wg_transform_t transform, IMFSample *sample, return hr; }
-HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, DWORD *flags, bool *preserve_timestamps) +HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, + DWORD plane_size, DWORD *flags, bool *preserve_timestamps) { struct wg_sample *wg_sample; IMFMediaBuffer *buffer; + DWORD sample_size; HRESULT hr;
TRACE_(mfplat)("transform %#I64x, sample %p, flags %p.\n", transform, sample, flags); @@ -372,7 +411,20 @@ HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, DWORD
if (SUCCEEDED(hr = IMFSample_ConvertToContiguousBuffer(sample, &buffer))) { - hr = IMFMediaBuffer_SetCurrentLength(buffer, wg_sample->size); + if (wg_sample->stride && plane_size) + { + /* The sample size must match the frame size, which differs from the contiguous length + * if the buffer has extra width. MF allows a frame to be placed in a wider 2D buffer. */ + sample_size = min(plane_size, wg_sample->size); + } + else + { + if (wg_sample->stride) + FIXME("Expected a plane size.\n"); + sample_size = wg_sample->size; + } + + hr = IMFMediaBuffer_SetCurrentLength(buffer, sample_size); IMFMediaBuffer_Release(buffer); }
diff --git a/dlls/winegstreamer/wg_transform.c b/dlls/winegstreamer/wg_transform.c index f8bf4474756..e7bbdccb4e9 100644 --- a/dlls/winegstreamer/wg_transform.c +++ b/dlls/winegstreamer/wg_transform.c @@ -104,7 +104,7 @@ static struct wg_transform *get_transform(wg_transform_t trans) return (struct wg_transform *)(ULONG_PTR)trans; }
-static void align_video_info_planes(MFVideoInfo *video_info, gsize plane_align, +static void align_video_info_planes(MFVideoInfo *video_info, gsize plane_align, guint stride, GstVideoInfo *info, GstVideoAlignment *align) { bool fix_nv12 = !plane_align && info->finfo->format == GST_VIDEO_FORMAT_NV12 && (info->width & 3) && (info->width & 3) != 3; @@ -123,6 +123,27 @@ static void align_video_info_planes(MFVideoInfo *video_info, gsize plane_align, align->padding_left = aperture->OffsetY.value; }
+ if (stride) + { + /* The MF sample has a 2D buffer. Set padding_right to match its stride. */ + guint width = align->padding_left + info->width + align->padding_right; + const GstVideoFormatInfo *finfo = info->finfo; + gint comp[GST_VIDEO_MAX_COMPONENTS]; + gint pixel_stride; + + gst_video_format_info_component(finfo, 0, comp); + pixel_stride = finfo->pixel_stride[comp[0]]; + + if (stride % pixel_stride) + GST_ERROR("Stride %u not aligned to pixel size", stride); + stride /= pixel_stride; + + if (stride < width) + GST_ERROR("Invalid stride %u", stride); + else + align->padding_right += stride - width; + } + if (video_info->VideoFlags & MFVideoFlag_BottomUpLinearRep) { gsize top = align->padding_top; @@ -217,7 +238,7 @@ static void wg_video_buffer_pool_class_init(WgVideoBufferPoolClass *klass) pool_class->alloc_buffer = wg_video_buffer_pool_alloc_buffer; }
-static WgVideoBufferPool *wg_video_buffer_pool_create(GstCaps *caps, gsize plane_align, +static WgVideoBufferPool *wg_video_buffer_pool_create(GstCaps *caps, gsize plane_align, gsize output_plane_stride, GstAllocator *allocator, MFVideoInfo *video_info, GstVideoAlignment *align) { WgVideoBufferPool *pool; @@ -229,7 +250,7 @@ static WgVideoBufferPool *wg_video_buffer_pool_create(GstCaps *caps, gsize plane
gst_video_info_from_caps(&pool->info, caps); max_size = pool->info.size; - align_video_info_planes(video_info, plane_align, &pool->info, align); + align_video_info_planes(video_info, plane_align, output_plane_stride, &pool->info, align); /* GStreamer assumes NV12 pools must accommodate a stride alignment of 4, but we use 2 */ max_size = max(max_size, pool->info.size);
@@ -313,7 +334,7 @@ static gboolean transform_sink_query_allocation(struct wg_transform *transform, return false;
if (!(pool = wg_video_buffer_pool_create(caps, transform->attrs.output_plane_align, - transform->allocator, &transform->output_info, &align))) + transform->attrs.output_plane_stride, transform->allocator, &transform->output_info, &align))) return false;
if ((params = gst_structure_new("video-meta", @@ -896,7 +917,7 @@ NTSTATUS wg_transform_push_data(void *args) }
if (!(buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY, wg_sample_data(sample), sample->max_size, - 0, sample->size, sample, wg_sample_free_notify))) + 0, sample->stride ? sample->max_size : sample->size, sample, wg_sample_free_notify))) { GST_ERROR("Failed to allocate input buffer"); return STATUS_NO_MEMORY; @@ -911,7 +932,7 @@ NTSTATUS wg_transform_push_data(void *args) if (!strcmp(input_mime, "video/x-raw") && gst_video_info_from_caps(&video_info, transform->input_caps)) { GstVideoAlignment align; - align_video_info_planes(&transform->input_info, 0, &video_info, &align); + align_video_info_planes(&transform->input_info, 0, sample->stride, &video_info, &align); buffer_add_video_meta(buffer, &video_info); }
@@ -1214,6 +1235,13 @@ NTSTATUS wg_transform_read_data(void *args) bool discard_data; NTSTATUS status;
+ if (sample->stride != transform->attrs.output_plane_stride) + { + GST_INFO("Reconfiguring to stride %u", sample->stride); + transform->attrs.output_plane_stride = sample->stride; + push_event(transform->my_sink, gst_event_new_reconfigure()); + } + if (!transform->output_sample && !get_transform_output(transform, sample)) { sample->size = 0; @@ -1237,7 +1265,7 @@ NTSTATUS wg_transform_read_data(void *args) dst_video_info = src_video_info;
/* set the desired output buffer alignment and stride on the dest video info */ - align_video_info_planes(&transform->output_info, plane_align, &dst_video_info, &align); + align_video_info_planes(&transform->output_info, plane_align, sample->stride, &dst_video_info, &align);
/* copy the actual output buffer alignment and stride to the src video info */ if ((meta = gst_buffer_get_video_meta(output_buffer))) diff --git a/dlls/winegstreamer/wma_decoder.c b/dlls/winegstreamer/wma_decoder.c index ca7a5f278bf..8ff4d09cfaa 100644 --- a/dlls/winegstreamer/wma_decoder.c +++ b/dlls/winegstreamer/wma_decoder.c @@ -552,7 +552,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return MF_E_TRANSFORM_NEED_MORE_INPUT; }
- if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, &samples->dwStatus, NULL))) + if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, 0, &samples->dwStatus, NULL))) wg_sample_queue_flush(decoder->wg_sample_queue, false);
return hr;
Added tests, some of which reveal existing issues, but fixes for those are not included in this MR. The `todo_length` in `output_sample_desc_i420_1d` is needed because the video decoder currently derives the output length from the frame size instead of from GetOutputStreamInfo().
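As an aside on the expected lengths used in these tests: the `* 3 / 2` factor is the usual 4:2:0 size arithmetic, a full-pitch Y plane plus chroma data totalling half of it again. A tiny self-contained illustration, using the 96x96 NV12 frame with a 128-byte pitch from the mfplat test as example values (the helper name is made up):

```c
/* Illustration only: contiguous size of a 4:2:0 frame (NV12/I420) stored with a given
 * pitch -- pitch*height bytes of Y plus half of that again for chroma, i.e. the
 * "* 3 / 2" used by the test buffer descriptors. */
#include <stdio.h>

static unsigned int frame_420_size(unsigned int pitch, unsigned int height)
{
    return pitch * height * 3 / 2;
}

int main(void)
{
    /* A 96x96 NV12 frame, tightly packed vs. padded to a 128-byte pitch in a 2D buffer. */
    printf("tight: %u bytes, padded: %u bytes\n",
           frame_420_size(96, 96), frame_420_size(128, 96));
    return 0;
}
```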
Is this path supposed to be on CPU in the first place?
On Thu Aug 28 00:30:12 2025 +0000, Elizabeth Figura wrote:
> Is this path supposed to be on CPU in the first place?
IDirect3DSurface9 and IMFDXGIBuffer are on the CPU, but IMF2DBuffer is just a memory buffer.