From: Rémi Bernon <rbernon@codeweavers.com>
Because it was done inconsistently before, we need to temporarily adjust the frame size on the wg_transform side. This fixes one test but breaks a different one; the next change will fix it properly.
---
 dlls/mf/tests/transform.c         | 41 ++++++++++++------------
 dlls/winegstreamer/mfplat.c       | 33 ++++++++++---------
 dlls/winegstreamer/wg_transform.c | 53 +++++++++++++++++++++++++++----
 3 files changed, 84 insertions(+), 43 deletions(-)
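Note (illustration only, not part of the patch): the convention this change moves towards is that the wg_format video width/height describe the unpadded (visible) frame, while the padding rect holds the extra pixels that Media Foundation expresses through MF_MT_FRAME_SIZE plus MF_MT_MINIMUM_DISPLAY_APERTURE. A minimal standalone sketch of that arithmetic, using a hypothetical stand-in struct rather than the real struct wg_format / MFVideoArea types:

    #include <assert.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the video part of struct wg_format: width and
     * height hold the unpadded (visible) frame size, padding the extra pixels
     * on each side. */
    struct video_format
    {
        int width, height;
        struct { int left, top, right, bottom; } padding;
    };

    /* Padded frame size, i.e. what MF_MT_FRAME_SIZE would report. */
    static int padded_width(const struct video_format *f)
    {
        return f->width + f->padding.left + f->padding.right;
    }

    static int padded_height(const struct video_format *f)
    {
        return f->height + f->padding.top + f->padding.bottom;
    }

    int main(void)
    {
        /* For example, an 82x84 visible area inside a 96x96 frame. */
        struct video_format f = {.width = 82, .height = 84,
                .padding = {.left = 0, .top = 12, .right = 14, .bottom = 0}};

        assert(padded_width(&f) == 96);
        assert(padded_height(&f) == 96);
        printf("frame %dx%d, visible %dx%d at (%d,%d)\n",
                padded_width(&f), padded_height(&f), f.width, f.height,
                f.padding.left, f.padding.top);
        return 0;
    }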
diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c
index 3bd13384620..a7c659c7cbf 100644
--- a/dlls/mf/tests/transform.c
+++ b/dlls/mf/tests/transform.c
@@ -7394,17 +7394,32 @@ static void test_video_processor(void)
         .sample_time = 0, .sample_duration = 10000000,
         .buffer_count = 1, .buffers = &rgb32_buffer_desc,
     };
+    const struct buffer_desc rgb32_buffer_desc_todo =
+    {
+        .length = actual_width * actual_height * 4,
+        .compare = compare_rgb32, .dump = dump_rgb32, .rect = {.top = 12, .right = 82, .bottom = 96},
+        .todo_length = TRUE,
+    };
+    const struct sample_desc rgb32_sample_desc_todo =
+    {
+        .attributes = output_sample_attributes,
+        .sample_time = 0, .sample_duration = 10000000,
+        .buffer_count = 1, .buffers = &rgb32_buffer_desc_todo,
+        .todo_length = TRUE,
+    };
 
     const struct buffer_desc rgb32_cropped_buffer_desc =
     {
         .length = 82 * 84 * 4,
         .compare = compare_rgb32, .dump = dump_rgb32,
+        .todo_length = TRUE
     };
     const struct sample_desc rgb32_cropped_sample_desc =
     {
         .attributes = output_sample_attributes,
         .sample_time = 0, .sample_duration = 10000000,
         .buffer_count = 1, .buffers = &rgb32_cropped_buffer_desc,
+        .todo_length = TRUE
     };
 
     const struct buffer_desc rgb555_buffer_desc =
@@ -7492,7 +7507,7 @@ static void test_video_processor(void)
         },
         {
             .input_type_desc = rgb32_with_aperture, .output_type_desc = rgb32_with_aperture,
-            .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame.bmp",
+            .output_sample_desc = &rgb32_sample_desc_todo, .result_bitmap = L"rgb32frame.bmp",
             .broken = TRUE /* old Windows version incorrectly rescale */
         },
         {
@@ -7515,7 +7530,7 @@ static void test_video_processor(void)
         },
         {
             .input_type_desc = rgb32_no_aperture, .output_type_desc = rgb32_with_aperture,
-            .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame-bogus.bmp",
+            .output_sample_desc = &rgb32_sample_desc_todo, .result_bitmap = L"rgb32frame-bogus.bmp",
         },
         {
             .input_type_desc = rgb32_with_aperture, .output_type_desc = rgb32_no_aperture,
@@ -7843,23 +7858,6 @@ static void test_video_processor(void)
         check_mft_set_input_type(transform, test->input_type_desc);
         check_mft_get_input_current_type(transform, test->input_type_desc);
 
-        if (i >= 15)
-        {
-            IMFMediaType *media_type;
-            HRESULT hr;
-
-            hr = MFCreateMediaType(&media_type);
-            ok(hr == S_OK, "MFCreateMediaType returned hr %#lx.\n", hr);
-            init_media_type(media_type, test->output_type_desc, -1);
-            hr = IMFTransform_SetOutputType(transform, 0, media_type, 0);
-            todo_wine
-            ok(hr == S_OK, "SetOutputType returned %#lx.\n", hr);
-            IMFMediaType_Release(media_type);
-
-            winetest_pop_context();
-            continue;
-        }
-
         check_mft_set_output_type_required(transform, test->output_type_desc);
         check_mft_set_output_type(transform, test->output_type_desc, S_OK);
         check_mft_get_output_current_type(transform, test->output_type_desc);
@@ -7969,6 +7967,7 @@ static void test_video_processor(void)
         ok(ref == 1, "Release returned %ld\n", ref);
 
         ret = check_mf_sample_collection(output_samples, test->output_sample_desc, test->result_bitmap);
+        todo_wine_if(i == 10 || i == 15)
         ok(ret <= test->delta || broken(test->broken), "got %lu%% diff\n", ret);
         IMFCollection_Release(output_samples);
 
@@ -8007,8 +8006,8 @@ static void test_video_processor(void)
     check_mft_set_output_type(transform, rgb32_no_aperture, S_OK);
     check_mft_get_output_current_type(transform, rgb32_no_aperture);
 
-    check_mft_set_input_type_(__LINE__, transform, nv12_with_aperture, TRUE);
-    check_mft_get_input_current_type_(__LINE__, transform, nv12_with_aperture, TRUE, FALSE);
+    check_mft_set_input_type(transform, nv12_with_aperture);
+    check_mft_get_input_current_type(transform, nv12_with_aperture);
 
     /* output type is the same as before */
     check_mft_get_output_current_type(transform, rgb32_no_aperture);
diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c
index e6d9fb9fd2c..c27a2646937 100644
--- a/dlls/winegstreamer/mfplat.c
+++ b/dlls/winegstreamer/mfplat.c
@@ -519,6 +519,22 @@ static IMFMediaType *mf_media_type_from_wg_format_video(const struct wg_format *format)
             if (FAILED(MFCreateMediaType(&type)))
                 return NULL;
 
+            if (format->u.video.padding.left || format->u.video.padding.right
+                    || format->u.video.padding.top || format->u.video.padding.bottom)
+            {
+                MFVideoArea aperture =
+                {
+                    .OffsetX = {.value = format->u.video.padding.left},
+                    .OffsetY = {.value = format->u.video.padding.top},
+                    .Area.cx = width, .Area.cy = height,
+                };
+                width += format->u.video.padding.left + format->u.video.padding.right;
+                height += format->u.video.padding.top + format->u.video.padding.bottom;
+
+                IMFMediaType_SetBlob(type, &MF_MT_MINIMUM_DISPLAY_APERTURE,
+                        (BYTE *)&aperture, sizeof(aperture));
+            }
+
             IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video);
             IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, video_formats[i].subtype);
             IMFMediaType_SetUINT64(type, &MF_MT_FRAME_SIZE, make_uint64(width, height));
@@ -532,21 +548,6 @@ static IMFMediaType *mf_media_type_from_wg_format_video(const struct wg_format *format)
                 stride = -stride;
         IMFMediaType_SetUINT32(type, &MF_MT_DEFAULT_STRIDE, stride);
 
-            if (format->u.video.padding.left || format->u.video.padding.right
-                    || format->u.video.padding.top || format->u.video.padding.bottom)
-            {
-                MFVideoArea aperture =
-                {
-                    .OffsetX = {.value = format->u.video.padding.left},
-                    .OffsetY = {.value = format->u.video.padding.top},
-                    .Area.cx = width - format->u.video.padding.right - format->u.video.padding.left,
-                    .Area.cy = height - format->u.video.padding.bottom - format->u.video.padding.top,
-                };
-
-                IMFMediaType_SetBlob(type, &MF_MT_MINIMUM_DISPLAY_APERTURE,
-                        (BYTE *)&aperture, sizeof(aperture));
-            }
-
             return type;
         }
     }
@@ -706,6 +707,8 @@ static void mf_media_type_to_wg_format_video(IMFMediaType *type, const GUID *subtype, struct wg_format *format)
         format->u.video.padding.top = aperture.OffsetY.value;
         format->u.video.padding.right = format->u.video.width - aperture.Area.cx - aperture.OffsetX.value;
         format->u.video.padding.bottom = format->u.video.height - aperture.Area.cy - aperture.OffsetY.value;
+        format->u.video.width -= format->u.video.padding.left + format->u.video.padding.right;
+        format->u.video.height -= format->u.video.padding.top + format->u.video.padding.bottom;
     }
 
     if (SUCCEEDED(IMFMediaType_GetUINT64(type, &MF_MT_FRAME_RATE, &frame_rate)) && (UINT32)frame_rate)
diff --git a/dlls/winegstreamer/wg_transform.c b/dlls/winegstreamer/wg_transform.c
index 6882e16bf1c..65b95229096 100644
--- a/dlls/winegstreamer/wg_transform.c
+++ b/dlls/winegstreamer/wg_transform.c
@@ -92,6 +92,48 @@ static void align_video_info_planes(struct wg_format *format, gsize plane_align,
     }
 }
 
+static void update_format_width_padding(struct wg_format *max, struct wg_format *min)
+{
+    max->u.video.padding.right += max->u.video.width - min->u.video.width;
+    max->u.video.width = min->u.video.width;
+}
+
+static void update_format_height_padding(struct wg_format *max, struct wg_format *min)
+{
+    max->u.video.padding.bottom += abs(max->u.video.height) - abs(min->u.video.height);
+    max->u.video.height = (max->u.video.height < 0 ? -1 : 1) * abs(min->u.video.height);
+}
+
+static void update_format_padding(struct wg_format *input_format, struct wg_format *output_format)
+{
+    if (input_format->major_type == WG_MAJOR_TYPE_VIDEO)
+    {
+        input_format->u.video.width += input_format->u.video.padding.left + input_format->u.video.padding.right;
+        input_format->u.video.height += input_format->u.video.padding.top + input_format->u.video.padding.bottom;
+    }
+
+    if (output_format->major_type == WG_MAJOR_TYPE_VIDEO)
+    {
+        output_format->u.video.width += output_format->u.video.padding.left + output_format->u.video.padding.right;
+        output_format->u.video.height += output_format->u.video.padding.top + output_format->u.video.padding.bottom;
+    }
+
+    if (input_format->major_type != output_format->major_type)
+        return;
+    if (input_format->major_type != WG_MAJOR_TYPE_VIDEO)
+        return;
+
+    if (input_format->u.video.width > output_format->u.video.width)
+        update_format_width_padding(input_format, output_format);
+    else
+        update_format_width_padding(output_format, input_format);
+
+    if (abs(input_format->u.video.height) > abs(output_format->u.video.height))
+        update_format_height_padding(input_format, output_format);
+    else
+        update_format_height_padding(output_format, input_format);
+}
+
 typedef struct
 {
     GstVideoBufferPool parent;
@@ -433,6 +475,8 @@ NTSTATUS wg_transform_create(void *args)
     if (!(transform->allocator = wg_allocator_create()))
         goto out;
     transform->attrs = *params->attrs;
+
+    update_format_padding(&input_format, &output_format);
     transform->input_format = input_format;
     transform->output_format = output_format;
 
@@ -964,18 +1008,13 @@ NTSTATUS wg_transform_read_data(void *args)
         wg_format_from_caps(&output_format, output_caps);
         if (output_format.major_type == WG_MAJOR_TYPE_VIDEO)
         {
+            if (transform->output_format.u.video.height < 0)
+                output_format.u.video.height *= -1;
             output_format.u.video.padding.left = align.padding_left;
-            output_format.u.video.width += output_format.u.video.padding.left;
             output_format.u.video.padding.right = align.padding_right;
-            output_format.u.video.width += output_format.u.video.padding.right;
             output_format.u.video.padding.top = align.padding_top;
-            output_format.u.video.height += output_format.u.video.padding.top;
             output_format.u.video.padding.bottom = align.padding_bottom;
-            output_format.u.video.height += output_format.u.video.padding.bottom;
             GST_INFO("new video padding rect %s", wine_dbgstr_rect(&output_format.u.video.padding));
-
-            if (transform->output_format.u.video.height < 0)
-                output_format.u.video.height *= -1;
         }
 
         if (format)
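
Note (illustration only, not part of the patch): the new update_format_padding() helper makes the input and output formats agree on a common frame size by shrinking whichever side reports the larger frame and folding the difference into its right/bottom padding. A standalone sketch of the width half of that logic, using a hypothetical stand-in type rather than struct wg_format:

    #include <assert.h>

    /* Hypothetical stand-in for the video part of struct wg_format. */
    struct video_format
    {
        int width, height;
        struct { int left, top, right, bottom; } padding;
    };

    /* Shrink the wider format to the narrower one, recording the difference
     * as right padding, similar to update_format_width_padding() above. */
    static void equalize_width(struct video_format *max, struct video_format *min)
    {
        max->padding.right += max->width - min->width;
        max->width = min->width;
    }

    int main(void)
    {
        struct video_format in = {.width = 96, .height = 96};
        struct video_format out = {.width = 82, .height = 84};

        if (in.width > out.width) equalize_width(&in, &out);
        else equalize_width(&out, &in);

        assert(in.width == 82 && out.width == 82);
        assert(in.padding.right == 14);
        return 0;
    }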