The YUY2-format video stream captured from V4L requires conversion to RGB24 for rendering. I have verified that FFmpeg-8.0.1-essentials_build supports the YUYV capture format under Wine. The attachment contains the format-support list obtained with ffmpeg.exe, along with video capture data processed as YUY2. [ffmpeg_support.log](/uploads/6bbdfa9249ef1cfae3edfa4e11663538/ffmpeg_support.log)
[test_yuyv422.avi](/uploads/b2b0fb25fedad17bf60cf5aedaa6c5bf/test_yuyv422.avi)
-- v2: quartz: support YUY2 video stream rendering; qcap: support capturing camera footage in YUY2 format
From: liuchenghao linkmecry@gmail.com
Support direct reading of YUYV video streams under V4L. --- dlls/qcap/v4l.c | 191 +++++++++++++++++++++++++---------------- dlls/qcap/vfwcapture.c | 7 +- 2 files changed, 123 insertions(+), 75 deletions(-)
diff --git a/dlls/qcap/v4l.c b/dlls/qcap/v4l.c index 5efb4b7e505..34a603fe357 100644 --- a/dlls/qcap/v4l.c +++ b/dlls/qcap/v4l.c @@ -368,32 +368,44 @@ static NTSTATUS v4l_device_read_frame( void *args ) return TRUE; }
-static void fill_caps(__u32 pixelformat, __u32 width, __u32 height, +static void fill_caps(__u32 pixelformat, const GUID *subtype, __u32 width, __u32 height, __u32 max_fps, __u32 min_fps, struct caps *caps) { - LONG depth = 24; + LONG depth; + DWORD compression; + + if (pixelformat == V4L2_PIX_FMT_YUYV) + { + depth = 16; + compression = mmioFOURCC('Y','U','Y','2'); + } + else + { + depth = 24; + compression = BI_RGB; + }
memset(caps, 0, sizeof(*caps)); - caps->video_info.dwBitRate = width * height * depth * max_fps; - caps->video_info.bmiHeader.biSize = sizeof(caps->video_info.bmiHeader); + caps->video_info.AvgTimePerFrame = (REFERENCE_TIME)(10000000ULL / (max_fps ? max_fps : 1)); + caps->video_info.dwBitRate = (width * height * (depth)) * max_fps; + caps->video_info.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); caps->video_info.bmiHeader.biWidth = width; caps->video_info.bmiHeader.biHeight = height; caps->video_info.bmiHeader.biPlanes = 1; caps->video_info.bmiHeader.biBitCount = depth; - caps->video_info.bmiHeader.biCompression = BI_RGB; - caps->video_info.bmiHeader.biSizeImage = width * height * depth / 8; + caps->video_info.bmiHeader.biCompression = compression; + caps->video_info.bmiHeader.biSizeImage = width * height * ((depth + 7) / 8); + caps->media_type.majortype = MEDIATYPE_Video; - caps->media_type.subtype = MEDIASUBTYPE_RGB24; + caps->media_type.subtype = *subtype; caps->media_type.bFixedSizeSamples = TRUE; caps->media_type.bTemporalCompression = FALSE; - caps->media_type.lSampleSize = width * height * depth / 8; + caps->media_type.lSampleSize = caps->video_info.bmiHeader.biSizeImage; caps->media_type.formattype = FORMAT_VideoInfo; caps->media_type.pUnk = NULL; caps->media_type.cbFormat = sizeof(VIDEOINFOHEADER); - /* We reallocate the caps array, so pbFormat has to be set after all caps - * have been enumerated. 
*/ - caps->config.MaxFrameInterval = 10000000 / max_fps; - caps->config.MinFrameInterval = 10000000 / min_fps; + caps->config.MaxFrameInterval = 10000000 / min_fps; + caps->config.MinFrameInterval = 10000000 / max_fps; caps->config.MaxOutputSize.cx = width; caps->config.MaxOutputSize.cy = height; caps->config.MinOutputSize.cx = width; @@ -427,14 +439,19 @@ static NTSTATUS v4l_device_get_caps_count( void *args ) static NTSTATUS v4l_device_create( void *args ) { const struct create_params *params = args; - struct v4l2_frmsizeenum frmsize = {0}; struct video_capture_device *device; struct v4l2_capability caps = {{0}}; - struct v4l2_format format = {0}; + struct caps *new_caps; + struct v4l2_fmtdesc fmt_desc = {0}; + struct v4l2_frmsizeenum frmsize = {0}; + struct v4l2_frmivalenum frmival = {0}; BOOL have_libv4l2; char path[20]; - HRESULT hr; int fd, i; + HRESULT hr; + __u32 max_fps = 30, min_fps = 30; + GUID subtype; + __u32 width, height;
have_libv4l2 = video_init();
@@ -442,7 +459,7 @@ static NTSTATUS v4l_device_create( void *args ) return E_OUTOFMEMORY;
sprintf(path, "/dev/video%i", params->index); - TRACE("Opening device %s.\n", path); + FIXME("Opening device %s.\n", path); #ifdef O_CLOEXEC if ((fd = video_open(path, O_RDWR | O_NONBLOCK | O_CLOEXEC)) == -1 && errno == EINVAL) #endif @@ -452,7 +469,7 @@ static NTSTATUS v4l_device_create( void *args ) WARN("Failed to open video device: %s\n", strerror(errno)); goto error; } - fcntl(fd, F_SETFD, FD_CLOEXEC); /* in case O_CLOEXEC isn't supported */ + fcntl(fd, F_SETFD, FD_CLOEXEC); device->fd = fd;
if (xioctl(fd, VIDIOC_QUERYCAP, &caps) == -1) @@ -484,77 +501,103 @@ static NTSTATUS v4l_device_create( void *args ) goto error; }
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if (xioctl(fd, VIDIOC_G_FMT, &format) == -1) - { - ERR("Failed to get device format: %s\n", strerror(errno)); - goto error; - } - - format.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24; - if (xioctl(fd, VIDIOC_TRY_FMT, &format) == -1 - || format.fmt.pix.pixelformat != V4L2_PIX_FMT_BGR24) - { - ERR("This device doesn't support V4L2_PIX_FMT_BGR24 format.\n"); - goto error; - } + /* Enumerate supported pixel formats */ + fmt_desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- frmsize.pixel_format = V4L2_PIX_FMT_BGR24; - while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) != -1) + while (xioctl(fd, VIDIOC_ENUM_FMT, &fmt_desc) == 0) { - struct v4l2_frmivalenum frmival = {0}; - __u32 max_fps = 30, min_fps = 30; - struct caps *new_caps; + TRACE("Device supports format: %.4s (0x%08x)\n", (char*)&fmt_desc.pixelformat, fmt_desc.pixelformat);
- frmival.pixel_format = format.fmt.pix.pixelformat; - if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) - { - frmival.width = frmsize.discrete.width; - frmival.height = frmsize.discrete.height; - } - else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) - { - frmival.width = frmsize.stepwise.max_width; - frmival.height = frmsize.stepwise.min_height; - } - else + if (fmt_desc.pixelformat != V4L2_PIX_FMT_BGR24 && + fmt_desc.pixelformat != V4L2_PIX_FMT_YUYV) { - FIXME("Unhandled frame size type: %d.\n", frmsize.type); + fmt_desc.index++; continue; }
- if (xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) != -1) + frmsize.pixel_format = fmt_desc.pixelformat; + frmsize.index = 0; + + while (xioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) != -1) { - if (frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE) + if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) + { + width = frmsize.discrete.width; + height = frmsize.discrete.height; + } + else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) { - max_fps = frmival.discrete.denominator / frmival.discrete.numerator; - min_fps = max_fps; + /* For simplicity, just use min resolution in stepwise case */ + width = frmsize.stepwise.min_width; + height = frmsize.stepwise.min_height; } - else if (frmival.type == V4L2_FRMIVAL_TYPE_STEPWISE - || frmival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) + else { - min_fps = frmival.stepwise.max.denominator / frmival.stepwise.max.numerator; - max_fps = frmival.stepwise.min.denominator / frmival.stepwise.min.numerator; + FIXME("Unhandled frame size type: %d.\n", frmsize.type); + frmsize.index++; + continue; } + + /* Get frame intervals (FPS) */ + frmival.pixel_format = frmsize.pixel_format; + frmival.width = width; + frmival.height = height; + frmival.index = 0; + + if (xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) == 0) + { + if (frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE) + { + max_fps = min_fps = frmival.discrete.denominator / frmival.discrete.numerator; + } + else if (frmival.type == V4L2_FRMIVAL_TYPE_STEPWISE || + frmival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) + { + /* Note: min interval => max fps */ + min_fps = frmival.stepwise.max.denominator / frmival.stepwise.max.numerator; + max_fps = frmival.stepwise.min.denominator / frmival.stepwise.min.numerator; + if (min_fps > max_fps) min_fps = max_fps; + } + } + else + { + WARN("Failed to enumerate frame intervals for %dx%d fmt=0x%x: %s\n", + width, height, frmsize.pixel_format, strerror(errno)); + /* Use defaults */ + } + + /* Create caps entry */ + new_caps = realloc(device->caps, (device->caps_count + 
1) * sizeof(*device->caps)); + if (!new_caps) + goto error; + device->caps = new_caps; + + if (fmt_desc.pixelformat == V4L2_PIX_FMT_YUYV) + subtype = MEDIASUBTYPE_YUY2; + else + subtype = MEDIASUBTYPE_RGB24; + + fill_caps(frmsize.pixel_format, &subtype, width, height, max_fps, min_fps, + &device->caps[device->caps_count]); + device->caps_count++; + + frmsize.index++; } - else - ERR("Failed to get fps: %s.\n", strerror(errno)); - - new_caps = realloc(device->caps, (device->caps_count + 1) * sizeof(*device->caps)); - if (!new_caps) - goto error; - device->caps = new_caps; - fill_caps(format.fmt.pix.pixelformat, frmsize.discrete.width, frmsize.discrete.height, - max_fps, min_fps, &device->caps[device->caps_count]); - device->caps_count++; - - frmsize.index++; + + fmt_desc.index++; + } + + if (device->caps_count == 0) + { + WARN("No supported formats found (need BGR24 or YUYV).\n"); + goto error; }
- /* We reallocate the caps array, so we have to delay setting pbFormat. */ + /* Set pbFormat after all caps are allocated */ for (i = 0; i < device->caps_count; ++i) device->caps[i].media_type.pbFormat = (BYTE *)&device->caps[i].video_info;
+ /* Try to set the first format (for validation and image buffer allocation) */ if (FAILED(hr = set_caps(device, &device->caps[0], TRUE))) { if (hr == VFW_E_TYPE_NOT_ACCEPTED && !have_libv4l2) @@ -562,9 +605,11 @@ static NTSTATUS v4l_device_create( void *args ) goto error; }
- TRACE("Format: %d bpp - %dx%d.\n", device->current_caps->video_info.bmiHeader.biBitCount, - (int)device->current_caps->video_info.bmiHeader.biWidth, - (int)device->current_caps->video_info.bmiHeader.biHeight); + TRACE("Initialized with format: %.4s (%dx%d, %d bpp)\n", + (char*)&device->current_caps->pixelformat, + (int)device->current_caps->video_info.bmiHeader.biWidth, + (int)device->current_caps->video_info.bmiHeader.biHeight, + (int)device->current_caps->video_info.bmiHeader.biBitCount);
*params->device = (ULONG_PTR)device; return S_OK; diff --git a/dlls/qcap/vfwcapture.c b/dlls/qcap/vfwcapture.c index bf3ded9223c..cc9ab931063 100644 --- a/dlls/qcap/vfwcapture.c +++ b/dlls/qcap/vfwcapture.c @@ -149,6 +149,7 @@ static DWORD WINAPI stream_thread(void *arg) IMediaSample *sample; HRESULT hr; BYTE *data; + NTSTATUS status;
EnterCriticalSection(&filter->state_cs);
@@ -174,8 +175,10 @@ static DWORD WINAPI stream_thread(void *arg)
params.device = filter->device; params.data = data; - if (!V4L_CALL( read_frame, ¶ms )) - { + + status = V4L_CALL(read_frame, ¶ms); + if (!NT_SUCCESS(status)) { + ERR("read_frame failed: 0x%lx\n", status); IMediaSample_Release(sample); break; }
From: liuchenghao linkmecry@gmail.com
The YUY2-format video stream captured from V4L requires conversion to RGB24 for rendering. --- dlls/quartz/videorenderer.c | 146 ++++++++++++++++++++++++++++++++++-- 1 file changed, 139 insertions(+), 7 deletions(-)
diff --git a/dlls/quartz/videorenderer.c b/dlls/quartz/videorenderer.c index bbd34678dfd..d5df15b2e21 100644 --- a/dlls/quartz/videorenderer.c +++ b/dlls/quartz/videorenderer.c @@ -79,6 +79,100 @@ static void VideoRenderer_AutoShowWindow(struct video_renderer *This) ShowWindow(This->window.hwnd, SW_SHOW); }
+static DWORD yuv2rgb(int Y, int U, int V) +{ + int C = Y - 16; + int D = U - 128; + int E = V - 128; + + int R = (298 * C + 409 * E + 128) >> 8; + int G = (298 * C - 100 * D - 208 * E + 128) >> 8; + int B = (298 * C + 516 * D + 128) >> 8; + + if (R < 0) R = 0; else if (R > 255) R = 255; + if (G < 0) G = 0; else if (G > 255) G = 255; + if (B < 0) B = 0; else if (B > 255) B = 255; + + return (B << 16) | (G << 8) | R; /* BGR order for DIB */ +} + +static HRESULT convert_yuy2_to_rgb_and_create_bmi(struct video_renderer *filter, + const BYTE *yuy2_data, + LONG data_size, + BYTE **rgb_buffer, + BITMAPINFO **rgb_bmi) +{ + AM_MEDIA_TYPE *mt = &filter->renderer.sink.pin.mt; + VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)mt->pbFormat; + int src_width = vih->bmiHeader.biWidth; + int src_height = abs(vih->bmiHeader.biHeight); + DWORD rgb_size = src_width * src_height * 3; + int y, x, idx, out_idx0, out_idx1; + BYTE Y0, U, Y1, V; + DWORD rgb0, rgb1; + + *rgb_buffer = HeapAlloc(GetProcessHeap(), 0, rgb_size); + if (!*rgb_buffer) + { + ERR("Failed to allocate RGB buffer!\n"); + return E_OUTOFMEMORY; + } + + for (y = 0; y < src_height; y++) + { + for (x = 0; x < src_width; x += 2) + { + idx = (y * src_width + x) * 2; + if (idx + 3 >= data_size) + break; + + Y0 = yuy2_data[idx]; + U = yuy2_data[idx + 1]; + Y1 = yuy2_data[idx + 2]; + V = yuy2_data[idx + 3]; + + rgb0 = yuv2rgb(Y0, U, V); + rgb1 = yuv2rgb(Y1, U, V); + + out_idx0 = (y * src_width + x) * 3; + out_idx1 = out_idx0 + 3; + + if (out_idx1 + 2 < (int)rgb_size) + { + (*rgb_buffer)[out_idx0 + 0] = (rgb0 >> 16) & 0xFF; /* B */ + (*rgb_buffer)[out_idx0 + 1] = (rgb0 >> 8) & 0xFF; /* G */ + (*rgb_buffer)[out_idx0 + 2] = (rgb0 >> 0) & 0xFF; /* R */ + + if (x + 1 < src_width) + { + (*rgb_buffer)[out_idx1 + 0] = (rgb1 >> 16) & 0xFF; /* B */ + (*rgb_buffer)[out_idx1 + 1] = (rgb1 >> 8) & 0xFF; /* G */ + (*rgb_buffer)[out_idx1 + 2] = (rgb1 >> 0) & 0xFF; /* R */ + } + } + } + } + + *rgb_bmi = (BITMAPINFO*)HeapAlloc(GetProcessHeap(), 
HEAP_ZERO_MEMORY, + sizeof(BITMAPINFO)); + if (!*rgb_bmi) + { + HeapFree(GetProcessHeap(), 0, *rgb_buffer); + *rgb_buffer = NULL; + return E_OUTOFMEMORY; + } + + (*rgb_bmi)->bmiHeader.biSize = sizeof(BITMAPINFOHEADER); + (*rgb_bmi)->bmiHeader.biWidth = src_width; + (*rgb_bmi)->bmiHeader.biHeight = -src_height; /* top-down */ + (*rgb_bmi)->bmiHeader.biPlanes = 1; + (*rgb_bmi)->bmiHeader.biBitCount = 24; + (*rgb_bmi)->bmiHeader.biCompression = BI_RGB; + (*rgb_bmi)->bmiHeader.biSizeImage = 0; + + return S_OK; +} + static HRESULT video_renderer_render(struct strmbase_renderer *iface, IMediaSample *pSample) { struct video_renderer *filter = impl_from_strmbase_renderer(iface); @@ -86,8 +180,11 @@ static HRESULT video_renderer_render(struct strmbase_renderer *iface, IMediaSamp LPBYTE pbSrcStream = NULL; HRESULT hr; HDC dc; - - TRACE("filter %p, sample %p.\n", filter, pSample); + AM_MEDIA_TYPE *mt = &filter->renderer.sink.pin.mt; + LONG size; + BOOL is_yuy2; + LPBYTE rgb_buffer = NULL; + BITMAPINFO *rgb_bmi = NULL;
hr = IMediaSample_GetPointer(pSample, &pbSrcStream); if (FAILED(hr)) @@ -96,11 +193,45 @@ static HRESULT video_renderer_render(struct strmbase_renderer *iface, IMediaSamp return hr; }
+ is_yuy2 = IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_YUY2); + + if (is_yuy2) + { + size = IMediaSample_GetActualDataLength(pSample); + hr = convert_yuy2_to_rgb_and_create_bmi(filter, pbSrcStream, size, + &rgb_buffer, &rgb_bmi); + if (FAILED(hr)) + return hr; + } + dc = GetDC(filter->window.hwnd); - StretchDIBits(dc, dst.left, dst.top, dst.right - dst.left, dst.bottom - dst.top, - src.left, src.top, src.right - src.left, src.bottom - src.top, pbSrcStream, - (BITMAPINFO *)get_bitmap_header(&filter->renderer.sink.pin.mt), DIB_RGB_COLORS, SRCCOPY); - ReleaseDC(filter->window.hwnd, dc); + if (dc) + { + if (is_yuy2 && rgb_buffer && rgb_bmi) + { + StretchDIBits(dc, + dst.left, dst.top, dst.right - dst.left, dst.bottom - dst.top, + src.left, src.top, src.right - src.left, src.bottom - src.top, + rgb_buffer, + rgb_bmi, + DIB_RGB_COLORS, SRCCOPY); + } + else + { + StretchDIBits(dc, + dst.left, dst.top, dst.right - dst.left, dst.bottom - dst.top, + src.left, src.top, src.right - src.left, src.bottom - src.top, + pbSrcStream, + (BITMAPINFO *)get_bitmap_header(mt), + DIB_RGB_COLORS, SRCCOPY); + } + ReleaseDC(filter->window.hwnd, dc); + } + + if (rgb_buffer) + HeapFree(GetProcessHeap(), 0, rgb_buffer); + if (rgb_bmi) + HeapFree(GetProcessHeap(), 0, rgb_bmi);
return S_OK; } @@ -113,7 +244,8 @@ static HRESULT video_renderer_query_accept(struct strmbase_renderer *iface, cons if (!IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_RGB32) && !IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_RGB24) && !IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_RGB565) - && !IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_RGB8)) + && !IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_RGB8) + && !IsEqualGUID(&mt->subtype, &MEDIASUBTYPE_YUY2)) return S_FALSE;
if (!IsEqualGUID(&mt->formattype, &FORMAT_VideoInfo)