Akka Arrh is using these WGL_*_SHIFT_ARB attributes.
From: Paul Gofman <pgofman@codeweavers.com>
```diff
---
 dlls/opengl32/tests/opengl.c |  8 +++++++-
 dlls/winex11.drv/opengl.c    | 24 ++++++++++++++++++++++++
 2 files changed, 31 insertions(+), 1 deletion(-)

diff --git a/dlls/opengl32/tests/opengl.c b/dlls/opengl32/tests/opengl.c
index ebeefffb72c..070229254a1 100644
--- a/dlls/opengl32/tests/opengl.c
+++ b/dlls/opengl32/tests/opengl.c
@@ -750,7 +750,8 @@ static void test_makecurrent(HDC winhdc)
 static void test_colorbits(HDC hdc)
 {
     const int iAttribList[] = { WGL_COLOR_BITS_ARB, WGL_RED_BITS_ARB, WGL_GREEN_BITS_ARB,
-        WGL_BLUE_BITS_ARB, WGL_ALPHA_BITS_ARB };
+        WGL_BLUE_BITS_ARB, WGL_ALPHA_BITS_ARB, WGL_BLUE_SHIFT_ARB, WGL_GREEN_SHIFT_ARB,
+        WGL_RED_SHIFT_ARB, WGL_ALPHA_SHIFT_ARB, };
     int iAttribRet[ARRAY_SIZE(iAttribList)];
     const int iAttribs[] = { WGL_ALPHA_BITS_ARB, 1, 0 };
     unsigned int nFormats;
@@ -778,6 +779,11 @@ static void test_colorbits(HDC hdc)
         skip("wglGetPixelFormatAttribivARB failed\n");
         return;
     }
+    ok(!iAttribRet[5], "got %d.\n", iAttribRet[5]);
+    ok(iAttribRet[6] == iAttribRet[3], "got %d.\n", iAttribRet[6]);
+    ok(iAttribRet[7] == iAttribRet[6] + iAttribRet[2], "got %d.\n", iAttribRet[7]);
+    ok(iAttribRet[8] == iAttribRet[7] + iAttribRet[1], "got %d.\n", iAttribRet[8]);
+
     iAttribRet[1] += iAttribRet[2]+iAttribRet[3]+iAttribRet[4];
     ok(iAttribRet[0] == iAttribRet[1], "WGL_COLOR_BITS_ARB (%d) does not equal R+G+B+A (%d)!\n",
        iAttribRet[0], iAttribRet[1]);
diff --git a/dlls/winex11.drv/opengl.c b/dlls/winex11.drv/opengl.c
index cd132f059df..02a7980d911 100644
--- a/dlls/winex11.drv/opengl.c
+++ b/dlls/winex11.drv/opengl.c
@@ -2691,6 +2691,7 @@ static BOOL X11DRV_wglGetPixelFormatAttribivARB( HDC hdc, int iPixelFormat, int
     int hTest;
     int tmp;
     int curGLXAttr = 0;
+    PIXELFORMATDESCRIPTOR pfd;
 
     TRACE("(%p, %d, %d, %d, %p, %p)\n", hdc, iPixelFormat, iLayerPlane, nAttributes, piAttributes, piValues);
 
@@ -2707,6 +2708,12 @@ static BOOL X11DRV_wglGetPixelFormatAttribivARB( HDC hdc, int iPixelFormat, int
         WARN("Unable to convert iPixelFormat %d to a GLX one!\n", iPixelFormat);
     }
 
+    if (!describe_pixel_format(iPixelFormat, &pfd))
+    {
+        WARN("describe_pixel_format failed.\n");
+        memset(&pfd, 0, sizeof(pfd));
+    }
+
     for (i = 0; i < nAttributes; ++i) {
         const int curWGLAttr = piAttributes[i];
         TRACE("pAttr[%d] = %x\n", i, curWGLAttr);
@@ -2808,6 +2815,23 @@ static BOOL X11DRV_wglGetPixelFormatAttribivARB( HDC hdc, int iPixelFormat, int
             curGLXAttr = GLX_AUX_BUFFERS;
             break;
 
+        case WGL_RED_SHIFT_ARB:
+            if (!pfd.nSize) goto pix_error;
+            piValues[i] = pfd.cRedShift;
+            continue;
+        case WGL_GREEN_SHIFT_ARB:
+            if (!pfd.nSize) goto pix_error;
+            piValues[i] = pfd.cGreenShift;
+            continue;
+        case WGL_BLUE_SHIFT_ARB:
+            if (!pfd.nSize) goto pix_error;
+            piValues[i] = pfd.cBlueShift;
+            continue;
+        case WGL_ALPHA_SHIFT_ARB:
+            if (!pfd.nSize) goto pix_error;
+            piValues[i] = pfd.cAlphaShift;
+            continue;
+
         case WGL_SUPPORT_GDI_ARB:
             if (!fmt) goto pix_error;
             piValues[i] = (fmt->dwFlags & PFD_SUPPORT_GDI) != 0;
```
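For context, here is a minimal sketch (not part of the patch) of how an application would query the attributes this MR implements through WGL_ARB_pixel_format. It assumes `<GL/wglext.h>` is available and that a GL context is already current so `wglGetProcAddress()` can resolve the entry point; the helper name is made up for illustration.

```c
#include <windows.h>
#include <GL/gl.h>
#include <GL/wglext.h>
#include <stdio.h>

#define ARRAY_LEN(x) (sizeof(x) / sizeof((x)[0]))

static void print_channel_shifts( HDC hdc, int pixel_format )
{
    const int attribs[] = { WGL_RED_SHIFT_ARB, WGL_GREEN_SHIFT_ARB,
                            WGL_BLUE_SHIFT_ARB, WGL_ALPHA_SHIFT_ARB };
    int values[ARRAY_LEN(attribs)];
    PFNWGLGETPIXELFORMATATTRIBIVARBPROC pwglGetPixelFormatAttribivARB =
        (PFNWGLGETPIXELFORMATATTRIBIVARBPROC)wglGetProcAddress( "wglGetPixelFormatAttribivARB" );

    if (!pwglGetPixelFormatAttribivARB) return;  /* WGL_ARB_pixel_format not supported */
    if (!pwglGetPixelFormatAttribivARB( hdc, pixel_format, 0, ARRAY_LEN(attribs), attribs, values ))
        return;
    /* For the common 32-bit formats the new tests expect blue at shift 0,
     * green above blue, red above green, and alpha above red. */
    printf( "R/G/B/A shifts: %d %d %d %d\n", values[0], values[1], values[2], values[3] );
}
```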
This merge request was approved by Matteo Bruni.
I did some more extensive tests, and it looks like this shift business is quite a mess.[*]
But this MR only exposes existing info to the application, so it seems fine to me. It did rebase cleanly on top of current master for me.
[*]: Both the Nvidia and AMD Windows drivers return questionable shifts for a few particular pixel formats. AMD also reports 128-bit float formats with RGBA ordering, which would break the test, but we don't support those, so we don't care.
Attaching the modified test that I used locally: [0001-opengl32-tests-Hack-tests-for-_SHIFT_ARB.txt](/uploads/924ece652979cbe7e3bd57b59e9768c7/0001-opengl32-tests-Hack-tests-for-_SHIFT_ARB.txt)
I guess in any case it only returns the same data as DescribePixelFormat(), which apps use anyway?
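That equivalence is easy to see from the GDI side. A minimal sketch of the same query through DescribePixelFormat() (the helper name is made up; cRedShift and friends are standard PIXELFORMATDESCRIPTOR fields), which should by construction report the same values the patch returns, since the patch fills its pfd from the driver's describe_pixel_format():

```c
#include <windows.h>
#include <stdio.h>

static void print_gdi_shifts( HDC hdc, int pixel_format )
{
    PIXELFORMATDESCRIPTOR pfd;

    /* DescribePixelFormat() fills the descriptor for the given format. */
    if (!DescribePixelFormat( hdc, pixel_format, sizeof(pfd), &pfd )) return;
    printf( "R/G/B/A shifts: %d %d %d %d\n",
            pfd.cRedShift, pfd.cGreenShift, pfd.cBlueShift, pfd.cAlphaShift );
}
```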