diff --git a/include/vkd3d_utils.h b/include/vkd3d_utils.h
index 686ddf386..845894c22 100644
--- a/include/vkd3d_utils.h
+++ b/include/vkd3d_utils.h
@@ -86,6 +86,7 @@ VKD3D_UTILS_API HRESULT WINAPI D3DCreateBlob(SIZE_T data_size, ID3DBlob **blob);
 VKD3D_UTILS_API HRESULT WINAPI D3DPreprocess(const void *data, SIZE_T size, const char *filename,
         const D3D_SHADER_MACRO *defines, ID3DInclude *include,
         ID3DBlob **shader, ID3DBlob **error_messages);
+VKD3D_UTILS_API HRESULT WINAPI D3DReflect(const void *data, SIZE_T data_size, REFIID iid, void **reflection);
 
 /**
  * Set a callback to be called when vkd3d-utils outputs debug logging.
We'd want a \since 1.11 on that, right? (And move it after D3DStripShader(), if we're going to stick with ordering things by the version they were introduced in.)
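Roughly like this, say (just a sketch to show the \since placement; the description text is made up, and 1.11 is assumed to be the release this lands in — the point is to keep the header's existing doxygen comment style):

/**
 * Retrieve a reflection interface for a compiled shader.
 *
 * \since 1.11
 */
VKD3D_UTILS_API HRESULT WINAPI D3DReflect(const void *data, SIZE_T data_size, REFIID iid, void **reflection);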
Fixed in v6.
diff --git a/libs/vkd3d-common/blob.c b/libs/vkd3d-common/blob.c
index 0f6d5a5ee..59e9834d4 100644
--- a/libs/vkd3d-common/blob.c
+++ b/libs/vkd3d-common/blob.c
@@ -23,6 +23,7 @@
 #include "vkd3d_blob.h"
 #include "vkd3d_debug.h"
 #include "vkd3d_memory.h"
+#include "vkd3d_d3d12shader.h"
 
 struct vkd3d_blob
 {
Why do we need that?
Because of the GUID definitions :-/
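For context, this is just the usual DEFINE_GUID()/INITGUID mechanism (simplified sketch below, not the actual vkd3d headers); I'm assuming blob.c is the translation unit that actually instantiates the GUIDs, so including vkd3d_d3d12shader.h there is what emits the IID objects (e.g. IID_ID3D12ShaderReflection) that the reflection code links against.

/* Simplified sketch of the DEFINE_GUID()/INITGUID idiom, for illustration only. */
#ifdef INITGUID
/* In the one translation unit that defines INITGUID, the macro emits the GUID object itself... */
#define DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
        const GUID name = {l, w1, w2, {b1, b2, b3, b4, b5, b6, b7, b8}}
#else
/* ...everywhere else it is only a declaration. */
#define DEFINE_GUID(name, l, w1, w2, b1, b2, b3, b4, b5, b6, b7, b8) \
        extern const GUID name
#endif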
+static ULONG STDMETHODCALLTYPE d3d12_reflection_AddRef(ID3D12ShaderReflection *iface)
+{
+    struct d3d12_reflection *reflection = impl_from_ID3D12ShaderReflection(iface);
+    ULONG refcount = InterlockedIncrement(&reflection->refcount);
+
+    TRACE("%p increasing refcount to %u.\n", reflection, refcount);
+
+    return refcount;
+}
+
+static ULONG STDMETHODCALLTYPE d3d12_reflection_Release(ID3D12ShaderReflection *iface)
+{
+    struct d3d12_reflection *reflection = impl_from_ID3D12ShaderReflection(iface);
+    ULONG refcount = InterlockedDecrement(&reflection->refcount);
+
+    TRACE("%p decreasing refcount to %u.\n", reflection, refcount);
+
+    if (!refcount)
+    {
+        free(reflection);
+    }
+
+    return refcount;
+}
We have vkd3d_atomic_increment_u32()/vkd3d_atomic_decrement_u32() now; see e.g. vkd3d_blob_AddRef()/vkd3d_blob_Release().
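That would look roughly like this (a sketch only, modeled on vkd3d_blob_AddRef()/vkd3d_blob_Release(); it assumes the refcount member becomes an unsigned int so it matches the helpers' uint32_t signature):

static ULONG STDMETHODCALLTYPE d3d12_reflection_AddRef(ID3D12ShaderReflection *iface)
{
    struct d3d12_reflection *reflection = impl_from_ID3D12ShaderReflection(iface);
    /* Assumes "refcount" is declared as unsigned int in struct d3d12_reflection. */
    unsigned int refcount = vkd3d_atomic_increment_u32(&reflection->refcount);

    TRACE("%p increasing refcount to %u.\n", reflection, refcount);

    return refcount;
}

static ULONG STDMETHODCALLTYPE d3d12_reflection_Release(ID3D12ShaderReflection *iface)
{
    struct d3d12_reflection *reflection = impl_from_ID3D12ShaderReflection(iface);
    unsigned int refcount = vkd3d_atomic_decrement_u32(&reflection->refcount);

    TRACE("%p decreasing refcount to %u.\n", reflection, refcount);

    if (!refcount)
        free(reflection);

    return refcount;
}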
Fixed in v6.
+static void check_signature_element(const D3D12_SIGNATURE_PARAMETER_DESC *desc,
+        const D3D12_SIGNATURE_PARAMETER_DESC *expect)
+{
+    ok(!strcmp(desc->SemanticName, expect->SemanticName), "Got name \"%s\".\n", desc->SemanticName);
+    ok(desc->SemanticIndex == expect->SemanticIndex, "Got index %u.\n", desc->SemanticIndex);
+    ok(desc->Register == expect->Register, "Got register %u.\n", desc->Register);
+    ok(desc->SystemValueType == expect->SystemValueType, "Got sysval %u.\n", desc->SystemValueType);
+    ok(desc->ComponentType == expect->ComponentType, "Got data type %u.\n", desc->ComponentType);
+    ok(desc->Mask == expect->Mask, "Got mask %#x.\n", desc->Mask);
+    todo_if(desc->ReadWriteMask != expect->ReadWriteMask)
+        ok(desc->ReadWriteMask == expect->ReadWriteMask, "Got used mask %#x.\n", desc->ReadWriteMask);
+    ok(desc->Stream == expect->Stream, "Got stream %u.\n", desc->Stream);
+}
I imagine this is borrowed from something like d3dcompiler's check_parameter_desc() in Wine, but it's unfortunate to lose the original line numbers for ok(). See e.g. check_heap_desc() for how we usually implement that kind of helper in vkd3d.
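I.e. roughly the following shape (just a sketch, assuming the usual ok_(line) wrapper from the test framework; the todo_if() handling is kept as in the patch). Callers then pass __LINE__ via the macro, so failures point at the test that called the helper rather than at the helper itself:

#define check_signature_element(a, b) check_signature_element_(__LINE__, a, b)
static void check_signature_element_(unsigned int line, const D3D12_SIGNATURE_PARAMETER_DESC *desc,
        const D3D12_SIGNATURE_PARAMETER_DESC *expect)
{
    ok_(line)(!strcmp(desc->SemanticName, expect->SemanticName), "Got name \"%s\".\n", desc->SemanticName);
    ok_(line)(desc->SemanticIndex == expect->SemanticIndex, "Got index %u.\n", desc->SemanticIndex);
    ok_(line)(desc->Register == expect->Register, "Got register %u.\n", desc->Register);
    ok_(line)(desc->SystemValueType == expect->SystemValueType, "Got sysval %u.\n", desc->SystemValueType);
    ok_(line)(desc->ComponentType == expect->ComponentType, "Got data type %u.\n", desc->ComponentType);
    ok_(line)(desc->Mask == expect->Mask, "Got mask %#x.\n", desc->Mask);
    todo_if(desc->ReadWriteMask != expect->ReadWriteMask)
        ok_(line)(desc->ReadWriteMask == expect->ReadWriteMask, "Got used mask %#x.\n", desc->ReadWriteMask);
    ok_(line)(desc->Stream == expect->Stream, "Got stream %u.\n", desc->Stream);
}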
I omitted them because the test contexts provide all the necessary information. I can restore them if preferred, though.