I am trying to use the GPU to do some math on 2D arrays, which I supply to the compute shader as plain 2D textures (DXGI_FORMAT_R32_FLOAT, no mipmaps) bound through Shader Resource Views. Those work: they reach the shader and are addressable.
But I got stuck trying to create an Unordered Access View on a similar 2D texture. CreateUnorderedAccessView(...) returns E_INVALIDARG, and I don't know how to expand that into more detail. The same thing happens with CreateTexture2D(...) whenever I include the D3D11_BIND_UNORDERED_ACCESS flag.
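The view creation itself is the textbook version; this is a sketch equivalent to my actual call (identifiers are illustrative):
// Sketch of the failing UAV creation; "texture" is the R32_FLOAT texture below.
D3D11_UNORDERED_ACCESS_VIEW_DESC uavDesc = {};
uavDesc.Format = DXGI_FORMAT_R32_FLOAT;
uavDesc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2D;
uavDesc.Texture2D.MipSlice = 0;
ID3D11UnorderedAccessView* uav = nullptr;
HRESULT hr = device->CreateUnorderedAccessView(texture, &uavDesc, &uav); // E_INVALIDARG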
My hardware supports feature level 11_0 and cs_5_0. Here's my code:
HRESULT Create_Simple_Texture_2D(ID3D11Device* device, ID3D11Texture2D** texture, UINT width, UINT height, void* initdata, UINT initsize)
{
*texture = nullptr;
UINT elementsize = sizeof(float);
DXGI_FORMAT format = DXGI_FORMAT_R32_FLOAT;
D3D11_TEXTURE2D_DESC desc = {};
{
// this works
// desc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // input to GPU
// desc.Usage = D3D11_USAGE_DEFAULT; // GPU read&write
// desc.CPUAccessFlags = 0; // no CPU access
// this works too
// desc.BindFlags = D3D11_BIND_SHADER_RESOURCE; // input to GPU
// desc.Usage = D3D11_USAGE_DYNAMIC; // CPU write -> GPU read
// desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // CPU write
// and this
// desc.BindFlags = 0; //
// desc.Usage = D3D11_USAGE_STAGING; //
// desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ; // CPU read
// BUT NOT THIS:
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
desc.Usage = D3D11_USAGE_DEFAULT; // GPU read&write
desc.CPUAccessFlags = 0; // no CPU access, except at creation
desc.Width = width;
desc.Height = height;
desc.Format = format; // see above
desc.ArraySize = 1; // single texture
desc.MipLevels = 1; // no other mipmaps
desc.MiscFlags = 0;
{
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = D3D11_STANDARD_MULTISAMPLE_PATTERN;
}
}
const UINT expected = desc.Width * desc.Height * desc.ArraySize * elementsize;
if (initdata && (initsize != expected)) wcprintf("\n\tWARNING: declared array size mismatch in Create_Simple_Texture_2D(...)\n\n");
D3D11_SUBRESOURCE_DATA sub = {};
{
sub.pSysMem = (void*)(new char[expected]);
sub.SysMemPitch = desc.Width * elementsize;
sub.SysMemSlicePitch = 0;
const UINT copysize = min(expected, initsize);
if (sub.pSysMem && initdata && copysize) memcpy((void*)sub.pSysMem, initdata, copysize);
}
HRESULT result = device->CreateTexture2D(&desc, &sub, texture);
if (sub.pSysMem) delete[] static_cast<const char*>(sub.pSysMem); // match the new char[] above
return result;
}
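For completeness, I call it along these lines (a sketch; the sizes are illustrative):
float initdata[16 * 16] = {}; // 16x16 payload of R32_FLOAT elements
ID3D11Texture2D* texture = nullptr;
HRESULT hr = Create_Simple_Texture_2D(device, &texture, 16, 16, initdata, sizeof(initdata));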
TL;DR: Enable the Direct3D debug device. See also this blog post.
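Enabling the debug device is a single flag at device creation; a minimal sketch (requires the SDK debug layers to be installed):
// Ask for the debug layer when creating the device.
UINT flags = D3D11_CREATE_DEVICE_DEBUG;
ID3D11Device* device = nullptr;
ID3D11DeviceContext* context = nullptr;
D3D_FEATURE_LEVEL level = {};
HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, flags,
    nullptr, 0, D3D11_SDK_VERSION, &device, &level, &context);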
The problem here is your use of:
desc.SampleDesc.Quality = D3D11_STANDARD_MULTISAMPLE_PATTERN;
The Direct3D Debug Layer, when enabled, tells you that this combination is invalid: D3D11_STANDARD_MULTISAMPLE_PATTERN is only meaningful when SampleDesc.Count is greater than 1; for a sample count of 1, SampleDesc.Quality must be 0.
SampleDesc.Count = 1 with SampleDesc.Quality = 0 is what you normally use to mean "no MSAA".
Since you already started with a zero-filled struct via D3D11_TEXTURE2D_DESC desc = {};, just remove your assignment to desc.SampleDesc.Quality entirely and you'll be good to go.
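For clarity, here is the relevant part of the description after the fix; everything else in your function can stay as it is:
desc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.CPUAccessFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0; // 0 means "no MSAA"; already set by desc = {}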