GPU/D3D11: Don't add UV limits to input layout when unused

Connor McLaughlin 2020-08-11 03:52:55 +10:00
parent 367b5ad5ff
commit fba2488bc5


@@ -362,9 +362,10 @@ bool GPU_HW_D3D11::CompileShaders()
   if (!vs_bytecode)
     return false;
 
-  const HRESULT hr = m_device->CreateInputLayout(attributes.data(), static_cast<UINT>(attributes.size()),
-                                                 vs_bytecode->GetBufferPointer(), vs_bytecode->GetBufferSize(),
-                                                 m_batch_input_layout.ReleaseAndGetAddressOf());
+  const UINT num_attributes = static_cast<UINT>(attributes.size()) - (m_using_uv_limits ? 0 : 1);
+  const HRESULT hr =
+    m_device->CreateInputLayout(attributes.data(), num_attributes, vs_bytecode->GetBufferPointer(),
+                                vs_bytecode->GetBufferSize(), m_batch_input_layout.ReleaseAndGetAddressOf());
   if (FAILED(hr))
   {
     Log_ErrorPrintf("CreateInputLayout failed: 0x%08X", hr);