ShaderGen: Disable interface blocks when using AMD OpenGL

SSAA/per-sample shading is broken otherwise.
This commit is contained in:
Connor McLaughlin 2021-03-14 01:09:16 +10:00
parent 75e473897e
commit 0feb817130


@@ -2,6 +2,7 @@
 #include "common/assert.h"
 #include "common/log.h"
 #include <cstdio>
+#include <cstring>
 #include <glad.h>
 Log_SetChannel(ShaderGen);
@@ -16,6 +17,14 @@ ShaderGen::ShaderGen(HostDisplay::RenderAPI render_api, bool supports_dual_sourc
   m_use_glsl_interface_blocks = (IsVulkan() || GLAD_GL_ES_VERSION_3_2 || GLAD_GL_VERSION_3_2);
   m_use_glsl_binding_layout = (IsVulkan() || UseGLSLBindingLayout());
+  if (m_render_api == HostDisplay::RenderAPI::OpenGL)
+  {
+    // SSAA with interface blocks is broken on AMD's OpenGL driver.
+    const char* gl_vendor = reinterpret_cast<const char*>(glGetString(GL_VENDOR));
+    if (std::strcmp(gl_vendor, "ATI Technologies Inc.") == 0)
+      m_use_glsl_interface_blocks = false;
+  }
 }
 }
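
For context on what the flag toggles: m_use_glsl_interface_blocks controls whether the generated GLSL groups the vertex/fragment varyings inside a named interface block or declares them as individual globals. The sketch below is purely illustrative and hypothetical; the function name WriteVaryings and the variable names are made up for this example and are not DuckStation's actual ShaderGen code. It only shows the kind of GLSL output the two settings correspond to.

// Hypothetical sketch, not DuckStation's ShaderGen: emits the two GLSL
// varying styles that m_use_glsl_interface_blocks chooses between.
#include <cstdio>
#include <string>

static std::string WriteVaryings(bool use_interface_blocks)
{
  if (use_interface_blocks)
  {
    // Varyings grouped into a named interface block -- the style this
    // commit disables on AMD's OpenGL driver because SSAA/per-sample
    // shading misbehaves with it.
    return "out VertexData {\n"
           "  vec4 v_col0;\n"
           "  vec2 v_tex0;\n"
           "};\n";
  }

  // Fallback: plain individual varyings, now used on AMD OpenGL.
  return "out vec4 v_col0;\n"
         "out vec2 v_tex0;\n";
}

int main()
{
  std::puts(WriteVaryings(true).c_str());
  std::puts(WriteVaryings(false).c_str());
  return 0;
}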