GPU/ShaderGen: Remove texcoord offset

Doesn't seem to be needed anymore, and causes issues in some games (e.g.
flickering polygons in Evil Dead)
Connor McLaughlin 2021-01-11 14:07:45 +10:00
parent ba2710fb4c
commit 97971464d1

@@ -97,8 +97,6 @@ std::string GPU_HW_ShaderGen::GenerateBatchVertexShader(bool textured)
 CONSTANT float POS_EPSILON = 0.00001;
 #endif
 #endif
-
-CONSTANT float TEX_EPSILON = 0.00001;
 )";

   if (textured)
@@ -159,10 +157,8 @@ CONSTANT float TEX_EPSILON = 0.00001;
   v_col0 = a_col0;
   #if TEXTURED
-    // Fudge the texture coordinates by half a pixel in screen-space.
-    // This fixes the rounding/interpolation error on NVIDIA GPUs with shared edges between triangles.
-    v_tex0 = float2(float((a_texcoord & 0xFFFFu) * RESOLUTION_SCALE) + TEX_EPSILON,
-                    float((a_texcoord >> 16) * RESOLUTION_SCALE) + TEX_EPSILON);
+    v_tex0 = float2(float((a_texcoord & 0xFFFFu) * RESOLUTION_SCALE),
+                    float((a_texcoord >> 16) * RESOLUTION_SCALE));

   // base_x,base_y,palette_x,palette_y
   v_texpage.x = (a_texpage & 15u) * 64u * RESOLUTION_SCALE;
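For context, the shader code in this diff decodes a single 32-bit vertex attribute into U/V texture coordinates: U sits in the low 16 bits, V in the high 16 bits, and both are multiplied by RESOLUTION_SCALE. The C++ sketch below mirrors that decode on the CPU side so the effect of dropping TEX_EPSILON is easy to see. It is an illustrative assumption, not DuckStation source; PackTexcoord and the resolution_scale value used here are hypothetical.

// Minimal sketch (not DuckStation source): CPU-side mirror of the shader's
// texcoord decode after this commit, i.e. without the TEX_EPSILON fudge.
#include <cstdint>
#include <cstdio>

// Hypothetical helper: pack 16-bit U/V into one 32-bit attribute, matching
// the layout the shader unpacks with (& 0xFFFFu) and (>> 16).
static uint32_t PackTexcoord(uint16_t u, uint16_t v)
{
  return static_cast<uint32_t>(u) | (static_cast<uint32_t>(v) << 16);
}

int main()
{
  const uint32_t a_texcoord = PackTexcoord(32, 48);
  const uint32_t resolution_scale = 2; // assumed 2x internal resolution

  // Same expressions as the new v_tex0 lines in the diff, minus the epsilon.
  const float tex_x = static_cast<float>((a_texcoord & 0xFFFFu) * resolution_scale);
  const float tex_y = static_cast<float>((a_texcoord >> 16) * resolution_scale);

  std::printf("v_tex0 = (%.1f, %.1f)\n", tex_x, tex_y); // prints (64.0, 96.0)
  return 0;
}

With the offset removed, upscaled texture coordinates land exactly on texel boundaries instead of being nudged by a small epsilon, which is what the commit message credits with fixing the flickering polygons.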