Mirror of https://github.com/RetroDECK/Duckstation.git (synced 2024-11-29 09:05:41 +00:00)
GPU: Set line width at higher internal resolutions
commit 8987fa93c2
parent d3ecad391d
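Queries GL_MAX_TEXTURE_SIZE and GL_ALIASED_LINE_WIDTH_RANGE at GPU initialization to derive the maximum internal resolution scale the host driver can render, clamps the configured scale against it, sets glLineWidth() to the active scale before drawing, and limits the Internal Resolution menu to the supported range.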
@@ -19,6 +19,7 @@ bool GPU_HW_OpenGL::Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers)
   if (!GPU_HW::Initialize(system, dma, interrupt_controller, timers))
     return false;
 
+  SetMaxResolutionScale();
   CreateFramebuffer();
   CreateVertexBuffer();
   if (!CompilePrograms())
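Note the ordering: SetMaxResolutionScale() runs before CreateFramebuffer(), so the clamped scale is presumably already in effect when the VRAM framebuffer is allocated at the scaled size.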
@@ -103,6 +104,24 @@ std::tuple<s32, s32> GPU_HW_OpenGL::ConvertToFramebufferCoordinates(s32 x, s32 y)
   return std::make_tuple(x, static_cast<s32>(static_cast<s32>(VRAM_HEIGHT) - y));
 }
 
+void GPU_HW_OpenGL::SetMaxResolutionScale()
+{
+  GLint max_texture_size = VRAM_WIDTH;
+  glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
+  Log_InfoPrintf("Max texture size: %dx%d", max_texture_size, max_texture_size);
+  const int max_texture_scale = max_texture_size / VRAM_WIDTH;
+
+  std::array<int, 2> line_width_range = {{1, 1}};
+  glGetIntegerv(GL_ALIASED_LINE_WIDTH_RANGE, line_width_range.data());
+  Log_InfoPrintf("Max line width: %d", line_width_range[1]);
+
+  const u32 max_resolution_scale = std::min(max_texture_scale, line_width_range[1]);
+  Log_InfoPrintf("Maximum resolution scale is %u", max_resolution_scale);
+  m_system->GetSettings().max_gpu_resolution_scale = max_resolution_scale;
+  m_system->GetSettings().gpu_resolution_scale =
+    std::min(m_system->GetSettings().gpu_resolution_scale, max_resolution_scale);
+}
+
 void GPU_HW_OpenGL::CreateFramebuffer()
 {
   // save old vram texture/fbo, in case we're changing scale
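To make the derivation concrete, here is the clamping logic above pulled out into a minimal standalone sketch. The driver limits are hypothetical stand-ins for what the two glGetIntegerv() queries would return on real hardware; PSX VRAM is 1024x512, so VRAM_WIDTH is 1024.

#include <algorithm>
#include <array>
#include <cstdio>

int main()
{
  constexpr int VRAM_WIDTH = 1024; // PSX VRAM is 1024x512 16-bit pixels

  // Hypothetical driver limits; the real values come from
  // glGetIntegerv(GL_MAX_TEXTURE_SIZE, ...) and
  // glGetIntegerv(GL_ALIASED_LINE_WIDTH_RANGE, ...).
  const int max_texture_size = 16384;
  const std::array<int, 2> line_width_range = {{1, 10}};

  // VRAM lives in a single texture, so the texture size limit caps the
  // scale: 16384 / 1024 = 16x.
  const int max_texture_scale = max_texture_size / VRAM_WIDTH;

  // Lines are drawn m_resolution_scale pixels wide, so the aliased line
  // width limit caps the scale as well. The usable maximum is the smaller
  // of the two: min(16, 10) = 10x.
  const int max_resolution_scale = std::min(max_texture_scale, line_width_range[1]);

  std::printf("Maximum resolution scale is %dx\n", max_resolution_scale);
  return 0;
}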
@@ -550,6 +569,7 @@ void GPU_HW_OpenGL::FlushRender()
   glDisable(GL_DEPTH_TEST);
   glEnable(GL_SCISSOR_TEST);
   glDepthMask(GL_FALSE);
+  glLineWidth(static_cast<float>(m_resolution_scale));
   SetProgram();
   SetViewport();
   SetScissor();
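This glLineWidth() call is what the commit title refers to: a PSX line primitive is one VRAM pixel wide, so at an Nx internal resolution the GL line must be N framebuffer pixels wide to cover the same area. SetMaxResolutionScale() already guarantees the scale never exceeds the driver's aliased range; the helper below is a hypothetical sketch (not part of the commit) that makes that invariant explicit at the call site.

#include <algorithm>
#include <array>
#include <cstdio>

// Hypothetical helper, not in the commit: clamp a requested line width to
// the driver's aliased range before handing it to glLineWidth().
static float ClampedLineWidth(float requested, const std::array<float, 2>& aliased_range)
{
  return std::min(std::max(requested, aliased_range[0]), aliased_range[1]);
}

int main()
{
  const std::array<float, 2> aliased_range = {{1.0f, 10.0f}}; // hypothetical query result
  for (int scale = 1; scale <= 12; scale++)
    std::printf("scale %2dx -> glLineWidth(%.1f)\n", scale,
                ClampedLineWidth(static_cast<float>(scale), aliased_range));
  return 0;
}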
@@ -37,6 +37,7 @@ private:
 
   std::tuple<s32, s32> ConvertToFramebufferCoordinates(s32 x, s32 y);
 
+  void SetMaxResolutionScale();
   void CreateFramebuffer();
   void ClearFramebuffer();
   void DestroyFramebuffer();
@@ -12,6 +12,7 @@ struct Settings
 
   GPUBackend gpu_backend = GPUBackend::OpenGL;
   u32 gpu_resolution_scale = 1;
+  u32 max_gpu_resolution_scale = 1;
 
   // TODO: Controllers, memory cards, etc.
 };
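max_gpu_resolution_scale defaults to 1 and is overwritten by SetMaxResolutionScale() at GPU initialization; gpu_resolution_scale remains the user-facing setting and is now clamped against it.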
@@ -524,7 +524,7 @@ void SDLInterface::RenderMainMenuBar()
   if (ImGui::BeginMenu("Internal Resolution"))
   {
     const u32 current_internal_resolution = m_system->GetSettings().gpu_resolution_scale;
-    for (u32 scale = 1; scale <= 16; scale++)
+    for (u32 scale = 1; scale <= m_system->GetSettings().max_gpu_resolution_scale; scale++)
     {
       if (ImGui::MenuItem(
             TinyString::FromFormat("%ux (%ux%u)", scale, scale * GPU::VRAM_WIDTH, scale * GPU::VRAM_HEIGHT),
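With this change the Internal Resolution menu only offers scales the host GPU can actually render: on a driver reporting a 10-pixel aliased line width maximum, for example, the list would run 1x through 10x instead of the previous hardcoded 1x through 16x.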