Mirror of https://github.com/RetroDECK/Duckstation.git (synced 2025-01-17 22:25:37 +00:00)
GPU: Move resolution scale to hardware backend only
commit c52c0608ae
parent c9feb7ea07
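Taken together, the hunks below move ownership of the resolution-scale state out of the shared GPU base class and into the hardware renderer: GPU loses m_resolution_scale, m_max_resolution_scale and their accessors, GPU_HW gains them as protected members and clamps the user setting during Initialize(), and GPU_HW_OpenGL computes its maximum before that clamp runs. A declaration-only sketch of the resulting layout, with forward declarations standing in for the real engine types and only the members touched by this commit shown:

    #include <cstdint>
    using u32 = std::uint32_t;

    class System; class DMA; class InterruptController; class Timers;

    class GPU {
    public:
      virtual ~GPU() = default;
      virtual bool Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers);
      virtual void UpdateSettings(); // no longer clamps a resolution scale
      // removed: GetResolutionScale() / GetMaxResolutionScale()
      // removed: u32 m_resolution_scale, u32 m_max_resolution_scale
    };

    class GPU_HW : public GPU {
    public:
      virtual bool Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers) override;
      virtual void UpdateSettings() override;

    protected:
      u32 m_resolution_scale = 1;     // moved here from GPU
      u32 m_max_resolution_scale = 1; // moved here from GPU
    };

In the OpenGL backend, SetMaxResolutionScale() now runs before GPU_HW::Initialize(), so m_max_resolution_scale is already valid when the clamp happens.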
@@ -28,7 +28,7 @@ bool GPU::Initialize(System* system, DMA* dma, InterruptController* interrupt_co
 
 void GPU::UpdateSettings()
 {
-  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
 }
 
 void GPU::Reset()
@@ -52,9 +52,7 @@ public:
   void DMARead(u32* words, u32 word_count);
   void DMAWrite(const u32* words, u32 word_count);
 
-  // Resolution scaling.
-  u32 GetResolutionScale() const { return m_resolution_scale; }
-  u32 GetMaxResolutionScale() const { return m_max_resolution_scale; }
+  // Recompile shaders/recreate framebuffers when needed.
   virtual void UpdateSettings();
 
   // Ticks for hblank/vblank.
@@ -299,10 +297,6 @@ protected:
   InterruptController* m_interrupt_controller = nullptr;
   Timers* m_timers = nullptr;
 
-  // Resolution scale.
-  u32 m_resolution_scale = 1;
-  u32 m_max_resolution_scale = 1;
-
   union GPUSTAT
   {
     u32 bits;
@@ -17,10 +17,24 @@ void GPU_HW::Reset()
   m_batch = {};
 }
 
+bool GPU_HW::Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers)
+{
+  if (!GPU::Initialize(system, dma, interrupt_controller, timers))
+    return false;
+
+  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
+  m_system->GetSettings().gpu_resolution_scale = m_resolution_scale;
+  m_system->GetSettings().max_gpu_resolution_scale = m_max_resolution_scale;
+  m_true_color = m_system->GetSettings().gpu_true_color;
+  return true;
+}
+
 void GPU_HW::UpdateSettings()
 {
   GPU::UpdateSettings();
 
+  m_resolution_scale = std::clamp<u32>(m_system->GetSettings().gpu_resolution_scale, 1, m_max_resolution_scale);
+  m_system->GetSettings().gpu_resolution_scale = m_resolution_scale;
   m_true_color = m_system->GetSettings().gpu_true_color;
 }
 
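The new GPU_HW::Initialize() clamps the requested scale against the backend's maximum and then writes the effective values back into the settings, which is why System::CreateGPU() (last hunk below) no longer has to do that synchronization itself. A minimal, self-contained sketch of the clamp-and-write-back step; the Settings struct here is a stand-in for illustration, not the project's real type:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    using u32 = std::uint32_t;

    // Stand-in for the engine's settings; field names mirror the diff.
    struct Settings {
      u32 gpu_resolution_scale = 8;     // what the user requested
      u32 max_gpu_resolution_scale = 1; // published by the renderer
    };

    // Clamp the requested scale to [1, max] and publish the effective values,
    // as GPU_HW::Initialize() does in the hunk above.
    u32 ApplyResolutionScale(Settings& settings, u32 max_resolution_scale)
    {
      const u32 scale = std::clamp<u32>(settings.gpu_resolution_scale, 1, max_resolution_scale);
      settings.gpu_resolution_scale = scale;
      settings.max_gpu_resolution_scale = max_resolution_scale;
      return scale;
    }

    int main()
    {
      Settings settings;
      const u32 scale = ApplyResolutionScale(settings, 4); // backend supports at most 4x
      std::printf("effective scale: %ux (requested 8x, max 4x)\n", scale);
      return 0;
    }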
@@ -11,6 +11,7 @@ public:
   GPU_HW();
   virtual ~GPU_HW();
 
+  virtual bool Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers) override;
   virtual void Reset() override;
   virtual void UpdateSettings() override;
 
@@ -108,6 +109,8 @@ protected:
   std::string GenerateDisplayFragmentShader(bool depth_24bit, bool interlaced);
 
   HWRenderBatch m_batch = {};
+  u32 m_resolution_scale = 1;
+  u32 m_max_resolution_scale = 1;
   bool m_true_color = false;
 
 private:
@@ -16,10 +16,11 @@ GPU_HW_OpenGL::~GPU_HW_OpenGL()
 
 bool GPU_HW_OpenGL::Initialize(System* system, DMA* dma, InterruptController* interrupt_controller, Timers* timers)
 {
+  SetMaxResolutionScale();
+
   if (!GPU_HW::Initialize(system, dma, interrupt_controller, timers))
     return false;
 
-  SetMaxResolutionScale();
   CreateFramebuffer();
   CreateVertexBuffer();
   if (!CompilePrograms())
@@ -149,8 +150,6 @@ void GPU_HW_OpenGL::SetMaxResolutionScale()
 
   m_max_resolution_scale = std::min(max_texture_scale, line_width_range[1]);
   Log_InfoPrintf("Maximum resolution scale is %u", m_max_resolution_scale);
-
-  m_resolution_scale = std::min(m_resolution_scale, m_max_resolution_scale);
 }
 
 void GPU_HW_OpenGL::CreateFramebuffer()
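Because SetMaxResolutionScale() is now invoked before GPU_HW::Initialize() (previous hunk), m_max_resolution_scale is already populated when the clamp runs, and the re-clamp that used to sit at the end of this function becomes redundant. The hunks only show the final std::min over a texture-size-derived scale and the line-width limit; the query below is an assumption about how such limits are typically obtained in OpenGL, not the project's exact code:

    #include <algorithm>
    #include <cstdint>
    #include <GL/gl.h> // or the project's GL loader header; GL_ALIASED_LINE_WIDTH_RANGE needs GL >= 1.2

    using u32 = std::uint32_t;

    // PlayStation VRAM is a fixed 1024x512 buffer; the scale multiplies those dimensions.
    constexpr u32 VRAM_WIDTH = 1024;

    // Sketch: derive the largest usable resolution scale from driver limits.
    // Requires a current OpenGL context.
    u32 QueryMaxResolutionScale()
    {
      GLint max_texture_size = 0;
      glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);

      GLfloat line_width_range[2] = {1.0f, 1.0f};
      glGetFloatv(GL_ALIASED_LINE_WIDTH_RANGE, line_width_range);

      const u32 max_texture_scale = static_cast<u32>(max_texture_size) / VRAM_WIDTH;
      const u32 max_line_scale = static_cast<u32>(line_width_range[1]);

      // Mirrors the surviving line above:
      //   m_max_resolution_scale = std::min(max_texture_scale, line_width_range[1]);
      return std::max<u32>(1, std::min(max_texture_scale, max_line_scale));
    }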
@@ -130,10 +130,6 @@ bool System::CreateGPU()
 
   m_bus->SetGPU(m_gpu.get());
   m_dma->SetGPU(m_gpu.get());
-
-  // the new GPU could have a lower maximum resolution
-  m_settings.gpu_resolution_scale = m_gpu->GetResolutionScale();
-  m_settings.max_gpu_resolution_scale = m_gpu->GetMaxResolutionScale();
   return true;
 }
 