CPU/Recompiler: Add register allocation safety check to fastmem

Connor McLaughlin 2020-10-26 21:56:56 +10:00
parent f2e1159173
commit cbc7e3d64a
3 changed files with 15 additions and 3 deletions
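
On both the AArch64 and x64 code generators the change is the same: host register allocation is inhibited while the fastmem load/store sequence is emitted, and re-enabled only after switching back to near code, immediately before the backpatch record is stored. The sketch below shows one plausible shape for such a safety check inside the register cache; the counter, the assertion, and AllocateHostReg are assumptions for illustration, not DuckStation's actual implementation.

#include <cassert>
#include <cstdint>

// Hypothetical sketch of an allocation-inhibit check: a counter bumped by
// InhibitAllocation()/UninhibitAllocation(), plus an assertion in the
// allocator. Names and structure are illustrative only.
class RegisterCache
{
public:
  void InhibitAllocation() { m_allocation_inhibit_count++; }

  void UninhibitAllocation()
  {
    assert(m_allocation_inhibit_count > 0);
    m_allocation_inhibit_count--;
  }

  int AllocateHostReg()
  {
    // The point of the check: handing out a register in the middle of an
    // emitted fastmem sequence would desynchronize the recorded backpatch
    // location from the code that actually faults, so it must fail loudly.
    assert(m_allocation_inhibit_count == 0 && "register allocated while allocation is inhibited");
    return m_next_host_reg++; // placeholder allocation policy
  }

private:
  std::uint32_t m_allocation_inhibit_count = 0;
  int m_next_host_reg = 0;
};

With a check of that kind in place, the pattern visible in the hunks below is simply InhibitAllocation() before the size switch that emits the access, and UninhibitAllocation() after SwitchToNearCode().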

View file

@@ -305,7 +305,7 @@ void ExecuteRecompiler()
       const u32 pc = g_state.regs.pc;
       g_state.current_instruction_pc = pc;
       const u32 fast_map_index = GetFastMapIndex(pc);
-      s_fast_map[fast_map_index]();
+      s_single_block_asm_dispatcher[fast_map_index]();
     }
 
     TimingEvents::RunEvents();

View file

@@ -1336,7 +1336,8 @@ void CodeGenerator::EmitLoadGuestMemoryFastmem(const CodeBlockInstruction& cbi,
     actual_address = a64::MemOperand(GetFastmemBasePtrReg(), GetHostReg32(address));
   }
 
-  // TODO: movsx/zx inline here
+  m_register_cache.InhibitAllocation();
+
   switch (size)
   {
     case RegSize_8:
@@ -1370,6 +1371,7 @@ void CodeGenerator::EmitLoadGuestMemoryFastmem(const CodeBlockInstruction& cbi,
   EmitBranch(GetCurrentNearCodePointer(), false);
   SwitchToNearCode();
 
+  m_register_cache.UninhibitAllocation();
   m_block->loadstore_backpatch_info.push_back(bpi);
 }
@@ -1472,6 +1474,8 @@ void CodeGenerator::EmitStoreGuestMemoryFastmem(const CodeBlockInstruction& cbi,
     actual_address = a64::MemOperand(GetFastmemBasePtrReg(), GetHostReg32(address));
   }
 
+  m_register_cache.InhibitAllocation();
+
   switch (value.size)
   {
     case RegSize_8:
@@ -1504,6 +1508,7 @@ void CodeGenerator::EmitStoreGuestMemoryFastmem(const CodeBlockInstruction& cbi,
   EmitBranch(GetCurrentNearCodePointer(), false);
   SwitchToNearCode();
 
+  m_register_cache.UninhibitAllocation();
   m_block->loadstore_backpatch_info.push_back(bpi);
 }
@@ -2124,6 +2129,8 @@ CodeCache::SingleBlockDispatcherFunction CodeGenerator::CompileSingleBlockDispat
   m_register_cache.ReserveCalleeSavedRegisters();
   const u32 stack_adjust = PrepareStackForCall();
 
+  EmitLoadGlobalAddress(RCPUPTR, &g_state);
+
   m_emit->blr(GetHostReg64(RARG1));
   RestoreStackAfterCall(stack_adjust);
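
The dispatcher hunk is related housekeeping rather than part of the safety check: the compiled single-block dispatcher now loads the address of g_state into the reserved CPU-state register (RCPUPTR) before the indirect call, presumably because generated block code addresses guest state relative to that base register. A conceptual sketch follows, with the fixed register modelled as an explicit argument; State, BlockFunction, and DispatchBlock are hypothetical names.

// Conceptual model of the dispatcher prologue. In the generated code the
// state base lives in a fixed host register (RCPUPTR); here it is passed
// explicitly so the dependency is visible.
struct State
{
  // guest registers, pc, pending ticks, ...
};

using BlockFunction = void (*)(State* cpu_state);

void DispatchBlock(State& cpu_state, BlockFunction block)
{
  // EmitLoadGlobalAddress(RCPUPTR, &g_state) plays this role in the emitted
  // prologue: establish the state base before control enters the block.
  block(&cpu_state); // m_emit->blr(GetHostReg64(RARG1)) -- the indirect call
}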

View file

@@ -1824,7 +1824,8 @@ void CodeGenerator::EmitLoadGuestMemoryFastmem(const CodeBlockInstruction& cbi,
     bpi.host_pc = GetCurrentNearCodePointer();
   }
 
-  // TODO: movsx/zx inline here
+  m_register_cache.InhibitAllocation();
+
   switch (size)
   {
     case RegSize_8:
@@ -1896,6 +1897,7 @@ void CodeGenerator::EmitLoadGuestMemoryFastmem(const CodeBlockInstruction& cbi,
   m_emit->jmp(GetCurrentNearCodePointer());
   SwitchToNearCode();
 
+  m_register_cache.UninhibitAllocation();
   m_block->loadstore_backpatch_info.push_back(bpi);
 }
@@ -1993,6 +1995,8 @@ void CodeGenerator::EmitStoreGuestMemoryFastmem(const CodeBlockInstruction& cbi,
     bpi.host_pc = GetCurrentNearCodePointer();
   }
 
+  m_register_cache.InhibitAllocation();
+
   switch (value.size)
   {
     case RegSize_8:
@@ -2107,6 +2111,7 @@ void CodeGenerator::EmitStoreGuestMemoryFastmem(const CodeBlockInstruction& cbi,
   m_emit->jmp(GetCurrentNearCodePointer());
   SwitchToNearCode();
 
+  m_register_cache.UninhibitAllocation();
   m_block->loadstore_backpatch_info.push_back(bpi);
 }
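
A closing note on the shape of the fix: the commit pairs explicit InhibitAllocation()/UninhibitAllocation() calls around each fastmem sequence. An RAII guard would make the pairing impossible to miss on early-exit paths; the sketch below is a hypothetical alternative building on the RegisterCache sketch above, not something this commit contains.

// Hypothetical RAII wrapper around the inhibit/uninhibit pair. The guard is
// dropped automatically when the emission scope ends, even on early returns.
class ScopedAllocationInhibit
{
public:
  explicit ScopedAllocationInhibit(RegisterCache& rc) : m_rc(rc) { m_rc.InhibitAllocation(); }
  ~ScopedAllocationInhibit() { m_rc.UninhibitAllocation(); }

  ScopedAllocationInhibit(const ScopedAllocationInhibit&) = delete;
  ScopedAllocationInhibit& operator=(const ScopedAllocationInhibit&) = delete;

private:
  RegisterCache& m_rc;
};

// Usage mirroring EmitLoadGuestMemoryFastmem / EmitStoreGuestMemoryFastmem:
//   ScopedAllocationInhibit inhibit(m_register_cache);
//   ... emit the fastmem access and the far-code fallback ...
// (The commit itself uses the explicit call pair, matching the surrounding
// code generator style.)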