// SPDX-FileCopyrightText: 2019-2022 Connor McLaughlin <stenzek@gmail.com>
// SPDX-License-Identifier: (GPL-3.0 OR CC-BY-NC-ND-4.0)

#include "cpu_code_cache.h"
#include "bus.h"
#include "common/assert.h"
#include "common/log.h"
#include "cpu_core.h"
#include "cpu_core_private.h"
#include "cpu_disasm.h"
#include "cpu_recompiler_types.h"
#include "settings.h"
#include "system.h"
#include "timing_event.h"
Log_SetChannel(CPU::CodeCache);

#ifdef ENABLE_RECOMPILER
#include "cpu_recompiler_code_generator.h"
#endif

#include <zlib.h>

namespace CPU::CodeCache {

static constexpr bool USE_BLOCK_LINKING = true;

// Fall a block back to the interpreter if we recompile it more than 20 times within 100 frames.
static constexpr u32 RECOMPILE_FRAMES_TO_FALL_BACK_TO_INTERPRETER = 100;
static constexpr u32 RECOMPILE_COUNT_TO_FALL_BACK_TO_INTERPRETER = 20;
static constexpr u32 INVALIDATE_THRESHOLD_TO_DISABLE_LINKING = 10;

#ifdef ENABLE_RECOMPILER

// Currently, remapping the code buffer doesn't work on macOS or Haiku.
#if !defined(__HAIKU__) && !defined(__APPLE__)
#define USE_STATIC_CODE_BUFFER 1
#endif

#if defined(CPU_ARCH_ARM32)
// Use a smaller code buffer size on AArch32 to have a better chance of being in range.
static constexpr u32 RECOMPILER_CODE_CACHE_SIZE = 16 * 1024 * 1024;
static constexpr u32 RECOMPILER_FAR_CODE_CACHE_SIZE = 8 * 1024 * 1024;
#else
static constexpr u32 RECOMPILER_CODE_CACHE_SIZE = 32 * 1024 * 1024;
static constexpr u32 RECOMPILER_FAR_CODE_CACHE_SIZE = 16 * 1024 * 1024;
#endif

static constexpr u32 CODE_WRITE_FAULT_THRESHOLD_FOR_SLOWMEM = 10;

#ifdef USE_STATIC_CODE_BUFFER
static constexpr u32 RECOMPILER_GUARD_SIZE = 4096;
alignas(Recompiler::CODE_STORAGE_ALIGNMENT) static u8
  s_code_storage[RECOMPILER_CODE_CACHE_SIZE + RECOMPILER_FAR_CODE_CACHE_SIZE];
#endif

static JitCodeBuffer s_code_buffer;

#endif

#ifdef ENABLE_RECOMPILER
static FastMapTable s_fast_map[FAST_MAP_TABLE_COUNT];
static std::unique_ptr<CodeBlock::HostCodePointer[]> s_fast_map_pointers;

DispatcherFunction s_asm_dispatcher;
SingleBlockDispatcherFunction s_single_block_asm_dispatcher;
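
// How the fast map indexing trick below works (a sketch from reading this code, assuming
// FAST_MAP_TABLE_SHIFT == 16 to match the pc >> 16 indexing in the reference dispatcher loop):
// each s_fast_map slot stores its table pointer pre-biased by -(slot << shift), with an extra
// shift on 64-bit hosts where entries are 8 bytes wide. The real entry for a (word-aligned) pc
// lives at table + ((pc & 0xFFFF) >> 2) * sizeof(entry), and since slot == pc >> 16, the bias
// cancels the high pc bits exactly: encoded_base + (pc << 1) on 64-bit, or encoded_base + pc on
// 32-bit, lands on the same entry with no masking in the hot dispatch path.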
static FastMapTable DecodeFastMapPointer(u32 slot, FastMapTable ptr)
{
  if constexpr (sizeof(void*) == 8)
    return reinterpret_cast<FastMapTable>(reinterpret_cast<u8*>(ptr) + (static_cast<u64>(slot) << 17));
  else
    return reinterpret_cast<FastMapTable>(reinterpret_cast<u8*>(ptr) + (slot << 16));
}

static FastMapTable EncodeFastMapPointer(u32 slot, FastMapTable ptr)
{
  if constexpr (sizeof(void*) == 8)
    return reinterpret_cast<FastMapTable>(reinterpret_cast<u8*>(ptr) - (static_cast<u64>(slot) << 17));
  else
    return reinterpret_cast<FastMapTable>(reinterpret_cast<u8*>(ptr) - (slot << 16));
}

static CodeBlock::HostCodePointer* OffsetFastMapPointer(FastMapTable fake_ptr, u32 pc)
{
  u8* fake_byte_ptr = reinterpret_cast<u8*>(fake_ptr);
  if constexpr (sizeof(void*) == 8)
    return reinterpret_cast<CodeBlock::HostCodePointer*>(fake_byte_ptr + (static_cast<u64>(pc) << 1));
  else
    return reinterpret_cast<CodeBlock::HostCodePointer*>(fake_byte_ptr + pc);
}

static void CompileDispatcher();
static void FastCompileBlockFunction();
static void InvalidCodeFunction();

static constexpr u32 GetTableCount(u32 start, u32 end)
{
  return ((end >> FAST_MAP_TABLE_SHIFT) - (start >> FAST_MAP_TABLE_SHIFT)) + 1;
}
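
// Worked example, assuming FAST_MAP_TABLE_SHIFT == 16: for the RAM range {0x00000000, 0x00800000}
// used in AllocateFastMap() below, GetTableCount() yields (0x80 - 0x00) + 1 = 0x81 tables; the +1
// keeps the table containing the range's end address allocated, so boundary lookups stay in bounds.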
static void AllocateFastMapTables(u32 start, u32 end, FastMapTable& table_ptr)
{
  const u32 start_slot = start >> FAST_MAP_TABLE_SHIFT;
  const u32 count = GetTableCount(start, end);
  for (u32 i = 0; i < count; i++)
  {
    const u32 slot = start_slot + i;

    s_fast_map[slot] = EncodeFastMapPointer(slot, table_ptr);
    table_ptr += FAST_MAP_TABLE_SIZE;
  }
}

static void AllocateFastMap()
{
  static constexpr VirtualMemoryAddress ranges[][2] = {
    {0x00000000, 0x00800000}, // RAM
    {0x1F000000, 0x1F800000}, // EXP1
    {0x1FC00000, 0x1FC80000}, // BIOS

    {0x80000000, 0x80800000}, // RAM
    {0x9F000000, 0x9F800000}, // EXP1
    {0x9FC00000, 0x9FC80000}, // BIOS

    {0xA0000000, 0xA0800000}, // RAM
    {0xBF000000, 0xBF800000}, // EXP1
    {0xBFC00000, 0xBFC80000}  // BIOS
  };

  u32 num_tables = 1; // unreachable table
  for (u32 i = 0; i < countof(ranges); i++)
    num_tables += GetTableCount(ranges[i][0], ranges[i][1]);

  const u32 num_slots = FAST_MAP_TABLE_SIZE * num_tables;
  if (!s_fast_map_pointers)
    s_fast_map_pointers = std::make_unique<CodeBlock::HostCodePointer[]>(num_slots);

  FastMapTable table_ptr = s_fast_map_pointers.get();
  FastMapTable table_ptr_end = table_ptr + num_slots;

  // Fill the first table with invalid/unreachable.
  for (u32 i = 0; i < FAST_MAP_TABLE_SIZE; i++)
    table_ptr[i] = InvalidCodeFunction;

  // And the remaining with block compile pointers.
  for (u32 i = FAST_MAP_TABLE_SIZE; i < num_slots; i++)
    table_ptr[i] = FastCompileBlockFunction;

  // Mark everything as unreachable to begin with.
  for (u32 i = 0; i < FAST_MAP_TABLE_COUNT; i++)
    s_fast_map[i] = EncodeFastMapPointer(i, table_ptr);
  table_ptr += FAST_MAP_TABLE_SIZE;

  // Allocate ranges.
  for (u32 i = 0; i < countof(ranges); i++)
    AllocateFastMapTables(ranges[i][0], ranges[i][1], table_ptr);

  Assert(table_ptr == table_ptr_end);
}

static void ResetFastMap()
{
  if (!s_fast_map_pointers)
    return;

  for (u32 i = 0; i < FAST_MAP_TABLE_COUNT; i++)
  {
    FastMapTable ptr = DecodeFastMapPointer(i, s_fast_map[i]);
    if (ptr == s_fast_map_pointers.get())
      continue;

    for (u32 j = 0; j < FAST_MAP_TABLE_SIZE; j++)
      ptr[j] = FastCompileBlockFunction;
  }
}

static void FreeFastMap()
{
  std::memset(s_fast_map, 0, sizeof(s_fast_map));
  s_fast_map_pointers.reset();
}

static void SetFastMap(u32 pc, CodeBlock::HostCodePointer function)
{
  if (!s_fast_map_pointers)
    return;

  const u32 slot = pc >> FAST_MAP_TABLE_SHIFT;
  FastMapTable encoded_ptr = s_fast_map[slot];

  const FastMapTable table_ptr = DecodeFastMapPointer(slot, encoded_ptr);
  Assert(table_ptr != nullptr && table_ptr != s_fast_map_pointers.get());

  CodeBlock::HostCodePointer* ptr = OffsetFastMapPointer(encoded_ptr, pc);
  *ptr = function;
}

#endif

using BlockMap = std::unordered_map<u32, CodeBlock*>;
using HostCodeMap = std::map<CodeBlock::HostCodePointer, CodeBlock*>;
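
// HostCodeMap has to be an ordered std::map: the page fault handlers below locate the faulting
// block with upper_bound() on the host exception PC (i.e. "nearest block starting at or below
// this address"), a query a hash map cannot answer.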

void LogCurrentState();

/// Returns the block key for the current execution state.
static CodeBlockKey GetNextBlockKey();

/// Looks up the block in the cache if it's already been compiled.
static CodeBlock* LookupBlock(CodeBlockKey key, bool allow_flush);

/// Can the current block execute? This will re-validate the block if necessary.
/// The block can also be flushed if recompilation failed, so ignore the pointer if false is returned.
static bool RevalidateBlock(CodeBlock* block, bool allow_flush);

static bool CompileBlock(CodeBlock* block, bool allow_flush);
static void RemoveReferencesToBlock(CodeBlock* block);
static void AddBlockToPageMap(CodeBlock* block);
static void RemoveBlockFromPageMap(CodeBlock* block);

/// Links the 'from' block to the 'to' block, backpatching the branch at host_pc when provided.
static void LinkBlock(CodeBlock* from, CodeBlock* to, void* host_pc, void* host_resolve_pc, u32 host_pc_size);

/// Unlink all blocks which point to this block, and any that this block links to.
static void UnlinkBlock(CodeBlock* block);

static void ClearState();
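
// s_blocks maps key bits -> compiled block; m_ram_block_map keeps one vector of blocks per RAM
// code page, so InvalidateBlocksWithPageIndex() can find every block a write may have touched.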
static BlockMap s_blocks;
static std::array<std::vector<CodeBlock*>, Bus::RAM_8MB_CODE_PAGE_COUNT> m_ram_block_map;

#ifdef ENABLE_RECOMPILER
static HostCodeMap s_host_code_map;

static void AddBlockToHostCodeMap(CodeBlock* block);
static void RemoveBlockFromHostCodeMap(CodeBlock* block);

static bool InitializeFastmem();
static void ShutdownFastmem();
static Common::PageFaultHandler::HandlerResult LUTPageFaultHandler(void* exception_pc, void* fault_address,
                                                                   bool is_write);
#ifdef ENABLE_MMAP_FASTMEM
static Common::PageFaultHandler::HandlerResult MMapPageFaultHandler(void* exception_pc, void* fault_address,
                                                                    bool is_write);
#endif
#endif // ENABLE_RECOMPILER

void Initialize()
{
  Assert(s_blocks.empty());

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler())
  {
#ifdef USE_STATIC_CODE_BUFFER
    const bool has_buffer = s_code_buffer.Initialize(s_code_storage, sizeof(s_code_storage),
                                                     RECOMPILER_FAR_CODE_CACHE_SIZE, RECOMPILER_GUARD_SIZE);
#else
    const bool has_buffer = false;
#endif
    if (!has_buffer && !s_code_buffer.Allocate(RECOMPILER_CODE_CACHE_SIZE, RECOMPILER_FAR_CODE_CACHE_SIZE))
    {
      Panic("Failed to initialize code space");
    }
  }
#endif

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler())
  {
    if (g_settings.IsUsingFastmem() && !InitializeFastmem())
      Panic("Failed to initialize fastmem");

    AllocateFastMap();
    CompileDispatcher();
    ResetFastMap();
  }
#endif
}

void ClearState()
{
  Bus::ClearRAMCodePageFlags();
  for (auto& it : m_ram_block_map)
    it.clear();

  for (const auto& it : s_blocks)
    delete it.second;

  s_blocks.clear();
#ifdef ENABLE_RECOMPILER
  s_host_code_map.clear();
  s_code_buffer.Reset();
  ResetFastMap();
#endif
}

void Shutdown()
{
  ClearState();
#ifdef ENABLE_RECOMPILER
  ShutdownFastmem();
  FreeFastMap();
  s_code_buffer.Destroy();
#endif
}
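
// Dispatch loop for the cached-interpreter modes: look up (or compile) the block for the current
// key, interpret it, then chase block links where possible to skip the hash lookup for the next
// block. Marked [[noreturn]]; leaving the loop is handled outside this function.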
template<PGXPMode pgxp_mode>
[[noreturn]] static void ExecuteImpl()
{
  CodeBlockKey next_block_key;

  for (;;)
  {
    TimingEvents::RunEvents();

    next_block_key = GetNextBlockKey();
    while (g_state.pending_ticks < g_state.downcount)
    {
      CodeBlock* block = LookupBlock(next_block_key, true);
      if (!block)
      {
        InterpretUncachedBlock<pgxp_mode>();
        next_block_key = GetNextBlockKey();
        continue;
      }

    reexecute_block:
      Assert(!(HasPendingInterrupt()));

#if 0
      const u32 tick = TimingEvents::GetGlobalTickCounter() + CPU::GetPendingTicks();
      if (tick == 4188233674)
        __debugbreak();
#endif

#if 0
      LogCurrentState();
#endif

      if (g_settings.cpu_recompiler_icache)
        CheckAndUpdateICacheTags(block->icache_line_count, block->uncached_fetch_ticks);

      InterpretCachedBlock<pgxp_mode>(*block);

      if (g_state.pending_ticks >= g_state.downcount)
        break;
      else if (!USE_BLOCK_LINKING)
        continue;

      next_block_key = GetNextBlockKey();
      if (next_block_key.bits == block->key.bits)
      {
        // we can jump straight to it if there's no pending interrupts
        // ensure it's not a self-modifying block
        if (!block->invalidated || RevalidateBlock(block, true))
          goto reexecute_block;
      }
      else if (!block->invalidated)
      {
        // Try to find an already-linked block.
        // TODO: Don't need to dereference the block, just store a pointer to the code.
        for (const CodeBlock::LinkInfo& li : block->link_successors)
        {
          CodeBlock* linked_block = li.block;
          if (linked_block->key.bits == next_block_key.bits)
          {
            if (linked_block->invalidated && !RevalidateBlock(linked_block, true))
            {
              // RevalidateBlock can result in a block flush, so stop iterating here.
              break;
            }

            // Execute the linked block
            block = linked_block;
            goto reexecute_block;
          }
        }

        // No acceptable blocks found in the successor list, try a new one.
        CodeBlock* next_block = LookupBlock(next_block_key, false);
        if (next_block)
        {
          // Link the previous block to this new block if we find a new block.
          LinkBlock(block, next_block, nullptr, nullptr, 0);
          block = next_block;
          goto reexecute_block;
        }
      }
    }
  }

  // in case we switch to interpreter...
  g_state.npc = g_state.pc;
}

#ifdef ENABLE_RECOMPILER

void CompileDispatcher()
{
  s_code_buffer.WriteProtect(false);

  {
    Recompiler::CodeGenerator cg(&s_code_buffer);
    s_asm_dispatcher = cg.CompileDispatcher();
  }
  {
    Recompiler::CodeGenerator cg(&s_code_buffer);
    s_single_block_asm_dispatcher = cg.CompileSingleBlockDispatcher();
  }

  s_code_buffer.WriteProtect(true);
}

FastMapTable* GetFastMapPointer()
{
  return s_fast_map;
}

[[noreturn]] static void ExecuteRecompiler()
{
#if 0
  for (;;)
  {
    if (HasPendingInterrupt())
      DispatchInterrupt();

    TimingEvents::RunEvents();

    while (g_state.pending_ticks < g_state.downcount)
    {
#if 0
      LogCurrentState();
#endif

      const u32 pc = g_state.pc;
      s_single_block_asm_dispatcher(s_fast_map[pc >> 16][pc >> 2]);
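      // (pc >> 16 selects the encoded table, pc >> 2 then indexes it; this only works because
      // EncodeFastMapPointer() pre-biased the table base by slot << shift, as sketched above.)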
    }
  }
#else
  s_asm_dispatcher();
#endif
  UnreachableCode();
}

#endif

[[noreturn]] void Execute()
{
  switch (g_settings.cpu_execution_mode)
  {
#ifdef ENABLE_RECOMPILER
    case CPUExecutionMode::Recompiler:
      ExecuteRecompiler();
      break;
#endif

    default:
    {
      if (g_settings.gpu_pgxp_enable)
      {
        if (g_settings.gpu_pgxp_cpu)
          ExecuteImpl<PGXPMode::CPU>();
        else
          ExecuteImpl<PGXPMode::Memory>();
      }
      else
      {
        ExecuteImpl<PGXPMode::Disabled>();
      }
    }
    break;
  }
}

#if defined(ENABLE_RECOMPILER)

JitCodeBuffer& GetCodeBuffer()
{
  return s_code_buffer;
}

#endif

void Reinitialize()
{
  ClearState();

#ifdef ENABLE_RECOMPILER
  ShutdownFastmem();
#endif

#if defined(ENABLE_RECOMPILER)
  s_code_buffer.Destroy();

  if (g_settings.IsUsingRecompiler())
  {
#ifdef USE_STATIC_CODE_BUFFER
    if (!s_code_buffer.Initialize(s_code_storage, sizeof(s_code_storage), RECOMPILER_FAR_CODE_CACHE_SIZE,
                                  RECOMPILER_GUARD_SIZE))
#else
    if (!s_code_buffer.Allocate(RECOMPILER_CODE_CACHE_SIZE, RECOMPILER_FAR_CODE_CACHE_SIZE))
#endif
    {
      Panic("Failed to initialize code space");
    }
  }
#endif

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler())
  {
    if (g_settings.IsUsingFastmem() && !InitializeFastmem())
      Panic("Failed to initialize fastmem");

    AllocateFastMap();
    CompileDispatcher();
    ResetFastMap();
  }
#endif
}

void Flush()
{
  ClearState();
#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler())
    CompileDispatcher();
#endif
}

#ifndef _MSC_VER
void __debugbreak()
{
}
#endif

void LogCurrentState()
{
#if 0
  if ((TimingEvents::GetGlobalTickCounter() + GetPendingTicks()) == 2546728915)
    __debugbreak();
#endif
#if 0
  if ((TimingEvents::GetGlobalTickCounter() + GetPendingTicks()) < 2546729174)
    return;
#endif

  const auto& regs = g_state.regs;
  WriteToExecutionLog(
    "tick=%u dc=%u/%u pc=%08X at=%08X v0=%08X v1=%08X a0=%08X a1=%08X a2=%08X a3=%08X t0=%08X "
    "t1=%08X t2=%08X t3=%08X t4=%08X t5=%08X t6=%08X t7=%08X s0=%08X s1=%08X s2=%08X s3=%08X s4=%08X "
    "s5=%08X s6=%08X s7=%08X t8=%08X t9=%08X k0=%08X k1=%08X gp=%08X sp=%08X fp=%08X ra=%08X ldr=%s "
    "ldv=%08X cause=%08X sr=%08X gte=%08X\n",
    TimingEvents::GetGlobalTickCounter() + GetPendingTicks(), g_state.pending_ticks, g_state.downcount, g_state.pc,
    regs.at, regs.v0, regs.v1, regs.a0, regs.a1, regs.a2, regs.a3, regs.t0, regs.t1, regs.t2, regs.t3, regs.t4, regs.t5,
    regs.t6, regs.t7, regs.s0, regs.s1, regs.s2, regs.s3, regs.s4, regs.s5, regs.s6, regs.s7, regs.t8, regs.t9, regs.k0,
    regs.k1, regs.gp, regs.sp, regs.fp, regs.ra,
    (g_state.next_load_delay_reg == Reg::count) ? "NONE" : GetRegName(g_state.next_load_delay_reg),
    (g_state.next_load_delay_reg == Reg::count) ? 0 : g_state.next_load_delay_value, g_state.cop0_regs.cause.bits,
    g_state.cop0_regs.sr.bits, static_cast<u32>(crc32(0, (const Bytef*)&g_state.gte_regs, sizeof(g_state.gte_regs))));
}

CodeBlockKey GetNextBlockKey()
{
  CodeBlockKey key;
  key.bits = 0;
  key.SetPC(g_state.pc);
  key.user_mode = InUserMode();
  return key;
}
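
// Because user_mode is part of the key bits, the same guest PC gets a distinct cached block per
// privilege mode, and the key.bits comparisons used elsewhere match PC and mode in one compare.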

// assumes it has already been unlinked
static void FallbackExistingBlockToInterpreter(CodeBlock* block)
{
  // Replace with null so we don't try to compile it again.
  s_blocks.emplace(block->key.bits, nullptr);
  delete block;
}

CodeBlock* LookupBlock(CodeBlockKey key, bool allow_flush)
{
  BlockMap::iterator iter = s_blocks.find(key.bits);
  if (iter != s_blocks.end())
  {
    // ensure it hasn't been invalidated
    CodeBlock* existing_block = iter->second;
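    // A null entry is the sentinel left by FallbackExistingBlockToInterpreter(); callers treat
    // nullptr as "interpret this block uncached" rather than retrying compilation forever.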
    if (!existing_block || !existing_block->invalidated)
      return existing_block;

    // if compilation fails or we're forced back to the interpreter, bail out
    if (RevalidateBlock(existing_block, allow_flush))
      return existing_block;
    else
      return nullptr;
  }

  CodeBlock* block = new CodeBlock(key);
  block->recompile_frame_number = System::GetFrameNumber();

  if (CompileBlock(block, allow_flush))
  {
    // add it to the page map if it's in ram
    AddBlockToPageMap(block);

#ifdef ENABLE_RECOMPILER
    SetFastMap(block->GetPC(), block->host_code);
    AddBlockToHostCodeMap(block);
#endif
  }
  else
  {
    Log_ErrorPrintf("Failed to compile block at PC=0x%08X", key.GetPC());
    delete block;
    block = nullptr;
  }

  if (block || allow_flush)
    s_blocks.emplace(key.bits, block);

  return block;
}

bool RevalidateBlock(CodeBlock* block, bool allow_flush)
{
  for (const CodeBlockInstruction& cbi : block->instructions)
  {
    u32 new_code = 0;
    SafeReadInstruction(cbi.pc, &new_code);
    if (cbi.instruction.bits != new_code)
    {
      Log_DebugPrintf("Block 0x%08X changed at PC 0x%08X - %08X to %08X - recompiling.", block->GetPC(), cbi.pc,
                      cbi.instruction.bits, new_code);
      goto recompile;
    }
  }

  // re-add it to the page map since it's still up-to-date
  block->invalidated = false;
  AddBlockToPageMap(block);
#ifdef ENABLE_RECOMPILER
  SetFastMap(block->GetPC(), block->host_code);
#endif
  return true;

recompile:
  // remove any references to the block from the lookup table.
  // this is an edge case where compiling causes a flush-all due to no space,
  // and we don't want to nuke the block we're compiling...
  RemoveReferencesToBlock(block);

#ifdef ENABLE_RECOMPILER
  RemoveBlockFromHostCodeMap(block);
#endif

  const u32 frame_number = System::GetFrameNumber();
  const u32 frame_diff = frame_number - block->recompile_frame_number;
  if (frame_diff <= RECOMPILE_FRAMES_TO_FALL_BACK_TO_INTERPRETER)
  {
    block->recompile_count++;

    if (block->recompile_count >= RECOMPILE_COUNT_TO_FALL_BACK_TO_INTERPRETER)
    {
      Log_PerfPrintf("Block 0x%08X has been recompiled %u times in %u frames, falling back to interpreter",
                     block->GetPC(), block->recompile_count, frame_diff);

      FallbackExistingBlockToInterpreter(block);
      return false;
    }
  }
  else
  {
    // It's been a while since this block was modified, so it's all good.
    block->recompile_frame_number = frame_number;
    block->recompile_count = 0;
  }

  block->instructions.clear();

  if (!CompileBlock(block, allow_flush))
  {
    Log_PerfPrintf("Failed to recompile block 0x%08X, falling back to interpreter.", block->GetPC());
    FallbackExistingBlockToInterpreter(block);
    return false;
  }

  AddBlockToPageMap(block);

#ifdef ENABLE_RECOMPILER
  // re-add to the fast map and host code map
  SetFastMap(block->GetPC(), block->host_code);
  AddBlockToHostCodeMap(block);
#endif

  // block is valid again
  block->invalidated = false;

  // re-insert into the block map since we removed it earlier.
  s_blocks.emplace(block->key.bits, block);
  return true;
}

bool CompileBlock(CodeBlock* block, bool allow_flush)
{
  u32 pc = block->GetPC();
  bool is_branch_delay_slot = false;
  bool is_load_delay_slot = false;

#if 0
  if (pc == 0x0005aa90)
    __debugbreak();
#endif

  block->icache_line_count = 0;
  block->uncached_fetch_ticks = 0;
  block->contains_double_branches = false;
  block->contains_loadstore_instructions = false;

  u32 last_cache_line = ICACHE_LINES;
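  // Seed with an out-of-range sentinel (ICACHE_LINES is one past the last valid line index) so
  // the first instruction always differs from last_cache_line and counts the block's first line.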

  for (;;)
  {
    CodeBlockInstruction cbi = {};
    if (!SafeReadInstruction(pc, &cbi.instruction.bits) || !IsInvalidInstruction(cbi.instruction))
      break;

    cbi.pc = pc;
    cbi.is_branch_delay_slot = is_branch_delay_slot;
    cbi.is_load_delay_slot = is_load_delay_slot;
    cbi.is_branch_instruction = IsBranchInstruction(cbi.instruction);
    cbi.is_direct_branch_instruction = IsDirectBranchInstruction(cbi.instruction);
    cbi.is_unconditional_branch_instruction = IsUnconditionalBranchInstruction(cbi.instruction);
    cbi.is_load_instruction = IsMemoryLoadInstruction(cbi.instruction);
    cbi.is_store_instruction = IsMemoryStoreInstruction(cbi.instruction);
    cbi.has_load_delay = InstructionHasLoadDelay(cbi.instruction);
    cbi.can_trap = CanInstructionTrap(cbi.instruction, InUserMode());

    if (g_settings.cpu_recompiler_icache)
    {
      const u32 icache_line = GetICacheLine(pc);
      if (icache_line != last_cache_line)
      {
        block->icache_line_count++;
        last_cache_line = icache_line;
      }
    }

    block->uncached_fetch_ticks += GetInstructionReadTicks(pc);
    block->contains_loadstore_instructions |= cbi.is_load_instruction;
    block->contains_loadstore_instructions |= cbi.is_store_instruction;

    pc += sizeof(cbi.instruction.bits);

    if (is_branch_delay_slot && cbi.is_branch_instruction)
    {
      const CodeBlockInstruction& prev_cbi = block->instructions.back();
      if (!prev_cbi.is_unconditional_branch_instruction || !prev_cbi.is_direct_branch_instruction)
      {
        Log_WarningPrintf("Conditional or indirect branch delay slot at %08X, skipping block", cbi.pc);
        return false;
      }
      if (!IsDirectBranchInstruction(cbi.instruction))
      {
        Log_WarningPrintf("Indirect branch in delay slot at %08X, skipping block", cbi.pc);
        return false;
      }

      // change the pc for the second branch's delay slot, it comes from the first branch
      pc = GetDirectBranchTarget(prev_cbi.instruction, prev_cbi.pc);
      Log_DevPrintf("Double branch at %08X, using delay slot from %08X -> %08X", cbi.pc, prev_cbi.pc, pc);
    }

    // instruction is decoded now
    block->instructions.push_back(cbi);

    // if we're in a branch delay slot, the block is now done
    // except if this is a branch in a branch delay slot, then we grab the one after that, and so on...
    if (is_branch_delay_slot && !cbi.is_branch_instruction)
      break;

    // if this is a branch, we grab the next instruction (delay slot), and then exit
    is_branch_delay_slot = cbi.is_branch_instruction;

    // same for load delay
    is_load_delay_slot = cbi.has_load_delay;

    // is this a non-branchy exit? (e.g. syscall)
    if (IsExitBlockInstruction(cbi.instruction))
      break;
  }

  if (!block->instructions.empty())
  {
    block->instructions.back().is_last_instruction = true;

#ifdef _DEBUG
    SmallString disasm;
    Log_DebugPrintf("Block at 0x%08X", block->GetPC());
    for (const CodeBlockInstruction& cbi : block->instructions)
    {
      CPU::DisassembleInstruction(&disasm, cbi.pc, cbi.instruction.bits);
      Log_DebugPrintf("[%s %s 0x%08X] %08X %s", cbi.is_branch_delay_slot ? "BD" : "  ",
                      cbi.is_load_delay_slot ? "LD" : "  ", cbi.pc, cbi.instruction.bits, disasm.c_str());
    }
#endif
  }
  else
  {
    Log_WarningPrintf("Empty block compiled at 0x%08X", block->key.GetPC());
    return false;
  }

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler())
  {
    // Ensure we're not going to run out of space while compiling this block.
    if (s_code_buffer.GetFreeCodeSpace() <
          (block->instructions.size() * Recompiler::MAX_NEAR_HOST_BYTES_PER_INSTRUCTION) ||
        s_code_buffer.GetFreeFarCodeSpace() <
          (block->instructions.size() * Recompiler::MAX_FAR_HOST_BYTES_PER_INSTRUCTION))
    {
      if (allow_flush)
      {
        Log_WarningPrintf("Out of code space, flushing all blocks.");
        Flush();
      }
      else
      {
        Log_ErrorPrintf("Out of code space and cannot flush while compiling %08X.", block->GetPC());
        return false;
      }
    }

    s_code_buffer.WriteProtect(false);
    Recompiler::CodeGenerator codegen(&s_code_buffer);
    const bool compile_result = codegen.CompileBlock(block, &block->host_code, &block->host_code_size);
    s_code_buffer.WriteProtect(true);

    if (!compile_result)
    {
      Log_ErrorPrintf("Failed to compile host code for block at 0x%08X", block->key.GetPC());
      return false;
    }
  }
#endif

  return true;
}

#ifdef ENABLE_RECOMPILER

void FastCompileBlockFunction()
{
  CodeBlock* block = LookupBlock(GetNextBlockKey(), true);
  if (block)
  {
    s_single_block_asm_dispatcher(block->host_code);
    return;
  }

  if (g_settings.gpu_pgxp_enable)
  {
    if (g_settings.gpu_pgxp_cpu)
      InterpretUncachedBlock<PGXPMode::CPU>();
    else
      InterpretUncachedBlock<PGXPMode::Memory>();
  }
  else
  {
    InterpretUncachedBlock<PGXPMode::Disabled>();
  }
}

void InvalidCodeFunction()
{
  Log_ErrorPrintf("Trying to execute invalid code at 0x%08X", g_state.pc);
  if (g_settings.gpu_pgxp_enable)
  {
    if (g_settings.gpu_pgxp_cpu)
      InterpretUncachedBlock<PGXPMode::CPU>();
    else
      InterpretUncachedBlock<PGXPMode::Memory>();
  }
  else
  {
    InterpretUncachedBlock<PGXPMode::Disabled>();
  }
}

#endif

static void InvalidateBlock(CodeBlock* block, bool allow_frame_invalidation)
{
  // Invalidate forces the block to be checked again.
  Log_DebugPrintf("Invalidating block at 0x%08X", block->GetPC());
  block->invalidated = true;

  if (block->can_link)
  {
    const u32 frame_number = System::GetFrameNumber();
    if (allow_frame_invalidation)
    {
      const u32 frame_diff = frame_number - block->invalidate_frame_number;
      if (frame_diff <= INVALIDATE_THRESHOLD_TO_DISABLE_LINKING)
      {
        Log_DevPrintf("Block 0x%08X has been invalidated in %u frames, disabling linking", block->GetPC(), frame_diff);
        block->can_link = false;
      }
      else
      {
        // It's been a while since this block was modified, so it's all good.
        block->invalidate_frame_number = frame_number;
      }
    }
    else
    {
      // don't trigger frame number based invalidation for this block (e.g. memory save states)
      block->invalidate_frame_number = frame_number - INVALIDATE_THRESHOLD_TO_DISABLE_LINKING - 1;
    }
  }

  UnlinkBlock(block);

#ifdef ENABLE_RECOMPILER
  SetFastMap(block->GetPC(), FastCompileBlockFunction);
#endif
}

void InvalidateBlocksWithPageIndex(u32 page_index)
{
  DebugAssert(page_index < Bus::RAM_8MB_CODE_PAGE_COUNT);
  auto& blocks = m_ram_block_map[page_index];
  for (CodeBlock* block : blocks)
    InvalidateBlock(block, true);

  // Block will be re-added next execution.
  blocks.clear();
  Bus::ClearRAMCodePage(page_index);
}

void InvalidateAll()
{
  for (auto& it : s_blocks)
  {
    CodeBlock* block = it.second;
    if (block && !block->invalidated)
      InvalidateBlock(block, false);
  }

  Bus::ClearRAMCodePageFlags();
  for (auto& it : m_ram_block_map)
    it.clear();
}

void RemoveReferencesToBlock(CodeBlock* block)
{
  BlockMap::iterator iter = s_blocks.find(block->key.GetPC());
  Assert(iter != s_blocks.end() && iter->second == block);

#ifdef ENABLE_RECOMPILER
  SetFastMap(block->GetPC(), FastCompileBlockFunction);
#endif

  // if it's been invalidated it won't be in the page map
  if (!block->invalidated)
    RemoveBlockFromPageMap(block);

  UnlinkBlock(block);
#ifdef ENABLE_RECOMPILER
  if (!block->invalidated)
    RemoveBlockFromHostCodeMap(block);
#endif

  s_blocks.erase(iter);
}

void AddBlockToPageMap(CodeBlock* block)
{
  if (!block->IsInRAM())
    return;

  const u32 start_page = block->GetStartPageIndex();
  const u32 end_page = block->GetEndPageIndex();
  for (u32 page = start_page; page <= end_page; page++)
  {
    m_ram_block_map[page].push_back(block);
    Bus::SetRAMCodePage(page);
  }
}

void RemoveBlockFromPageMap(CodeBlock* block)
{
  if (!block->IsInRAM())
    return;

  const u32 start_page = block->GetStartPageIndex();
  const u32 end_page = block->GetEndPageIndex();
  for (u32 page = start_page; page <= end_page; page++)
  {
    auto& page_blocks = m_ram_block_map[page];
    auto page_block_iter = std::find(page_blocks.begin(), page_blocks.end(), block);
    Assert(page_block_iter != page_blocks.end());
    page_blocks.erase(page_block_iter);
  }
}

void LinkBlock(CodeBlock* from, CodeBlock* to, void* host_pc, void* host_resolve_pc, u32 host_pc_size)
{
  Log_DebugPrintf("Linking block %p(%08x) to %p(%08x)", from, from->GetPC(), to, to->GetPC());

  CodeBlock::LinkInfo li;
  li.block = to;
  li.host_pc = host_pc;
  li.host_resolve_pc = host_resolve_pc;
  li.host_pc_size = host_pc_size;
  from->link_successors.push_back(li);

  li.block = from;
  to->link_predecessors.push_back(li);

#ifdef ENABLE_RECOMPILER
  // apply in code
  if (host_pc)
  {
    Log_ProfilePrintf("Backpatching %p(%08x) to jump to block %p (%08x)", host_pc, from->GetPC(), to, to->GetPC());
    s_code_buffer.WriteProtect(false);
    Recompiler::CodeGenerator::BackpatchBranch(host_pc, host_pc_size, reinterpret_cast<void*>(to->host_code));
    s_code_buffer.WriteProtect(true);
  }
#endif
}

void UnlinkBlock(CodeBlock* block)
{
  if (block->link_predecessors.empty() && block->link_successors.empty())
    return;

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler() && g_settings.cpu_recompiler_block_linking)
    s_code_buffer.WriteProtect(false);
#endif

  for (CodeBlock::LinkInfo& li : block->link_predecessors)
  {
    auto iter = std::find_if(li.block->link_successors.begin(), li.block->link_successors.end(),
                             [block](const CodeBlock::LinkInfo& li) { return li.block == block; });
    Assert(iter != li.block->link_successors.end());

#ifdef ENABLE_RECOMPILER
    // Restore blocks linked to this block back to the resolver
    if (li.host_pc)
    {
      Log_ProfilePrintf("Backpatching %p(%08x) [predecessor] to jump to resolver", li.host_pc, li.block->GetPC());
      Recompiler::CodeGenerator::BackpatchBranch(li.host_pc, li.host_pc_size, li.host_resolve_pc);
    }
#endif

    li.block->link_successors.erase(iter);
  }
  block->link_predecessors.clear();

  for (CodeBlock::LinkInfo& li : block->link_successors)
  {
    auto iter = std::find_if(li.block->link_predecessors.begin(), li.block->link_predecessors.end(),
                             [block](const CodeBlock::LinkInfo& li) { return li.block == block; });
    Assert(iter != li.block->link_predecessors.end());

#ifdef ENABLE_RECOMPILER
    // Restore blocks we're linking to back to the resolver, since the successor won't be linked to us to backpatch if
    // it changes.
    if (li.host_pc)
    {
      Log_ProfilePrintf("Backpatching %p(%08x) [successor] to jump to resolver", li.host_pc, li.block->GetPC());
      Recompiler::CodeGenerator::BackpatchBranch(li.host_pc, li.host_pc_size, li.host_resolve_pc);
    }
#endif

    // Don't have to do anything special for successors - just let the successor know it's no longer linked.
    li.block->link_predecessors.erase(iter);
  }
  block->link_successors.clear();

#ifdef ENABLE_RECOMPILER
  if (g_settings.IsUsingRecompiler() && g_settings.cpu_recompiler_block_linking)
    s_code_buffer.WriteProtect(true);
#endif
}

#ifdef ENABLE_RECOMPILER

void AddBlockToHostCodeMap(CodeBlock* block)
{
  if (!g_settings.IsUsingRecompiler())
    return;

  auto ir = s_host_code_map.emplace(block->host_code, block);
  Assert(ir.second);
}

void RemoveBlockFromHostCodeMap(CodeBlock* block)
{
  if (!g_settings.IsUsingRecompiler())
    return;

  HostCodeMap::iterator hc_iter = s_host_code_map.find(block->host_code);
  Assert(hc_iter != s_host_code_map.end());
  s_host_code_map.erase(hc_iter);
}

bool InitializeFastmem()
{
  const CPUFastmemMode mode = g_settings.cpu_fastmem_mode;
  Assert(mode != CPUFastmemMode::Disabled);

#ifdef ENABLE_MMAP_FASTMEM
  const auto handler = (mode == CPUFastmemMode::MMap) ? MMapPageFaultHandler : LUTPageFaultHandler;
#else
  const auto handler = LUTPageFaultHandler;
  Assert(mode != CPUFastmemMode::MMap);
#endif

  if (!Common::PageFaultHandler::InstallHandler(&s_host_code_map, s_code_buffer.GetCodePointer(),
                                                s_code_buffer.GetTotalSize(), handler))
  {
    Log_ErrorPrintf("Failed to install page fault handler");
    return false;
  }

  Bus::UpdateFastmemViews(mode);
  CPU::UpdateMemoryPointers();
  return true;
}

void ShutdownFastmem()
{
  Common::PageFaultHandler::RemoveHandler(&s_host_code_map);
  Bus::UpdateFastmemViews(CPUFastmemMode::Disabled);
  CPU::UpdateMemoryPointers();
}

#ifdef ENABLE_MMAP_FASTMEM

Common::PageFaultHandler::HandlerResult MMapPageFaultHandler(void* exception_pc, void* fault_address, bool is_write)
{
  if (static_cast<u8*>(fault_address) < static_cast<u8*>(g_state.fastmem_base) ||
      (static_cast<u8*>(fault_address) - static_cast<u8*>(g_state.fastmem_base)) >=
        static_cast<ptrdiff_t>(Bus::FASTMEM_ARENA_SIZE))
  {
    return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;
  }

  const PhysicalMemoryAddress fastmem_address = static_cast<PhysicalMemoryAddress>(
    static_cast<ptrdiff_t>(static_cast<u8*>(fault_address) - static_cast<u8*>(g_state.fastmem_base)));
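  // The offset of the faulting host address within the fastmem arena corresponds to the guest
  // address the emulated CPU accessed, so it can be fed straight to the Bus:: helpers below.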

  Log_DevPrintf("Page fault handler invoked at PC=%p Address=%p %s, fastmem offset 0x%08X", exception_pc, fault_address,
                is_write ? "(write)" : "(read)", fastmem_address);

  // use upper_bound to find the next block after the pc
  HostCodeMap::iterator upper_iter =
    s_host_code_map.upper_bound(reinterpret_cast<CodeBlock::HostCodePointer>(exception_pc));
  if (upper_iter == s_host_code_map.begin())
    return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;

  // then decrement it by one to (hopefully) get the block we want
  upper_iter--;

  // find the loadstore info in the code block
  CodeBlock* block = upper_iter->second;
  for (auto bpi_iter = block->loadstore_backpatch_info.begin(); bpi_iter != block->loadstore_backpatch_info.end();
       ++bpi_iter)
  {
    Recompiler::LoadStoreBackpatchInfo& lbi = *bpi_iter;
    if (lbi.host_pc == exception_pc)
    {
      if (is_write && !g_state.cop0_regs.sr.Isc && Bus::IsRAMAddress(fastmem_address))
      {
        // this is probably a code page, since we aren't going to fault due to requiring fastmem on RAM.
        const u32 code_page_index = Bus::GetRAMCodePageIndex(fastmem_address);
        if (Bus::IsRAMCodePage(code_page_index))
        {
          if (++lbi.fault_count < CODE_WRITE_FAULT_THRESHOLD_FOR_SLOWMEM)
          {
            InvalidateBlocksWithPageIndex(code_page_index);
            return Common::PageFaultHandler::HandlerResult::ContinueExecution;
          }
          else
          {
            Log_DevPrintf("Backpatching code write at %p (%08X) address %p (%08X) to slowmem after threshold",
                          exception_pc, lbi.guest_pc, fault_address, fastmem_address);
          }
        }
      }

      // found it, do fixup
      s_code_buffer.WriteProtect(false);
      const bool backpatch_result = Recompiler::CodeGenerator::BackpatchLoadStore(lbi);
      s_code_buffer.WriteProtect(true);
      if (backpatch_result)
      {
        // remove the backpatch entry since we won't be coming back to this one
        block->loadstore_backpatch_info.erase(bpi_iter);
        return Common::PageFaultHandler::HandlerResult::ContinueExecution;
      }
      else
      {
        Log_ErrorPrintf("Failed to backpatch %p in block 0x%08X", exception_pc, block->GetPC());
        return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;
      }
    }
  }

  // we didn't find the pc in our list...
  Log_ErrorPrintf("Loadstore PC not found for %p in block 0x%08X", exception_pc, block->GetPC());
  return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;
}

#endif

Common::PageFaultHandler::HandlerResult LUTPageFaultHandler(void* exception_pc, void* fault_address, bool is_write)
{
  // use upper_bound to find the next block after the pc
  HostCodeMap::iterator upper_iter =
    s_host_code_map.upper_bound(reinterpret_cast<CodeBlock::HostCodePointer>(exception_pc));
  if (upper_iter == s_host_code_map.begin())
    return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;

  // then decrement it by one to (hopefully) get the block we want
  upper_iter--;

  // find the loadstore info in the code block
  CodeBlock* block = upper_iter->second;
  for (auto bpi_iter = block->loadstore_backpatch_info.begin(); bpi_iter != block->loadstore_backpatch_info.end();
       ++bpi_iter)
  {
    Recompiler::LoadStoreBackpatchInfo& lbi = *bpi_iter;
    if (lbi.host_pc == exception_pc)
    {
      // found it, do fixup
      s_code_buffer.WriteProtect(false);
      const bool backpatch_result = Recompiler::CodeGenerator::BackpatchLoadStore(lbi);
      s_code_buffer.WriteProtect(true);
      if (backpatch_result)
      {
        // remove the backpatch entry since we won't be coming back to this one
        block->loadstore_backpatch_info.erase(bpi_iter);
        return Common::PageFaultHandler::HandlerResult::ContinueExecution;
      }
      else
      {
        Log_ErrorPrintf("Failed to backpatch %p in block 0x%08X", exception_pc, block->GetPC());
        return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;
      }
    }
  }

  // we didn't find the pc in our list...
  Log_ErrorPrintf("Loadstore PC not found for %p in block 0x%08X", exception_pc, block->GetPC());
  return Common::PageFaultHandler::HandlerResult::ExecuteNextHandler;
}

#endif // ENABLE_RECOMPILER

} // namespace CPU::CodeCache

#ifdef ENABLE_RECOMPILER

void CPU::Recompiler::Thunks::ResolveBranch(CodeBlock* block, void* host_pc, void* host_resolve_pc, u32 host_pc_size)
{
  using namespace CPU::CodeCache;

  CodeBlockKey key = GetNextBlockKey();
  CodeBlock* successor_block = LookupBlock(key, false);
  if (!successor_block || (successor_block->invalidated && !RevalidateBlock(successor_block, false)) ||
      !block->can_link || !successor_block->can_link)
  {
    // just turn it into a return to the dispatcher instead.
    s_code_buffer.WriteProtect(false);
    CodeGenerator::BackpatchReturn(host_pc, host_pc_size);
    s_code_buffer.WriteProtect(true);
  }
  else
  {
    // link blocks!
    LinkBlock(block, successor_block, host_pc, host_resolve_pc, host_pc_size);
  }
}

void CPU::Recompiler::Thunks::LogPC(u32 pc)
{
#if 1
  CPU::CodeCache::LogCurrentState();
#endif
#if 0
  if (TimingEvents::GetGlobalTickCounter() + GetPendingTicks() == 382856482)
    __debugbreak();
#endif
}

#endif // ENABLE_RECOMPILER