From 5e74a8b850d67bc44b0e41e2ec9f3220dfa811ac Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sat, 30 Sep 2023 19:13:52 +0200
Subject: [PATCH 1/6] Jit64: Don't make use of fastmem arena when dcache is enabled

Some code paths in EmuCodeBlock.cpp that were checking fastmem_arena
should really also be checking m_enable_dcache. Because JitArm64
centralizes more or less all memory access to the EmitBackpatchRoutine
function and because that function already contained a check, JitArm64
works fine without the additional checks added by this commit.

Regardless, I added the checks to MMU.cpp instead of EmuCodeBlock.cpp
where applicable so they would be available to JitArm64. Maybe one day
JitArm64 will need them if its code gets restructured.
---
 Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp | 6 ++++--
 Source/Core/Core/PowerPC/MMU.cpp                      | 6 ++++++
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp b/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
index 04036e11ab..53bae65dd4 100644
--- a/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
+++ b/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
@@ -372,7 +372,8 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg& opAddress,
   FixupBranch exit;
 
   const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || m_jit.m_ppc_state.msr.DR;
-  const bool fast_check_address = !slowmem && dr_set && m_jit.jo.fastmem_arena;
+  const bool fast_check_address =
+      !slowmem && dr_set && m_jit.jo.fastmem_arena && !m_jit.m_ppc_state.m_enable_dcache;
   if (fast_check_address)
   {
     FixupBranch slow = CheckIfSafeAddress(R(reg_value), reg_addr, registersInUse);
@@ -541,7 +542,8 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
   FixupBranch exit;
 
   const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || m_jit.m_ppc_state.msr.DR;
-  const bool fast_check_address = !slowmem && dr_set && m_jit.jo.fastmem_arena;
+  const bool fast_check_address =
+      !slowmem && dr_set && m_jit.jo.fastmem_arena && !m_jit.m_ppc_state.m_enable_dcache;
   if (fast_check_address)
   {
     FixupBranch slow = CheckIfSafeAddress(reg_value, reg_addr, registersInUse);
diff --git a/Source/Core/Core/PowerPC/MMU.cpp b/Source/Core/Core/PowerPC/MMU.cpp
index 3526ec1522..dcb1b75654 100644
--- a/Source/Core/Core/PowerPC/MMU.cpp
+++ b/Source/Core/Core/PowerPC/MMU.cpp
@@ -914,6 +914,9 @@ bool MMU::IsOptimizableRAMAddress(const u32 address) const
   if (!m_ppc_state.msr.DR)
     return false;
 
+  if (m_ppc_state.m_enable_dcache)
+    return false;
+
   // TODO: This API needs to take an access size
   //
   // We store whether an access can be optimized to an unchecked access
@@ -1211,6 +1214,9 @@ u32 MMU::IsOptimizableMMIOAccess(u32 address, u32 access_size) const
   if (!m_ppc_state.msr.DR)
     return 0;
 
+  if (m_ppc_state.m_enable_dcache)
+    return 0;
+
   // Translate address
   // If we also optimize for TLB mappings, we'd have to clear the
   // JitCache on each TLB invalidation.

From 899d61bc7dd093393aafcd88c4f072ff879228a7 Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sat, 30 Sep 2023 17:32:51 +0200
Subject: [PATCH 2/6] Jit64: Recompile asm routines on cache clear

This is needed so that the checks added in the previous commit will be
reevaluated if the value of m_enable_dcache changes.

JitArm64 was already recompiling its asm routines on cache clear by
necessity. It doesn't have the same setup as Jit64 where the asm
routines are in a separate region, so clearing the JitArm64 cache
results in the asm routines being cleared too.
---
 Source/Core/Common/CodeBlock.h                | 11 ++++++++---
 Source/Core/Core/PowerPC/Jit64/Jit.cpp        |  1 +
 Source/Core/Core/PowerPC/Jit64/JitAsm.cpp     | 10 +++++++++-
 Source/Core/Core/PowerPC/Jit64/JitAsm.h       |  1 +
 Source/Core/VideoCommon/VertexLoaderARM64.cpp |  2 +-
 Source/Core/VideoCommon/VertexLoaderX64.cpp   |  2 +-
 6 files changed, 21 insertions(+), 6 deletions(-)

diff --git a/Source/Core/Common/CodeBlock.h b/Source/Core/Common/CodeBlock.h
index aa35d3b782..fbd4cdb00a 100644
--- a/Source/Core/Common/CodeBlock.h
+++ b/Source/Core/Common/CodeBlock.h
@@ -82,9 +82,14 @@ public:
   }
 
   bool IsInSpace(const u8* ptr) const { return ptr >= region && ptr < (region + region_size); }
-  // Cannot currently be undone. Will write protect the entire code region.
-  // Start over if you need to change the code (call FreeCodeSpace(), AllocCodeSpace()).
-  void WriteProtect() { Common::WriteProtectMemory(region, region_size, true); }
+  void WriteProtect(bool allow_execute)
+  {
+    Common::WriteProtectMemory(region, region_size, allow_execute);
+  }
+  void UnWriteProtect(bool allow_execute)
+  {
+    Common::UnWriteProtectMemory(region, region_size, allow_execute);
+  }
   void ResetCodePtr() { T::SetCodePtr(region, region + region_size); }
   size_t GetSpaceLeft() const
   {
diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
index 9a5f603573..6dbb998d3a 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
@@ -305,6 +305,7 @@ void Jit64::ClearCache()
   ClearCodeSpace();
   Clear();
   RefreshConfig(InitFastmemArena::No);
+  asm_routines.Regenerate();
   ResetFreeMemoryRanges();
 }
 
diff --git a/Source/Core/Core/PowerPC/Jit64/JitAsm.cpp b/Source/Core/Core/PowerPC/Jit64/JitAsm.cpp
index ef667cce86..36189dbb40 100644
--- a/Source/Core/Core/PowerPC/Jit64/JitAsm.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/JitAsm.cpp
@@ -32,7 +32,15 @@ void Jit64AsmRoutineManager::Init()
 {
   m_const_pool.Init(AllocChildCodeSpace(4096), 4096);
   Generate();
-  WriteProtect();
+  WriteProtect(true);
+}
+
+void Jit64AsmRoutineManager::Regenerate()
+{
+  UnWriteProtect(false);
+  ResetCodePtr();
+  Generate();
+  WriteProtect(true);
 }
 
 // PLAN: no more block numbers - crazy opcodes just contain offset within
diff --git a/Source/Core/Core/PowerPC/Jit64/JitAsm.h b/Source/Core/Core/PowerPC/Jit64/JitAsm.h
index 3f2c2297fe..4db3c8f477 100644
--- a/Source/Core/Core/PowerPC/Jit64/JitAsm.h
+++ b/Source/Core/Core/PowerPC/Jit64/JitAsm.h
@@ -35,6 +35,7 @@ public:
   explicit Jit64AsmRoutineManager(Jit64& jit);
 
   void Init();
+  void Regenerate();
 
   void ResetStack(Gen::X64CodeBlock& emitter);
 
diff --git a/Source/Core/VideoCommon/VertexLoaderARM64.cpp b/Source/Core/VideoCommon/VertexLoaderARM64.cpp
index 03bb82d5c2..12b6b1d79a 100644
--- a/Source/Core/VideoCommon/VertexLoaderARM64.cpp
+++ b/Source/Core/VideoCommon/VertexLoaderARM64.cpp
@@ -56,7 +56,7 @@ VertexLoaderARM64::VertexLoaderARM64(const TVtxDesc& vtx_desc, const VAT& vtx_at
   const Common::ScopedJITPageWriteAndNoExecute enable_jit_page_writes;
   ClearCodeSpace();
   GenerateVertexLoader();
-  WriteProtect();
+  WriteProtect(true);
 }
 
 // Returns the register to use as the base and an offset from that register.
diff --git a/Source/Core/VideoCommon/VertexLoaderX64.cpp b/Source/Core/VideoCommon/VertexLoaderX64.cpp
index 0280ec4e83..9aba5a58c8 100644
--- a/Source/Core/VideoCommon/VertexLoaderX64.cpp
+++ b/Source/Core/VideoCommon/VertexLoaderX64.cpp
@@ -49,7 +49,7 @@ VertexLoaderX64::VertexLoaderX64(const TVtxDesc& vtx_desc, const VAT& vtx_att)
   AllocCodeSpace(4096);
   ClearCodeSpace();
   GenerateVertexLoader();
-  WriteProtect();
+  WriteProtect(true);
 
   Common::JitRegister::Register(region, GetCodePtr(), "VertexLoaderX64\nVtx desc: \n{}\nVAT:\n{}",
                                 vtx_desc, vtx_att);

From b3bfcc5d7f06bb029bf919525f7722ec01f7a28f Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sat, 9 Sep 2023 16:19:47 +0200
Subject: [PATCH 3/6] PowerPC: Allow toggling write-back cache during emulation

Now that PR 10575 is merged, the JIT automatically clears its cache
when this setting is changed, making this reasonable to implement.
---
 .../features/settings/model/BooleanSetting.kt |  1 -
 Source/Core/Core/PowerPC/PowerPC.cpp          | 21 +++++++++++++++++--
 Source/Core/Core/PowerPC/PowerPC.h            |  4 ++++
 .../Core/DolphinQt/Settings/AdvancedPane.cpp  |  1 -
 4 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
index 298f2ebe24..5ccc6b4aad 100644
--- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
+++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
@@ -902,7 +902,6 @@ enum class BooleanSetting(
             MAIN_OVERRIDE_REGION_SETTINGS,
             MAIN_MMU,
             MAIN_PAUSE_ON_PANIC,
-            MAIN_ACCURATE_CPU_CACHE,
             MAIN_RAM_OVERRIDE_ENABLE,
             MAIN_CUSTOM_RTC_ENABLE,
             MAIN_DSP_JIT,
diff --git a/Source/Core/Core/PowerPC/PowerPC.cpp b/Source/Core/Core/PowerPC/PowerPC.cpp
index d5bd82ecd2..335658f532 100644
--- a/Source/Core/Core/PowerPC/PowerPC.cpp
+++ b/Source/Core/Core/PowerPC/PowerPC.cpp
@@ -18,6 +18,7 @@
 #include "Common/FloatUtils.h"
 #include "Common/Logging/Log.h"
 
+#include "Core/CPUThreadConfigCallback.h"
 #include "Core/Config/MainSettings.h"
 #include "Core/ConfigManager.h"
 #include "Core/Core.h"
@@ -262,8 +263,25 @@ CPUCore DefaultCPUCore()
 #endif
 }
 
+void PowerPCManager::RefreshConfig()
+{
+  const bool old_enable_dcache = m_ppc_state.m_enable_dcache;
+
+  m_ppc_state.m_enable_dcache = Config::Get(Config::MAIN_ACCURATE_CPU_CACHE);
+
+  if (old_enable_dcache && !m_ppc_state.m_enable_dcache)
+  {
+    INFO_LOG_FMT(POWERPC, "Flushing data cache");
+    m_ppc_state.dCache.FlushAll();
+  }
+}
+
 void PowerPCManager::Init(CPUCore cpu_core)
 {
+  m_registered_config_callback_id =
+      CPUThreadConfigCallback::AddConfigChangedCallback([this] { RefreshConfig(); });
+  RefreshConfig();
+
   m_invalidate_cache_thread_safe =
       m_system.GetCoreTiming().RegisterEvent("invalidateEmulatedCache", InvalidateCacheThreadSafe);
 
@@ -273,8 +291,6 @@ void PowerPCManager::Init(CPUCore cpu_core)
   m_ppc_state.iCache.Init();
   m_ppc_state.dCache.Init();
 
-  m_ppc_state.m_enable_dcache = Config::Get(Config::MAIN_ACCURATE_CPU_CACHE);
-
   if (Config::Get(Config::MAIN_ENABLE_DEBUGGING))
     m_breakpoints.ClearAllTemporary();
 }
@@ -307,6 +323,7 @@ void PowerPCManager::ScheduleInvalidateCacheThreadSafe(u32 address)
 
 void PowerPCManager::Shutdown()
 {
+  CPUThreadConfigCallback::RemoveConfigChangedCallback(m_registered_config_callback_id);
   InjectExternalCPUCore(nullptr);
   m_system.GetJitInterface().Shutdown();
   m_system.GetInterpreter().Shutdown();
diff --git a/Source/Core/Core/PowerPC/PowerPC.h b/Source/Core/Core/PowerPC/PowerPC.h
index 30cbd47b41..7612018a93 100644
--- a/Source/Core/Core/PowerPC/PowerPC.h
+++ b/Source/Core/Core/PowerPC/PowerPC.h
@@ -13,6 +13,7 @@
 
 #include "Common/CommonTypes.h"
 
+#include "Core/CPUThreadConfigCallback.h"
 #include "Core/Debugger/PPCDebugInterface.h"
 #include "Core/PowerPC/BreakPoints.h"
 #include "Core/PowerPC/ConditionRegister.h"
@@ -297,6 +298,7 @@ private:
   void InitializeCPUCore(CPUCore cpu_core);
   void ApplyMode();
   void ResetRegisters();
+  void RefreshConfig();
 
   PowerPCState m_ppc_state;
 
@@ -308,6 +310,8 @@ private:
   MemChecks m_memchecks;
   PPCDebugInterface m_debug_interface;
 
+  CPUThreadConfigCallback::ConfigChangedCallbackID m_registered_config_callback_id;
+
   CoreTiming::EventType* m_invalidate_cache_thread_safe = nullptr;
 
   Core::System& m_system;
diff --git a/Source/Core/DolphinQt/Settings/AdvancedPane.cpp b/Source/Core/DolphinQt/Settings/AdvancedPane.cpp
index af50a3f7e9..132ac4bf69 100644
--- a/Source/Core/DolphinQt/Settings/AdvancedPane.cpp
+++ b/Source/Core/DolphinQt/Settings/AdvancedPane.cpp
@@ -255,7 +255,6 @@ void AdvancedPane::Update()
   m_cpu_emulation_engine_combobox->setEnabled(!running);
   m_enable_mmu_checkbox->setEnabled(!running);
   m_pause_on_panic_checkbox->setEnabled(!running);
-  m_accurate_cpu_cache_checkbox->setEnabled(!running);
 
   {
     QFont bf = font();

From 0606433404ba2ab220ea3d3468911ed045c01cac Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sun, 1 Oct 2023 21:57:28 +0200
Subject: [PATCH 4/6] JitArm64: Check fastmem instead of fastmem_arena

Preparation for the next commit. JitArm64 has been conflating these two
flags. Most of the stuff that's been guarded by fastmem_arena checks in
fact requires fastmem.

When we have fastmem_arena without fastmem, it would be possible to do
things a bit more efficiently than what this commit does, but it's
non-trivial and therefore I will leave it out of this PR. With this
commit, we effectively have the same behavior as before this PR - plus
the added ability to toggle fastmem with a cache clear.
---
 Source/Core/Core/PowerPC/JitArm64/Jit.cpp     |  4 +--
 Source/Core/Core/PowerPC/JitArm64/Jit.h       | 10 +++----
 .../PowerPC/JitArm64/JitArm64_BackPatch.cpp   |  4 +--
 .../PowerPC/JitArm64/JitArm64_LoadStore.cpp   | 30 +++++++++----------
 .../JitArm64/JitArm64_LoadStoreFloating.cpp   | 12 ++++----
 .../JitArm64/JitArm64_LoadStorePaired.cpp     | 24 +++++++--------
 Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp  |  4 +--
 Source/Core/Core/PowerPC/JitInterface.cpp     | 10 +++++--
 8 files changed, 52 insertions(+), 46 deletions(-)

diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
index 961395e83c..efc24f988a 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
@@ -364,8 +364,8 @@ void JitArm64::EmitStoreMembase(const ARM64Reg& msr)
   auto& memory = m_system.GetMemory();
   ARM64Reg WD = gpr.GetReg();
   ARM64Reg XD = EncodeRegTo64(WD);
-  MOVP2R(MEM_REG, jo.fastmem_arena ? memory.GetLogicalBase() : memory.GetLogicalPageMappingsBase());
-  MOVP2R(XD, jo.fastmem_arena ? memory.GetPhysicalBase() : memory.GetPhysicalPageMappingsBase());
+  MOVP2R(MEM_REG, jo.fastmem ? memory.GetLogicalBase() : memory.GetLogicalPageMappingsBase());
+  MOVP2R(XD, jo.fastmem ? memory.GetPhysicalBase() : memory.GetPhysicalPageMappingsBase());
   TST(msr, LogicalImm(1 << (31 - 27), 32));
   CSEL(MEM_REG, MEM_REG, XD, CCFlags::CC_NEQ);
   STR(IndexType::Unsigned, MEM_REG, PPC_REG, PPCSTATE_OFF(mem_ptr));
diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.h b/Source/Core/Core/PowerPC/JitArm64/Jit.h
index 7025ffaa2d..e8056fb3d8 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit.h
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit.h
@@ -233,7 +233,7 @@ protected:
     // Only emits fast access code. Must only be used if the guest address is known in advance
    // and IsOptimizableRAMAddress returns true for it, otherwise Dolphin will likely crash!
     AlwaysUnsafe,
-    // Best in most cases. If backpatching is possible (!emitting_routine && jo.fastmem_arena):
+    // Best in most cases. If backpatching is possible (!emitting_routine && jo.fastmem):
     // Tries to run fast access code, and if that fails, uses backpatching to replace the code
     // with a call to the slow C++ code. Otherwise: Checks whether the fast access code will work,
     // then branches to either the fast access code or the slow C++ code.
@@ -259,10 +259,10 @@ protected:
   //
   // emitting_routine && mode == Auto: X2
   // emitting_routine && mode == Auto && !(flags & BackPatchInfo::FLAG_STORE): X3
-  // emitting_routine && mode != AlwaysSafe && !jo.fastmem_arena: X3
-  // mode != AlwaysSafe && !jo.fastmem_arena: X2
-  // !emitting_routine && mode != AlwaysSafe && !jo.fastmem_arena: X30
-  // !emitting_routine && mode == Auto && jo.fastmem_arena: X30
+  // emitting_routine && mode != AlwaysSafe && !jo.fastmem: X3
+  // mode != AlwaysSafe && !jo.fastmem: X2
+  // !emitting_routine && mode != AlwaysSafe && !jo.fastmem: X30
+  // !emitting_routine && mode == Auto && jo.fastmem: X30
   //
   // Furthermore, any callee-saved register which isn't marked in gprs_to_push/fprs_to_push
   // may be clobbered if mode != AlwaysUnsafe.
diff --git a/Source/Core/Core/PowerPC/JitArm64/JitArm64_BackPatch.cpp b/Source/Core/Core/PowerPC/JitArm64/JitArm64_BackPatch.cpp
index 8e14e694c4..2f2add68fc 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitArm64_BackPatch.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitArm64_BackPatch.cpp
@@ -74,7 +74,7 @@ void JitArm64::EmitBackpatchRoutine(u32 flags, MemAccessMode mode, ARM64Reg RS,
   ARM64Reg memory_base = MEM_REG;
   ARM64Reg memory_offset = addr;
 
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
   {
     const ARM64Reg temp = emitting_routine ? ARM64Reg::W3 : ARM64Reg::W30;
 
@@ -158,7 +158,7 @@ void JitArm64::EmitBackpatchRoutine(u32 flags, MemAccessMode mode, ARM64Reg RS,
     in_far_code = true;
     SwitchToFarCode();
 
-    if (jo.fastmem_arena && !emitting_routine)
+    if (jo.fastmem && !emitting_routine)
     {
       FastmemArea* fastmem_area = &m_fault_to_handler[fastmem_end];
       fastmem_area->fastmem_code = fastmem_start;
diff --git a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
index e353e87731..e328219cb6 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
@@ -28,7 +28,7 @@ void JitArm64::SafeLoadToReg(u32 dest, s32 addr, s32 offsetReg, u32 flags, s32 o
 {
   // We want to make sure to not get LR as a temp register
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   gpr.BindToRegister(dest, dest == (u32)addr || dest == (u32)offsetReg, false);
@@ -124,7 +124,7 @@ void JitArm64::SafeLoadToReg(u32 dest, s32 addr, s32 offsetReg, u32 flags, s32 o
     BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
     if (!update || early_update)
       regs_in_use[DecodeReg(ARM64Reg::W0)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
     if (!jo.memcheck)
       regs_in_use[DecodeReg(dest_reg)] = 0;
@@ -166,7 +166,7 @@ void JitArm64::SafeLoadToReg(u32 dest, s32 addr, s32 offsetReg, u32 flags, s32 o
   }
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
 }
 
@@ -175,7 +175,7 @@ void JitArm64::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, u32 flags, s
 {
   // We want to make sure to not get LR as a temp register
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   ARM64Reg RS = gpr.R(value);
@@ -272,7 +272,7 @@ void JitArm64::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, u32 flags, s
     regs_in_use[DecodeReg(ARM64Reg::W0)] = 0;
     if (!update || early_update)
       regs_in_use[DecodeReg(ARM64Reg::W1)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
 
     u32 access_size = BackPatchInfo::GetFlagSize(flags);
@@ -335,7 +335,7 @@ void JitArm64::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, u32 flags, s
   }
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
 }
 
@@ -519,7 +519,7 @@ void JitArm64::lmw(UGeckoInstruction inst)
   s32 offset = inst.SIMM_16;
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   // MMU games make use of a >= d despite this being invalid according to the PEM.
@@ -554,7 +554,7 @@ void JitArm64::lmw(UGeckoInstruction inst)
     BitSet32 regs_in_use = gpr.GetCallerSavedUsed();
     BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
     regs_in_use[DecodeReg(addr_reg)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
     if (!jo.memcheck)
       regs_in_use[DecodeReg(dest_reg)] = 0;
@@ -567,7 +567,7 @@ void JitArm64::lmw(UGeckoInstruction inst)
   }
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
   if (!a_is_addr_base_reg)
     gpr.Unlock(addr_base_reg);
@@ -582,7 +582,7 @@ void JitArm64::stmw(UGeckoInstruction inst)
   s32 offset = inst.SIMM_16;
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   ARM64Reg addr_reg = ARM64Reg::W1;
@@ -615,7 +615,7 @@ void JitArm64::stmw(UGeckoInstruction inst)
     BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
     regs_in_use[DecodeReg(ARM64Reg::W0)] = 0;
     regs_in_use[DecodeReg(addr_reg)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
 
     EmitBackpatchRoutine(flags, MemAccessMode::Auto, src_reg, EncodeRegTo64(addr_reg), regs_in_use,
@@ -623,7 +623,7 @@ void JitArm64::stmw(UGeckoInstruction inst)
   }
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
   if (!a_is_addr_base_reg)
     gpr.Unlock(addr_base_reg);
@@ -818,12 +818,12 @@ void JitArm64::dcbz(UGeckoInstruction inst)
   int a = inst.RA, b = inst.RB;
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   Common::ScopeGuard register_guard([&] {
     gpr.Unlock(ARM64Reg::W0, ARM64Reg::W30);
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       gpr.Unlock(ARM64Reg::W2);
   });
 
@@ -892,7 +892,7 @@ void JitArm64::dcbz(UGeckoInstruction inst)
   BitSet32 gprs_to_push = gpr.GetCallerSavedUsed();
   BitSet32 fprs_to_push = fpr.GetCallerSavedUsed();
   gprs_to_push[DecodeReg(ARM64Reg::W0)] = 0;
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
    gprs_to_push[DecodeReg(ARM64Reg::W2)] = 0;
 
   EmitBackpatchRoutine(BackPatchInfo::FLAG_ZERO_256, MemAccessMode::Auto, ARM64Reg::W0,
diff --git a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStoreFloating.cpp b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStoreFloating.cpp
index f83a6bf71d..50fd5edc3a 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStoreFloating.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStoreFloating.cpp
@@ -79,7 +79,7 @@ void JitArm64::lfXX(UGeckoInstruction inst)
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W30);
   fpr.Lock(ARM64Reg::Q0);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   const ARM64Reg VD = fpr.RW(inst.FD, type, false);
@@ -168,7 +168,7 @@ void JitArm64::lfXX(UGeckoInstruction inst)
     BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
     if (!update || early_update)
       regs_in_use[DecodeReg(ARM64Reg::W0)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
     fprs_in_use[DecodeReg(ARM64Reg::Q0)] = 0;
     if (!jo.memcheck)
@@ -194,7 +194,7 @@ void JitArm64::lfXX(UGeckoInstruction inst)
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W30);
   fpr.Unlock(ARM64Reg::Q0);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
 }
 
@@ -279,7 +279,7 @@ void JitArm64::stfXX(UGeckoInstruction inst)
   }
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
 
   ARM64Reg addr_reg = ARM64Reg::W1;
@@ -372,7 +372,7 @@ void JitArm64::stfXX(UGeckoInstruction inst)
     regs_in_use[DecodeReg(ARM64Reg::W0)] = 0;
     if (!update || early_update)
       regs_in_use[DecodeReg(ARM64Reg::W1)] = 0;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       regs_in_use[DecodeReg(ARM64Reg::W2)] = 0;
     fprs_in_use[DecodeReg(ARM64Reg::Q0)] = 0;
 
@@ -428,6 +428,6 @@ void JitArm64::stfXX(UGeckoInstruction inst)
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
   fpr.Unlock(ARM64Reg::Q0);
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
 }
diff --git a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStorePaired.cpp b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStorePaired.cpp
index 9a74720842..cf668358f0 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStorePaired.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStorePaired.cpp
@@ -22,8 +22,8 @@ void JitArm64::psq_lXX(UGeckoInstruction inst)
   INSTRUCTION_START
   JITDISABLE(bJITLoadStorePairedOff);
 
-  // If we have a fastmem arena, the asm routines assume address translation is on.
-  FALLBACK_IF(!js.assumeNoPairedQuantize && jo.fastmem_arena && !m_ppc_state.msr.DR);
+  // If fastmem is enabled, the asm routines assume address translation is on.
+  FALLBACK_IF(!js.assumeNoPairedQuantize && jo.fastmem && !m_ppc_state.msr.DR);
 
   // X30 is LR
   // X0 is the address
@@ -44,7 +44,7 @@ void JitArm64::psq_lXX(UGeckoInstruction inst)
     gpr.Lock(ARM64Reg::W1, ARM64Reg::W2, ARM64Reg::W3);
     fpr.Lock(ARM64Reg::Q1);
   }
-  else if (!jo.fastmem_arena)
+  else if (!jo.fastmem)
   {
     gpr.Lock(ARM64Reg::W2);
   }
@@ -86,7 +86,7 @@ void JitArm64::psq_lXX(UGeckoInstruction inst)
     // Wipe the registers we are using as temporaries
     if (!update || early_update)
       gprs_in_use[DecodeReg(ARM64Reg::W0)] = false;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       gprs_in_use[DecodeReg(ARM64Reg::W2)] = false;
     fprs_in_use[DecodeReg(ARM64Reg::Q0)] = false;
     if (!jo.memcheck)
@@ -136,7 +136,7 @@ void JitArm64::psq_lXX(UGeckoInstruction inst)
     gpr.Unlock(ARM64Reg::W1, ARM64Reg::W2, ARM64Reg::W3);
     fpr.Unlock(ARM64Reg::Q1);
   }
-  else if (!jo.fastmem_arena)
+  else if (!jo.fastmem)
   {
     gpr.Unlock(ARM64Reg::W2);
   }
@@ -147,8 +147,8 @@ void JitArm64::psq_stXX(UGeckoInstruction inst)
   INSTRUCTION_START
   JITDISABLE(bJITLoadStorePairedOff);
 
-  // If we have a fastmem arena, the asm routines assume address translation is on.
-  FALLBACK_IF(!js.assumeNoPairedQuantize && jo.fastmem_arena && !m_ppc_state.msr.DR);
+  // If fastmem is enabled, the asm routines assume address translation is on.
+  FALLBACK_IF(!js.assumeNoPairedQuantize && jo.fastmem && !m_ppc_state.msr.DR);
 
   // X30 is LR
   // X0 contains the scale
@@ -199,9 +199,9 @@ void JitArm64::psq_stXX(UGeckoInstruction inst)
   }
 
   gpr.Lock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
-  if (!js.assumeNoPairedQuantize || !jo.fastmem_arena)
+  if (!js.assumeNoPairedQuantize || !jo.fastmem)
     gpr.Lock(ARM64Reg::W2);
-  if (!js.assumeNoPairedQuantize && !jo.fastmem_arena)
+  if (!js.assumeNoPairedQuantize && !jo.fastmem)
     gpr.Lock(ARM64Reg::W3);
 
   constexpr ARM64Reg scale_reg = ARM64Reg::W0;
@@ -241,7 +241,7 @@ void JitArm64::psq_stXX(UGeckoInstruction inst)
     gprs_in_use[DecodeReg(ARM64Reg::W0)] = false;
     if (!update || early_update)
       gprs_in_use[DecodeReg(ARM64Reg::W1)] = false;
-    if (!jo.fastmem_arena)
+    if (!jo.fastmem)
       gprs_in_use[DecodeReg(ARM64Reg::W2)] = false;
 
     u32 flags = BackPatchInfo::FLAG_STORE | BackPatchInfo::FLAG_FLOAT | BackPatchInfo::FLAG_SIZE_32;
@@ -275,9 +275,9 @@ void JitArm64::psq_stXX(UGeckoInstruction inst)
 
   gpr.Unlock(ARM64Reg::W0, ARM64Reg::W1, ARM64Reg::W30);
   fpr.Unlock(ARM64Reg::Q0);
-  if (!js.assumeNoPairedQuantize || !jo.fastmem_arena)
+  if (!js.assumeNoPairedQuantize || !jo.fastmem)
     gpr.Unlock(ARM64Reg::W2);
-  if (!js.assumeNoPairedQuantize && !jo.fastmem_arena)
+  if (!js.assumeNoPairedQuantize && !jo.fastmem)
     gpr.Unlock(ARM64Reg::W3);
   if (!js.assumeNoPairedQuantize)
     fpr.Unlock(ARM64Reg::Q1);
diff --git a/Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp b/Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp
index e6296f4d31..77d64de300 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitAsm.cpp
@@ -724,7 +724,7 @@ void JitArm64::GenerateQuantizedStores()
   // X0 is the scale
   // X1 is the address
   // X2 is a temporary
-  // X3 is a temporary if jo.fastmem_arena is false (used in EmitBackpatchRoutine)
+  // X3 is a temporary if jo.fastmem is false (used in EmitBackpatchRoutine)
   // X30 is LR
   // Q0 is the register
   // Q1 is a temporary
@@ -733,7 +733,7 @@ void JitArm64::GenerateQuantizedStores()
   BitSet32 gprs_to_push = CALLER_SAVED_GPRS & ~BitSet32{0, 2};
   if (!jo.memcheck)
     gprs_to_push &= ~BitSet32{1};
-  if (!jo.fastmem_arena)
+  if (!jo.fastmem)
     gprs_to_push &= ~BitSet32{3};
   BitSet32 fprs_to_push = BitSet32(0xFFFFFFFF) & ~BitSet32{0, 1};
   ARM64FloatEmitter float_emit(this);
diff --git a/Source/Core/Core/PowerPC/JitInterface.cpp b/Source/Core/Core/PowerPC/JitInterface.cpp
index 9fed234c69..568419a1e4 100644
--- a/Source/Core/Core/PowerPC/JitInterface.cpp
+++ b/Source/Core/Core/PowerPC/JitInterface.cpp
@@ -105,15 +105,21 @@ void JitInterface::UpdateMembase()
   auto& ppc_state = m_system.GetPPCState();
   auto& memory = m_system.GetMemory();
 
+#ifdef _M_ARM_64
+  // JitArm64 is currently using the no fastmem arena code path even when only fastmem is off.
+  const bool fastmem_arena = m_jit->jo.fastmem;
+#else
+  const bool fastmem_arena = m_jit->jo.fastmem_arena;
+#endif
   if (ppc_state.msr.DR)
   {
     ppc_state.mem_ptr =
-        m_jit->jo.fastmem_arena ? memory.GetLogicalBase() : memory.GetLogicalPageMappingsBase();
+        fastmem_arena ? memory.GetLogicalBase() : memory.GetLogicalPageMappingsBase();
   }
   else
   {
     ppc_state.mem_ptr =
-        m_jit->jo.fastmem_arena ? memory.GetPhysicalBase() : memory.GetPhysicalPageMappingsBase();
+        fastmem_arena ? memory.GetPhysicalBase() : memory.GetPhysicalPageMappingsBase();
   }
 }
 

From 8686536d7d7b29c15dcbb2cc428e4295cb16f75d Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sat, 30 Sep 2023 19:38:09 +0200
Subject: [PATCH 5/6] Jit: Always initialize fastmem arena

If dcache is enabled when the game starts, initializing the fastmem
arena is still useful in case the user changes the dcache setting. And
initializing it doesn't really cost anything.
---
 .../Core/PowerPC/CachedInterpreter/CachedInterpreter.cpp | 4 ++--
 Source/Core/Core/PowerPC/Jit64/Jit.cpp                   | 7 +++++--
 Source/Core/Core/PowerPC/JitArm64/Jit.cpp                | 7 +++++--
 Source/Core/Core/PowerPC/JitCommon/JitBase.cpp           | 8 +-------
 Source/Core/Core/PowerPC/JitCommon/JitBase.h             | 8 +-------
 5 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/Source/Core/Core/PowerPC/CachedInterpreter/CachedInterpreter.cpp b/Source/Core/Core/PowerPC/CachedInterpreter/CachedInterpreter.cpp
index 632fbb23f1..8fbed42f06 100644
--- a/Source/Core/Core/PowerPC/CachedInterpreter/CachedInterpreter.cpp
+++ b/Source/Core/Core/PowerPC/CachedInterpreter/CachedInterpreter.cpp
@@ -82,7 +82,7 @@ CachedInterpreter::~CachedInterpreter() = default;
 
 void CachedInterpreter::Init()
 {
-  RefreshConfig(InitFastmemArena::No);
+  RefreshConfig();
 
   m_code.reserve(CODE_SIZE / sizeof(Instruction));
 
@@ -384,5 +384,5 @@ void CachedInterpreter::ClearCache()
 {
   m_code.clear();
   m_block_cache.Clear();
-  RefreshConfig(InitFastmemArena::No);
+  RefreshConfig();
 }
diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
index 6dbb998d3a..fa96bd4b20 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
@@ -251,7 +251,10 @@ bool Jit64::BackPatch(SContext* ctx)
 
 void Jit64::Init()
 {
-  RefreshConfig(InitFastmemArena::Yes);
+  auto& memory = m_system.GetMemory();
+  jo.fastmem_arena = memory.InitFastmemArena();
+
+  RefreshConfig();
 
   EnableBlockLink();
 
@@ -304,7 +307,7 @@ void Jit64::ClearCache()
   m_const_pool.Clear();
   ClearCodeSpace();
   Clear();
-  RefreshConfig(InitFastmemArena::No);
+  RefreshConfig();
   asm_routines.Regenerate();
   ResetFreeMemoryRanges();
 }
diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
index efc24f988a..4c927ed427 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
@@ -47,7 +47,10 @@ JitArm64::~JitArm64() = default;
 
 void JitArm64::Init()
 {
-  RefreshConfig(InitFastmemArena::Yes);
+  auto& memory = m_system.GetMemory();
+  jo.fastmem_arena = memory.InitFastmemArena();
+
+  RefreshConfig();
 
   const size_t child_code_size = jo.memcheck ? FARCODE_SIZE_MMU : FARCODE_SIZE;
   AllocCodeSpace(CODE_SIZE + child_code_size);
@@ -155,7 +158,7 @@ void JitArm64::ClearCache()
   const Common::ScopedJITPageWriteAndNoExecute enable_jit_page_writes;
   ClearCodeSpace();
   m_far_code.ClearCodeSpace();
-  RefreshConfig(InitFastmemArena::No);
+  RefreshConfig();
 
   GenerateAsm();
 
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp b/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
index 88d28ea8ac..bcdafd9725 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
@@ -115,7 +115,7 @@ bool JitBase::DoesConfigNeedRefresh()
   });
 }
 
-void JitBase::RefreshConfig(InitFastmemArena init_fastmem_arena)
+void JitBase::RefreshConfig()
 {
   for (const auto& [member, config_info] : JIT_SETTINGS)
     this->*member = Config::Get(*config_info);
@@ -132,12 +132,6 @@ void JitBase::RefreshConfig(InitFastmemArena init_fastmem_arena)
   analyzer.SetFloatExceptionsEnabled(m_enable_float_exceptions);
   analyzer.SetDivByZeroExceptionsEnabled(m_enable_div_by_zero_exceptions);
 
-  if (init_fastmem_arena != InitFastmemArena::No)
-  {
-    auto& memory = m_system.GetMemory();
-    jo.fastmem_arena = m_fastmem_enabled && memory.InitFastmemArena();
-  }
-
   bool any_watchpoints = m_system.GetPowerPC().GetMemChecks().HasAny();
   jo.fastmem = m_fastmem_enabled && jo.fastmem_arena && (m_ppc_state.msr.DR || !any_watchpoints) &&
                EMM::IsExceptionHandlerSupported();
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBase.h b/Source/Core/Core/PowerPC/JitCommon/JitBase.h
index f96c068b98..6c95559438 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBase.h
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBase.h
@@ -163,14 +163,8 @@ protected:
 
   static const std::array<std::pair<bool JitBase::*, const Config::Info<bool>*>, 22> JIT_SETTINGS;
 
-  enum class InitFastmemArena
-  {
-    No,
-    Yes,
-  };
-
   bool DoesConfigNeedRefresh();
-  void RefreshConfig(InitFastmemArena init_fastmem_arena);
+  void RefreshConfig();
 
   void InitBLROptimization();
   void ProtectStack();

From d04e67be3d26b7017f2bb061618181fd71611f9a Mon Sep 17 00:00:00 2001
From: JosJuice
Date: Sun, 1 Oct 2023 22:54:09 +0200
Subject: [PATCH 6/6] Add fastmem arena setting

Just for debugging.
---
 .../dolphinemu/features/settings/model/BooleanSetting.kt  | 2 ++
 .../features/settings/ui/SettingsFragmentPresenter.kt     | 8 ++++++++
 Source/Android/app/src/main/res/values/strings.xml        | 1 +
 Source/Core/Core/Config/MainSettings.cpp                  | 1 +
 Source/Core/Core/Config/MainSettings.h                    | 1 +
 Source/Core/Core/PowerPC/Jit64/Jit.cpp                    | 3 +--
 Source/Core/Core/PowerPC/JitArm64/Jit.cpp                 | 3 +--
 Source/Core/Core/PowerPC/JitCommon/JitBase.cpp            | 6 ++++++
 Source/Core/Core/PowerPC/JitCommon/JitBase.h              | 2 ++
 Source/Core/DolphinQt/MenuBar.cpp                         | 7 +++++++
 Source/Core/DolphinQt/MenuBar.h                           | 1 +
 11 files changed, 31 insertions(+), 4 deletions(-)

diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
index 5ccc6b4aad..2d2a0065b4 100644
--- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
+++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/model/BooleanSetting.kt
@@ -14,6 +14,7 @@ enum class BooleanSetting(
     MAIN_SKIP_IPL(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "SkipIPL", true),
     MAIN_DSP_HLE(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "DSPHLE", true),
     MAIN_FASTMEM(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "Fastmem", true),
+    MAIN_FASTMEM_ARENA(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "FastmemArena", true),
     MAIN_CPU_THREAD(Settings.FILE_DOLPHIN, Settings.SECTION_INI_CORE, "CPUThread", true),
     MAIN_SYNC_ON_SKIP_IDLE(
         Settings.FILE_DOLPHIN,
@@ -897,6 +898,7 @@ enum class BooleanSetting(
     companion object {
         private val NOT_RUNTIME_EDITABLE_ARRAY = arrayOf(
            MAIN_DSP_HLE,
+            MAIN_FASTMEM_ARENA,
            MAIN_CPU_THREAD,
            MAIN_ENABLE_CHEATS,
            MAIN_OVERRIDE_REGION_SETTINGS,
diff --git a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.kt b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.kt
index 099ae980e6..b85a3999cc 100644
--- a/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.kt
+++ b/Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/settings/ui/SettingsFragmentPresenter.kt
@@ -1941,6 +1941,14 @@ class SettingsFragmentPresenter(
                 0
             )
         )
+        sl.add(
+            InvertedSwitchSetting(
+                context,
+                BooleanSetting.MAIN_FASTMEM_ARENA,
+                R.string.debug_fastmem_arena,
+                0
+            )
+        )
 
         sl.add(HeaderSetting(context, R.string.debug_jit_header, 0))
         sl.add(
diff --git a/Source/Android/app/src/main/res/values/strings.xml b/Source/Android/app/src/main/res/values/strings.xml
index a81cb58a59..044c1ad65d 100644
--- a/Source/Android/app/src/main/res/values/strings.xml
+++ b/Source/Android/app/src/main/res/values/strings.xml
@@ -399,6 +399,7 @@
     <string name="debug_submenu">Debug</string>
     <string name="debug_warning">Warning: Debug settings will slow emulation</string>
     <string name="debug_fastmem">Disable Fastmem</string>
+    <string name="debug_fastmem_arena">Disable Fastmem Arena</string>
    <string name="debug_jit_header">Jit</string>
    <string name="debug_jit_off">Jit Disabled</string>
    <string name="debug_jit_loadstore_off">Jit Load Store Disabled</string>
diff --git a/Source/Core/Core/Config/MainSettings.cpp b/Source/Core/Core/Config/MainSettings.cpp
index be901fe289..4e545fc402 100644
--- a/Source/Core/Core/Config/MainSettings.cpp
+++ b/Source/Core/Core/Config/MainSettings.cpp
@@ -38,6 +38,7 @@ const Info<PowerPC::CPUCore> MAIN_CPU_CORE{{System::Main, "Core", "CPUCore"},
                                            PowerPC::DefaultCPUCore()};
 const Info<bool> MAIN_JIT_FOLLOW_BRANCH{{System::Main, "Core", "JITFollowBranch"}, true};
 const Info<bool> MAIN_FASTMEM{{System::Main, "Core", "Fastmem"}, true};
+const Info<bool> MAIN_FASTMEM_ARENA{{System::Main, "Core", "FastmemArena"}, true};
 const Info<bool> MAIN_ACCURATE_CPU_CACHE{{System::Main, "Core", "AccurateCPUCache"}, false};
 const Info<bool> MAIN_DSP_HLE{{System::Main, "Core", "DSPHLE"}, true};
 const Info<int> MAIN_MAX_FALLBACK{{System::Main, "Core", "MaxFallback"}, 100};
diff --git a/Source/Core/Core/Config/MainSettings.h b/Source/Core/Core/Config/MainSettings.h
index 0d591ee5db..b591a00de4 100644
--- a/Source/Core/Core/Config/MainSettings.h
+++ b/Source/Core/Core/Config/MainSettings.h
@@ -56,6 +56,7 @@ extern const Info<bool> MAIN_SKIP_IPL;
 extern const Info<PowerPC::CPUCore> MAIN_CPU_CORE;
 extern const Info<bool> MAIN_JIT_FOLLOW_BRANCH;
 extern const Info<bool> MAIN_FASTMEM;
+extern const Info<bool> MAIN_FASTMEM_ARENA;
 extern const Info<bool> MAIN_ACCURATE_CPU_CACHE;
 // Should really be in the DSP section, but we're kind of stuck with bad decisions made in the past.
 extern const Info<bool> MAIN_DSP_HLE;
diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
index fa96bd4b20..ea65a5c6f0 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
@@ -251,8 +251,7 @@ bool Jit64::BackPatch(SContext* ctx)
 
 void Jit64::Init()
 {
-  auto& memory = m_system.GetMemory();
-  jo.fastmem_arena = memory.InitFastmemArena();
+  InitFastmemArena();
 
   RefreshConfig();
 
diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
index 4c927ed427..436229c07b 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
@@ -47,8 +47,7 @@ JitArm64::~JitArm64() = default;
 
 void JitArm64::Init()
 {
-  auto& memory = m_system.GetMemory();
-  jo.fastmem_arena = memory.InitFastmemArena();
+  InitFastmemArena();
 
   RefreshConfig();
 
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp b/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
index bcdafd9725..c1cce80401 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBase.cpp
@@ -140,6 +140,12 @@ void JitBase::RefreshConfig()
   jo.div_by_zero_exceptions = m_enable_div_by_zero_exceptions;
 }
 
+void JitBase::InitFastmemArena()
+{
+  auto& memory = m_system.GetMemory();
+  jo.fastmem_arena = Config::Get(Config::MAIN_FASTMEM_ARENA) && memory.InitFastmemArena();
+}
+
 void JitBase::InitBLROptimization()
 {
   m_enable_blr_optimization =
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBase.h b/Source/Core/Core/PowerPC/JitCommon/JitBase.h
index 6c95559438..00fea6ef84 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBase.h
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBase.h
@@ -166,6 +166,8 @@ protected:
   bool DoesConfigNeedRefresh();
   void RefreshConfig();
 
+  void InitFastmemArena();
+
   void InitBLROptimization();
   void ProtectStack();
   void UnprotectStack();
diff --git a/Source/Core/DolphinQt/MenuBar.cpp b/Source/Core/DolphinQt/MenuBar.cpp
index ad8428bee3..2b68cf640d 100644
--- a/Source/Core/DolphinQt/MenuBar.cpp
+++ b/Source/Core/DolphinQt/MenuBar.cpp
@@ -139,6 +139,7 @@ void MenuBar::OnEmulationStateChanged(Core::State state)
   m_jit_interpreter_core->setEnabled(running);
   m_jit_block_linking->setEnabled(!running);
   m_jit_disable_cache->setEnabled(!running);
+  m_jit_disable_fastmem_arena->setEnabled(!running);
   m_jit_clear_cache->setEnabled(running);
   m_jit_log_coverage->setEnabled(!running);
   m_jit_search_instruction->setEnabled(running);
@@ -847,6 +848,12 @@ void MenuBar::AddJITMenu()
   connect(m_jit_disable_fastmem, &QAction::toggled,
           [](bool enabled) { Config::SetBaseOrCurrent(Config::MAIN_FASTMEM, !enabled); });
 
+  m_jit_disable_fastmem_arena = m_jit->addAction(tr("Disable Fastmem Arena"));
+  m_jit_disable_fastmem_arena->setCheckable(true);
+  m_jit_disable_fastmem_arena->setChecked(!Config::Get(Config::MAIN_FASTMEM_ARENA));
+  connect(m_jit_disable_fastmem_arena, &QAction::toggled,
+          [](bool enabled) { Config::SetBaseOrCurrent(Config::MAIN_FASTMEM_ARENA, !enabled); });
+
   m_jit_clear_cache = m_jit->addAction(tr("Clear Cache"), this, &MenuBar::ClearCache);
 
   m_jit->addSeparator();
diff --git a/Source/Core/DolphinQt/MenuBar.h b/Source/Core/DolphinQt/MenuBar.h
index a5c5730b6b..e505160afa 100644
--- a/Source/Core/DolphinQt/MenuBar.h
+++ b/Source/Core/DolphinQt/MenuBar.h
@@ -264,6 +264,7 @@ private:
   QAction* m_jit_block_linking;
   QAction* m_jit_disable_cache;
   QAction* m_jit_disable_fastmem;
+  QAction* m_jit_disable_fastmem_arena;
   QAction* m_jit_clear_cache;
   QAction* m_jit_log_coverage;
   QAction* m_jit_search_instruction;