From 27ceda2c6cf01b165bc7517343f4e68a258db7a6 Mon Sep 17 00:00:00 2001 From: MrPurple666 Date: Tue, 1 Apr 2025 04:34:05 -0300 Subject: [PATCH] WIP: DO-NOT-MERGE: NCE experiments: Fix build errors --- src/core/arm/nce/arm_nce.cpp | 32 ++++++++++++------------ src/core/arm/nce/interpreter_visitor.cpp | 12 +++------ 2 files changed, 20 insertions(+), 24 deletions(-) diff --git a/src/core/arm/nce/arm_nce.cpp b/src/core/arm/nce/arm_nce.cpp index 76b476e672..90891e241d 100644 --- a/src/core/arm/nce/arm_nce.cpp +++ b/src/core/arm/nce/arm_nce.cpp @@ -382,23 +382,23 @@ void ArmNce::SignalInterrupt(Kernel::KThread* thread) { } } +const std::size_t CACHE_PAGE_SIZE = 4096; + void ArmNce::ClearInstructionCache() { - #if defined(__GNUC__) || defined(__clang__) - const size_t PAGE_SIZE = 4096; - void* start = (void*)((uintptr_t)__builtin_return_address(0) & ~(PAGE_SIZE - 1)); - void* end = (void*)((uintptr_t)start + PAGE_SIZE * 2); // Clear two pages for better coverage - - // Prefetch next likely pages - __builtin_prefetch((void*)((uintptr_t)end), 1, 3); - __builtin___clear_cache(static_cast<char*>(start), static_cast<char*>(end)); - #endif - - #ifdef __aarch64__ - // Ensure all previous memory operations complete - asm volatile("dmb ish" ::: "memory"); - asm volatile("dsb ish" ::: "memory"); - asm volatile("isb" ::: "memory"); - #endif +#if defined(__GNUC__) || defined(__clang__) + void* start = (void*)((uintptr_t)__builtin_return_address(0) & ~(CACHE_PAGE_SIZE - 1)); + void* end = + (void*)((uintptr_t)start + CACHE_PAGE_SIZE * 2); // Clear two pages for better coverage + // Prefetch next likely pages + __builtin_prefetch((void*)((uintptr_t)end), 1, 3); + __builtin___clear_cache(static_cast<char*>(start), static_cast<char*>(end)); +#endif +#ifdef __aarch64__ + // Ensure all previous memory operations complete + asm volatile("dmb ish" ::: "memory"); + asm volatile("dsb ish" ::: "memory"); + asm volatile("isb" ::: "memory"); +#endif } void ArmNce::InvalidateCacheRange(u64 addr, std::size_t size) { diff 
--git a/src/core/arm/nce/interpreter_visitor.cpp b/src/core/arm/nce/interpreter_visitor.cpp index 31ab7735d2..b1cf0f1a2c 100644 --- a/src/core/arm/nce/interpreter_visitor.cpp +++ b/src/core/arm/nce/interpreter_visitor.cpp @@ -408,6 +408,8 @@ bool InterpreterVisitor::RegisterImmediate(bool wback, bool postindex, size_t sc bool signed_ = false; size_t regsize = 0; + const size_t datasize = 8 << scale; + if (opc.Bit<1>() == 0) { memop = opc.Bit<0>() ? MemOp::Load : MemOp::Store; regsize = size == 0b11 ? 64 : 32; @@ -427,7 +429,6 @@ bool InterpreterVisitor::RegisterImmediate(bool wback, bool postindex, size_t sc return false; } - // Use aligned access where possible alignas(8) u64 address; if (Rn == Reg::SP) { address = this->GetSp(); @@ -435,21 +436,18 @@ bool InterpreterVisitor::RegisterImmediate(bool wback, bool postindex, size_t sc address = this->GetReg(Rn); } - // Pre-index addressing if (!postindex) { address += offset; } - // Alignment optimization for common cases - const bool is_aligned = (address % 8) == 0; + //const bool is_aligned = (address % 8) == 0; - // Enhanced prefetching for loads with aligned addresses if (memop == MemOp::Load) { const size_t CACHE_LINE_SIZE = 64; if ((address % 16) == 0) { __builtin_prefetch((void*)address, 0, 3); __builtin_prefetch((void*)(address + CACHE_LINE_SIZE), 0, 3); - if (datasize >= 32) { + if (datasize >= 32) { // Now datasize is in scope __builtin_prefetch((void*)(address + CACHE_LINE_SIZE * 2), 0, 2); } } else if ((address % 8) == 0) { @@ -457,7 +455,6 @@ bool InterpreterVisitor::RegisterImmediate(bool wback, bool postindex, size_t sc } } - const size_t datasize = 8 << scale; switch (memop) { case MemOp::Store: { u64 data = this->GetReg(Rt); @@ -483,7 +480,6 @@ bool InterpreterVisitor::RegisterImmediate(bool wback, bool postindex, size_t sc if (postindex) { address += offset; } - if (Rn == Reg::SP) { this->SetSp(address); } else {