core/memory: Migrate over SetCurrentPageTable() to the Memory class

Now that literally every other API function is converted over to the
Memory class, we can just move the file-local page table into the Memory
implementation class, finally getting rid of global state within the
memory code.
Lioncash 2019-11-26 18:34:30 -05:00
parent 50a518be69
commit e7e939104b
3 changed files with 34 additions and 26 deletions
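
Only one of the three changed files (the memory implementation, core/memory.cpp) is reproduced below. As the commit message notes, the page table pointer moves from a file-local global into Memory::Impl, so callers now reach it through the system's Memory instance rather than a namespace-scope free function. A minimal sketch of what a call site looks like after this change; the caller shown here (SwitchToProcess) is hypothetical, and it assumes Core::System exposes its memory subsystem via System::Memory(), as the surrounding Memory-class migration commits do:

// Hypothetical call site illustrating the new member-function API.
// Before this commit, callers invoked the free function Memory::SetCurrentPageTable(),
// which wrote to the file-local current_page_table and fetched the system
// through Core::System::GetInstance().
void SwitchToProcess(Core::System& system, Kernel::Process& process) {
    // After this commit, the page table lives inside Memory::Impl, so the
    // caller goes through the system's Memory instance instead of global state.
    system.Memory().SetCurrentPageTable(process);
}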


@@ -20,9 +20,6 @@
 #include "video_core/gpu.h"
 
 namespace Memory {
 
-namespace {
-Common::PageTable* current_page_table = nullptr;
-} // Anonymous namespace
 // Implementation class used to keep the specifics of the memory subsystem hidden
 // from outside classes. This also allows modification to the internals of the memory
@@ -30,6 +27,17 @@ Common::PageTable* current_page_table = nullptr;
 struct Memory::Impl {
     explicit Impl(Core::System& system_) : system{system_} {}
 
+    void SetCurrentPageTable(Kernel::Process& process) {
+        current_page_table = &process.VMManager().page_table;
+
+        const std::size_t address_space_width = process.VMManager().GetAddressSpaceWidth();
+
+        system.ArmInterface(0).PageTableChanged(*current_page_table, address_space_width);
+        system.ArmInterface(1).PageTableChanged(*current_page_table, address_space_width);
+        system.ArmInterface(2).PageTableChanged(*current_page_table, address_space_width);
+        system.ArmInterface(3).PageTableChanged(*current_page_table, address_space_width);
+    }
+
     void MapMemoryRegion(Common::PageTable& page_table, VAddr base, u64 size, u8* target) {
         ASSERT_MSG((size & PAGE_MASK) == 0, "non-page aligned size: {:016X}", size);
         ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:016X}", base);
@@ -575,12 +583,17 @@ struct Memory::Impl {
         }
     }
 
+    Common::PageTable* current_page_table = nullptr;
     Core::System& system;
 };
 
 Memory::Memory(Core::System& system) : impl{std::make_unique<Impl>(system)} {}
 
 Memory::~Memory() = default;
 
+void Memory::SetCurrentPageTable(Kernel::Process& process) {
+    impl->SetCurrentPageTable(process);
+}
+
 void Memory::MapMemoryRegion(Common::PageTable& page_table, VAddr base, u64 size, u8* target) {
     impl->MapMemoryRegion(page_table, base, size, target);
 }
@@ -695,18 +708,6 @@ void Memory::RasterizerMarkRegionCached(VAddr vaddr, u64 size, bool cached) {
     impl->RasterizerMarkRegionCached(vaddr, size, cached);
 }
 
-void SetCurrentPageTable(Kernel::Process& process) {
-    current_page_table = &process.VMManager().page_table;
-
-    const std::size_t address_space_width = process.VMManager().GetAddressSpaceWidth();
-
-    auto& system = Core::System::GetInstance();
-    system.ArmInterface(0).PageTableChanged(*current_page_table, address_space_width);
-    system.ArmInterface(1).PageTableChanged(*current_page_table, address_space_width);
-    system.ArmInterface(2).PageTableChanged(*current_page_table, address_space_width);
-    system.ArmInterface(3).PageTableChanged(*current_page_table, address_space_width);
-}
-
 bool IsKernelVirtualAddress(const VAddr vaddr) {
     return KERNEL_REGION_VADDR <= vaddr && vaddr < KERNEL_REGION_END;
 }
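
Since the forwarding definition above requires a matching declaration, the header side of this change (core/memory.h, one of the other changed files, not shown above) amounts to moving SetCurrentPageTable() from namespace scope into the Memory class. An abridged sketch of the resulting shape, with unrelated members omitted:

// Abridged sketch of core/memory.h after this commit (unrelated members omitted).
namespace Core {
class System;
}

namespace Kernel {
class Process;
}

namespace Memory {

class Memory {
public:
    explicit Memory(Core::System& system);
    ~Memory();

    // Changes the currently active page table to that of the given process,
    // then notifies each ARM interface of the new table and address space width.
    void SetCurrentPageTable(Kernel::Process& process);

    // ... rest of the interface is unchanged by this commit ...

private:
    struct Impl;
    std::unique_ptr<Impl> impl;
};

} // namespace Memory

Keeping the raw page table pointer as a member of Memory::Impl rather than a namespace-scope global ties the active page table to a particular Core::System instance, which is what removes the last piece of global state from the memory code.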