kernel: Add KAutoObject and KSlabHeap

src/common/common_funcs.h
@@ -110,6 +110,14 @@ __declspec(dllimport) void __stdcall DebugBreak(void);
    return static_cast<T>(key) == 0; \
}

#define CITRA_NON_COPYABLE(cls) \
    cls(const cls&) = delete; \
    cls& operator=(const cls&) = delete

#define CITRA_NON_MOVEABLE(cls) \
    cls(cls&&) = delete; \
    cls& operator=(cls&&) = delete

// Generic function to get last error message.
// Call directly after the command or use the error num.
// This function might change the error code.
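
For reference, a minimal sketch of how these two macros are intended to be used in a class definition (the class name below is hypothetical, not part of the commit):

// Hypothetical class, for illustration only: the macros expand to deleted
// copy/move constructors and assignment operators.
class ExampleResource {
public:
    CITRA_NON_COPYABLE(ExampleResource);  // deletes copy constructor and copy assignment
    CITRA_NON_MOVEABLE(ExampleResource);  // deletes move constructor and move assignment

    // Declaring the deleted copy constructor suppresses the implicit default
    // constructor, so it has to be brought back explicitly.
    ExampleResource() = default;
};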

src/core/CMakeLists.txt
@@ -147,6 +147,9 @@ add_library(citra_core STATIC
    hle/kernel/ipc.h
    hle/kernel/ipc_debugger/recorder.cpp
    hle/kernel/ipc_debugger/recorder.h
    hle/kernel/k_auto_object.cpp
    hle/kernel/k_auto_object.h
    hle/kernel/k_slab_heap.h
    hle/kernel/kernel.cpp
    hle/kernel/kernel.h
    hle/kernel/memory.cpp
@@ -171,6 +174,7 @@ add_library(citra_core STATIC
    hle/kernel/shared_memory.h
    hle/kernel/shared_page.cpp
    hle/kernel/shared_page.h
    hle/kernel/slab_helpers.h
    hle/kernel/svc.cpp
    hle/kernel/svc.h
    hle/kernel/svc_wrapper.h

src/core/hle/kernel/k_auto_object.cpp (new file, 23 lines)
@@ -0,0 +1,23 @@
// Copyright 2023 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include "core/hle/kernel/k_auto_object.h"
#include "core/hle/kernel/kernel.h"

namespace Kernel {

KAutoObject* KAutoObject::Create(KAutoObject* obj) {
    obj->m_ref_count = 1;
    return obj;
}

void KAutoObject::RegisterWithKernel() {
    m_kernel.RegisterKernelObject(this);
}

void KAutoObject::UnregisterWithKernel(KernelSystem& kernel, KAutoObject* self) {
    kernel.UnregisterKernelObject(self);
}

} // namespace Kernel

src/core/hle/kernel/k_auto_object.h (new file, 288 lines)
@@ -0,0 +1,288 @@
// Copyright 2023 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <atomic>

#include "common/assert.h"
#include "common/common_funcs.h"
#include "common/common_types.h"

namespace Kernel {

class KernelSystem;
class Process;

enum class ClassTokenType : u32 {
    KAutoObject = 0,
    WaitObject = 1,
    KSemaphore = 27,
    KEvent = 31,
    KTimer = 53,
    KMutex = 57,
    KDebug = 77,
    KServerPort = 85,
    KDmaObject = 89,
    KClientPort = 101,
    KCodeSet = 104,
    KSession = 112,
    KThread = 141,
    KServerSession = 149,
    KAddressArbiter = 152,
    KClientSession = 165,
    KPort = 168,
    KSharedMemory = 176,
    KProcess = 197,
    KResourceLimit = 200,
};
DECLARE_ENUM_FLAG_OPERATORS(ClassTokenType)

#define KERNEL_AUTOOBJECT_TRAITS_IMPL(CLASS, BASE_CLASS, ATTRIBUTE) \
private: \
    static constexpr inline const char* const TypeName = #CLASS; \
    static constexpr inline auto ClassToken = ClassTokenType::CLASS; \
    \
public: \
    CITRA_NON_COPYABLE(CLASS); \
    CITRA_NON_MOVEABLE(CLASS); \
    \
    using BaseClass = BASE_CLASS; \
    static constexpr TypeObj GetStaticTypeObj() { \
        return TypeObj(TypeName, ClassToken); \
    } \
    static constexpr const char* GetStaticTypeName() { return TypeName; } \
    virtual TypeObj GetTypeObj() ATTRIBUTE { return GetStaticTypeObj(); } \
    virtual const char* GetTypeName() ATTRIBUTE { return GetStaticTypeName(); } \
    \
private: \
    constexpr bool operator!=(const TypeObj& rhs)

#define KERNEL_AUTOOBJECT_TRAITS(CLASS, BASE_CLASS) \
    KERNEL_AUTOOBJECT_TRAITS_IMPL(CLASS, BASE_CLASS, const override)

class KAutoObject {
protected:
    class TypeObj {
    public:
        constexpr explicit TypeObj(const char* n, ClassTokenType tok)
            : m_name(n), m_class_token(tok) {}

        constexpr const char* GetName() const {
            return m_name;
        }
        constexpr ClassTokenType GetClassToken() const {
            return m_class_token;
        }

        constexpr bool operator==(const TypeObj& rhs) const {
            return this->GetClassToken() == rhs.GetClassToken();
        }

        constexpr bool operator!=(const TypeObj& rhs) const {
            return this->GetClassToken() != rhs.GetClassToken();
        }

        constexpr bool IsDerivedFrom(const TypeObj& rhs) const {
            return (this->GetClassToken() | rhs.GetClassToken()) == this->GetClassToken();
        }

    private:
        const char* m_name;
        ClassTokenType m_class_token;
    };

private:
    KERNEL_AUTOOBJECT_TRAITS_IMPL(KAutoObject, KAutoObject, const);

public:
    explicit KAutoObject(KernelSystem& kernel) : m_kernel(kernel) {
        RegisterWithKernel();
    }
    virtual ~KAutoObject() = default;

    static KAutoObject* Create(KAutoObject* ptr);

    // Destroy is responsible for destroying the auto object's resources when ref_count hits zero.
    virtual void Destroy() {
        UNIMPLEMENTED();
    }

    // Finalize is responsible for cleaning up resources, but does not destroy the object.
    virtual void Finalize() {}

    virtual Process* GetOwner() const {
        return nullptr;
    }

    u32 GetReferenceCount() const {
        return m_ref_count.load();
    }

    bool IsDerivedFrom(const TypeObj& rhs) const {
        return this->GetTypeObj().IsDerivedFrom(rhs);
    }

    bool IsDerivedFrom(const KAutoObject& rhs) const {
        return this->IsDerivedFrom(rhs.GetTypeObj());
    }

    template <typename Derived>
    Derived DynamicCast() {
        static_assert(std::is_pointer_v<Derived>);
        using DerivedType = std::remove_pointer_t<Derived>;

        if (this->IsDerivedFrom(DerivedType::GetStaticTypeObj())) {
            return static_cast<Derived>(this);
        } else {
            return nullptr;
        }
    }

    template <typename Derived>
    const Derived DynamicCast() const {
        static_assert(std::is_pointer_v<Derived>);
        using DerivedType = std::remove_pointer_t<Derived>;

        if (this->IsDerivedFrom(DerivedType::GetStaticTypeObj())) {
            return static_cast<Derived>(this);
        } else {
            return nullptr;
        }
    }

    bool Open() {
        // Atomically increment the reference count, only if it's positive.
        u32 cur_ref_count = m_ref_count.load(std::memory_order_acquire);
        do {
            if (cur_ref_count == 0) {
                return false;
            }
            ASSERT(cur_ref_count < cur_ref_count + 1);
        } while (!m_ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count + 1,
                                                    std::memory_order_relaxed));

        return true;
    }

    void Close() {
        // Atomically decrement the reference count, not allowing it to become negative.
        u32 cur_ref_count = m_ref_count.load(std::memory_order_acquire);
        do {
            ASSERT(cur_ref_count > 0);
        } while (!m_ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count - 1,
                                                    std::memory_order_acq_rel));

        // If ref count hits zero, destroy the object.
        if (cur_ref_count - 1 == 0) {
            KernelSystem& kernel = m_kernel;
            this->Destroy();
            KAutoObject::UnregisterWithKernel(kernel, this);
        }
    }

private:
    void RegisterWithKernel();
    static void UnregisterWithKernel(KernelSystem& kernel, KAutoObject* self);

protected:
    KernelSystem& m_kernel;

private:
    std::atomic<u32> m_ref_count{};
};

template <typename T>
class KScopedAutoObject {
public:
    CITRA_NON_COPYABLE(KScopedAutoObject);

    constexpr KScopedAutoObject() = default;

    constexpr KScopedAutoObject(T* o) : m_obj(o) {
        if (m_obj != nullptr) {
            m_obj->Open();
        }
    }

    ~KScopedAutoObject() {
        if (m_obj != nullptr) {
            m_obj->Close();
        }
        m_obj = nullptr;
    }

    template <typename U>
        requires(std::derived_from<T, U> || std::derived_from<U, T>)
    constexpr KScopedAutoObject(KScopedAutoObject<U>&& rhs) {
        if constexpr (std::derived_from<U, T>) {
            // Upcast.
            m_obj = rhs.m_obj;
            rhs.m_obj = nullptr;
        } else {
            // Downcast.
            T* derived = nullptr;
            if (rhs.m_obj != nullptr) {
                derived = rhs.m_obj->template DynamicCast<T*>();
                if (derived == nullptr) {
                    rhs.m_obj->Close();
                }
            }

            m_obj = derived;
            rhs.m_obj = nullptr;
        }
    }

    constexpr KScopedAutoObject<T>& operator=(KScopedAutoObject<T>&& rhs) {
        rhs.Swap(*this);
        return *this;
    }

    constexpr T* operator->() {
        return m_obj;
    }
    constexpr T& operator*() {
        return *m_obj;
    }

    constexpr void Reset(T* o) {
        KScopedAutoObject(o).Swap(*this);
    }

    constexpr T* GetPointerUnsafe() {
        return m_obj;
    }

    constexpr T* GetPointerUnsafe() const {
        return m_obj;
    }

    constexpr T* ReleasePointerUnsafe() {
        T* ret = m_obj;
        m_obj = nullptr;
        return ret;
    }

    constexpr bool IsNull() const {
        return m_obj == nullptr;
    }
    constexpr bool IsNotNull() const {
        return m_obj != nullptr;
    }

private:
    template <typename U>
    friend class KScopedAutoObject;

private:
    T* m_obj{};

private:
    constexpr void Swap(KScopedAutoObject& rhs) noexcept {
        std::swap(m_obj, rhs.m_obj);
    }
};

} // namespace Kernel
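
To illustrate how the class-token machinery, DynamicCast, and the scoped reference holder fit together, here is a small usage sketch. It is not part of the commit: KEvent stands in for a future KAutoObject subclass (only its token exists in ClassTokenType at this point), and the free function is hypothetical.

// Hypothetical sketch, assuming a derived type is declared with the traits macro.
class KEvent final : public KAutoObject {
    KERNEL_AUTOOBJECT_TRAITS(KEvent, KAutoObject);

public:
    explicit KEvent(KernelSystem& kernel) : KAutoObject(kernel) {}
};

void Example(KAutoObject* obj) {
    // Token-based type check: (derived_token | base_token) == derived_token.
    // KAutoObject's token is 0, so every object "is derived from" KAutoObject.
    KEvent* event = obj->DynamicCast<KEvent*>();
    if (event != nullptr) {
        // KScopedAutoObject opens a reference on construction and closes it when
        // the scope ends, so the object cannot be destroyed while it is in use.
        KScopedAutoObject<KEvent> scoped{event};
        [[maybe_unused]] const char* name = scoped->GetTypeName(); // "KEvent"
    }
}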

src/core/hle/kernel/k_slab_heap.h (new file, 191 lines)
@@ -0,0 +1,191 @@
// Copyright 2023 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <atomic>

#include "common/assert.h"
#include "common/atomic_ops.h"
#include "common/common_funcs.h"
#include "common/common_types.h"

namespace Kernel {

class KernelSystem;

namespace impl {

class KSlabHeapImpl {
    CITRA_NON_COPYABLE(KSlabHeapImpl);
    CITRA_NON_MOVEABLE(KSlabHeapImpl);

public:
    struct Node {
        Node* next{};
    };

public:
    constexpr KSlabHeapImpl() = default;

    void Initialize() {
        ASSERT(m_head == nullptr);
    }

    Node* GetHead() const {
        return m_head;
    }

    void* Allocate() {
        Node* ret = m_head;
        if (ret != nullptr) [[likely]] {
            m_head = ret->next;
        }
        return ret;
    }

    void Free(void* obj) {
        Node* node = static_cast<Node*>(obj);
        node->next = m_head;
        m_head = node;
    }

private:
    std::atomic<Node*> m_head{};
};

} // namespace impl

class KSlabHeapBase : protected impl::KSlabHeapImpl {
    CITRA_NON_COPYABLE(KSlabHeapBase);
    CITRA_NON_MOVEABLE(KSlabHeapBase);

private:
    size_t m_obj_size{};
    uintptr_t m_peak{};
    uintptr_t m_start{};
    uintptr_t m_end{};

private:
    void UpdatePeakImpl(uintptr_t obj) {
        const uintptr_t alloc_peak = obj + this->GetObjectSize();
        uintptr_t cur_peak = m_peak;
        do {
            if (alloc_peak <= cur_peak) {
                break;
            }
        } while (
            !Common::AtomicCompareAndSwap(std::addressof(m_peak), alloc_peak, cur_peak, cur_peak));
    }

public:
    constexpr KSlabHeapBase() = default;

    bool Contains(uintptr_t address) const {
        return m_start <= address && address < m_end;
    }

    void Initialize(size_t obj_size, void* memory, size_t memory_size) {
        // Ensure we don't initialize a slab using null memory.
        ASSERT(memory != nullptr);

        // Set our object size.
        m_obj_size = obj_size;

        // Initialize the base allocator.
        KSlabHeapImpl::Initialize();

        // Set our tracking variables.
        const size_t num_obj = (memory_size / obj_size);
        m_start = reinterpret_cast<uintptr_t>(memory);
        m_end = m_start + num_obj * obj_size;
        m_peak = m_start;

        // Free the objects.
        u8* cur = reinterpret_cast<u8*>(m_end);

        for (size_t i = 0; i < num_obj; i++) {
            cur -= obj_size;
            KSlabHeapImpl::Free(cur);
        }
    }

    size_t GetSlabHeapSize() const {
        return (m_end - m_start) / this->GetObjectSize();
    }

    size_t GetObjectSize() const {
        return m_obj_size;
    }

    void* Allocate() {
        void* obj = KSlabHeapImpl::Allocate();
        return obj;
    }

    void Free(void* obj) {
        // Don't allow freeing an object that wasn't allocated from this heap.
        const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
        ASSERT(contained);
        KSlabHeapImpl::Free(obj);
    }

    size_t GetObjectIndex(const void* obj) const {
        return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
    }

    size_t GetPeakIndex() const {
        return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak));
    }

    uintptr_t GetSlabHeapAddress() const {
        return m_start;
    }

    size_t GetNumRemaining() const {
        // Only calculate the number of remaining objects under debug configuration.
        return 0;
    }
};

template <typename T>
class KSlabHeap final : public KSlabHeapBase {
private:
    using BaseHeap = KSlabHeapBase;

public:
    constexpr KSlabHeap() = default;

    void Initialize(void* memory, size_t memory_size) {
        BaseHeap::Initialize(sizeof(T), memory, memory_size);
    }

    T* Allocate() {
        T* obj = static_cast<T*>(BaseHeap::Allocate());

        if (obj != nullptr) [[likely]] {
            std::construct_at(obj);
        }
        return obj;
    }

    T* Allocate(KernelSystem& kernel) {
        T* obj = static_cast<T*>(BaseHeap::Allocate());

        if (obj != nullptr) [[likely]] {
            std::construct_at(obj, kernel);
        }
        return obj;
    }

    void Free(T* obj) {
        BaseHeap::Free(obj);
    }

    size_t GetObjectIndex(const T* obj) const {
        return BaseHeap::GetObjectIndex(obj);
    }
};

} // namespace Kernel
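
A short sketch of the intended allocation flow follows. It is not part of the commit: the object type, the backing buffer, and the example function are hypothetical, and it assumes k_slab_heap.h is already included.

// Hypothetical sketch: back a slab heap with a fixed buffer and allocate from it.
struct DummyObject {
    explicit DummyObject(Kernel::KernelSystem&) {}
    u64 payload[4];
};

void SlabHeapExample(Kernel::KernelSystem& kernel) {
    alignas(DummyObject) static std::array<u8, sizeof(DummyObject) * 64> backing_memory;

    Kernel::KSlabHeap<DummyObject> heap;
    // Initialize() carves backing_memory into 64 fixed-size slots and pushes each
    // slot onto the intrusive free list (KSlabHeapImpl::Free).
    heap.Initialize(backing_memory.data(), backing_memory.size());

    // Allocate(kernel) pops a slot off the free list and constructs the object in place.
    DummyObject* obj = heap.Allocate(kernel);
    if (obj != nullptr) {
        heap.Free(obj); // Returns the slot to the free list; no destructor is run.
    }
}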

src/core/hle/kernel/kernel.cpp
@@ -6,10 +6,8 @@
#include <boost/serialization/unordered_map.hpp>
#include <boost/serialization/vector.hpp>
#include "common/archives.h"
#include "common/serialization/atomic.h"
#include "core/hle/kernel/client_port.h"
#include "core/hle/kernel/config_mem.h"
#include "core/hle/kernel/handle_table.h"
#include "core/hle/kernel/ipc_debugger/recorder.h"
#include "core/hle/kernel/kernel.h"
#include "core/hle/kernel/memory.h"
@@ -29,6 +27,7 @@ KernelSystem::KernelSystem(Memory::MemorySystem& memory, Core::Timing& timing,
    : memory(memory), timing(timing),
      prepare_reschedule_callback(std::move(prepare_reschedule_callback)), memory_mode(memory_mode),
      n3ds_hw_caps(n3ds_hw_caps) {
    slab_heap_container = std::make_unique<SlabHeapContainer>();
    std::generate(memory_regions.begin(), memory_regions.end(),
                  [] { return std::make_shared<MemoryRegionInfo>(); });
    MemoryInit(memory_mode, n3ds_hw_caps.memory_mode, override_init_time);
@@ -195,6 +194,21 @@ void KernelSystem::serialize(Archive& ar, const unsigned int file_version) {
    }
}

void KernelSystem::RegisterKernelObject(KAutoObject* object) {
    registered_objects.insert(object);
}

void KernelSystem::UnregisterKernelObject(KAutoObject* object) {
    registered_objects.erase(object);
}

struct KernelSystem::SlabHeapContainer {
};

template <typename T>
KSlabHeap<T>& KernelSystem::SlabHeap() {
}

SERIALIZE_IMPL(KernelSystem)

} // namespace Kernel
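
The registered-object set exists purely so that objects still alive after shutdown can be tracked. A possible debug check built on top of it could look like the sketch below; it is not part of this commit, and the helper function is hypothetical (it assumes the common logging header is included).

// Hypothetical debug helper: report any KAutoObject instances that were never
// released by the time the kernel shuts down.
void ReportLeakedKernelObjects(const std::unordered_set<KAutoObject*>& registered_objects) {
    for (KAutoObject* object : registered_objects) {
        LOG_WARNING(Kernel, "Leaked kernel object: {} (ref_count={})", object->GetTypeName(),
                    object->GetReferenceCount());
    }
}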

src/core/hle/kernel/kernel.h
@@ -8,6 +8,7 @@
#include <atomic>
#include <functional>
#include <memory>
#include <unordered_set>
#include <mutex>
#include <span>
#include <string>
@@ -58,6 +59,7 @@ class SharedMemory;
class ThreadManager;
class TimerManager;
class VMManager;
class KAutoObject;
struct AddressMapping;

enum class ResetType {
@@ -132,6 +134,9 @@ private:
    friend class boost::serialization::access;
};

template <typename T>
class KSlabHeap;

class KernelSystem {
public:
    explicit KernelSystem(Memory::MemorySystem& memory, Core::Timing& timing,
@@ -264,6 +269,18 @@ public:
    u32 GenerateObjectID();

    /// Gets the slab heap for the specified kernel object type.
    template <typename T>
    KSlabHeap<T>& SlabHeap();

    /// Registers all kernel objects with the global emulation state; this is purely for tracking
    /// leaks after emulation has been shut down.
    void RegisterKernelObject(KAutoObject* object);

    /// Unregisters a kernel object previously registered with RegisterKernelObject when it is
    /// destroyed during the current emulation session.
    void UnregisterKernelObject(KAutoObject* object);

    /// Retrieves a process from the current list of processes.
    std::shared_ptr<Process> GetProcessById(u32 process_id) const;
@@ -377,6 +394,12 @@ private:
    MemoryMode memory_mode;
    New3dsHwCapabilities n3ds_hw_caps;

    /// Helper to encapsulate all slab heaps in a single heap-allocated container
    struct SlabHeapContainer;
    std::unique_ptr<SlabHeapContainer> slab_heap_container;

    std::unordered_set<KAutoObject*> registered_objects;

    /*
     * Synchronizes access to the internal HLE kernel structures; it is acquired when a guest
     * application thread performs a syscall. It should be acquired by any host threads that read or

src/core/hle/kernel/slab_helpers.h (new file, 130 lines)
@@ -0,0 +1,130 @@
// Copyright 2023 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include "core/hle/kernel/k_auto_object.h"
#include "core/hle/kernel/kernel.h"

namespace Kernel {

template <class Derived>
class KSlabAllocated {
public:
    constexpr KSlabAllocated() = default;

    size_t GetSlabIndex(KernelSystem& kernel) const {
        return kernel.SlabHeap<Derived>().GetObjectIndex(static_cast<const Derived*>(this));
    }

public:
    static void InitializeSlabHeap(KernelSystem& kernel, void* memory, size_t memory_size) {
        kernel.SlabHeap<Derived>().Initialize(memory, memory_size);
    }

    static Derived* Allocate(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().Allocate(kernel);
    }

    static void Free(KernelSystem& kernel, Derived* obj) {
        kernel.SlabHeap<Derived>().Free(obj);
    }

    static size_t GetObjectSize(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetObjectSize();
    }

    static size_t GetSlabHeapSize(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetSlabHeapSize();
    }

    static size_t GetPeakIndex(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetPeakIndex();
    }

    static uintptr_t GetSlabHeapAddress(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetSlabHeapAddress();
    }

    static size_t GetNumRemaining(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetNumRemaining();
    }
};

template <typename Derived, typename Base>
class KAutoObjectWithSlabHeap : public Base {
    static_assert(std::is_base_of<KAutoObject, Base>::value);

private:
    static Derived* Allocate(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().Allocate(kernel);
    }

    static void Free(KernelSystem& kernel, Derived* obj) {
        kernel.SlabHeap<Derived>().Free(obj);
    }

public:
    explicit KAutoObjectWithSlabHeap(KernelSystem& kernel) : Base(kernel) {}
    virtual ~KAutoObjectWithSlabHeap() = default;

    virtual void Destroy() override {
        const bool is_initialized = this->IsInitialized();
        uintptr_t arg = 0;
        if (is_initialized) {
            arg = this->GetPostDestroyArgument();
            this->Finalize();
        }
        Free(Base::m_kernel, static_cast<Derived*>(this));
        if (is_initialized) {
            Derived::PostDestroy(arg);
        }
    }

    virtual bool IsInitialized() const {
        return true;
    }
    virtual uintptr_t GetPostDestroyArgument() const {
        return 0;
    }

    size_t GetSlabIndex() const {
        return Base::m_kernel.template SlabHeap<Derived>().GetObjectIndex(
            static_cast<const Derived*>(this));
    }

public:
    static void InitializeSlabHeap(KernelSystem& kernel, void* memory, size_t memory_size) {
        kernel.SlabHeap<Derived>().Initialize(memory, memory_size);
    }

    static Derived* Create(KernelSystem& kernel) {
        Derived* obj = Allocate(kernel);
        if (obj != nullptr) {
            KAutoObject::Create(obj);
        }
        return obj;
    }

    static size_t GetObjectSize(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetObjectSize();
    }

    static size_t GetSlabHeapSize(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetSlabHeapSize();
    }

    static size_t GetPeakIndex(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetPeakIndex();
    }

    static uintptr_t GetSlabHeapAddress(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetSlabHeapAddress();
    }

    static size_t GetNumRemaining(KernelSystem& kernel) {
        return kernel.SlabHeap<Derived>().GetNumRemaining();
    }
};

} // namespace Kernel
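
Finally, a sketch of how a kernel object type could combine KAutoObjectWithSlabHeap with the traits macro once existing types are ported. It is not part of this commit: KSemaphore here is hypothetical (only its class token exists so far), and the example assumes KernelSystem::SlabHeap<KSemaphore>() is wired up and its slab memory has been initialized, which later commits would have to provide.

// Hypothetical sketch of a future port: a kernel object type that is allocated
// from its per-type slab heap and reference-counted via KAutoObject.
class KSemaphore final : public KAutoObjectWithSlabHeap<KSemaphore, KAutoObject> {
    KERNEL_AUTOOBJECT_TRAITS(KSemaphore, KAutoObject);

public:
    explicit KSemaphore(KernelSystem& kernel) : KAutoObjectWithSlabHeap(kernel) {}

    void Initialize(s32 initial_count) {
        m_count = initial_count;
    }

    // Called by Destroy() after Finalize(); nothing extra to release here.
    static void PostDestroy([[maybe_unused]] uintptr_t arg) {}

private:
    s32 m_count{};
};

// Usage: allocate from the slab heap, then drop the reference when done.
void SemaphoreExample(KernelSystem& kernel) {
    KSemaphore* semaphore = KSemaphore::Create(kernel); // ref_count starts at 1
    if (semaphore != nullptr) {
        semaphore->Initialize(1);
        semaphore->Close(); // ref_count hits 0 -> Destroy() -> slot returned to the slab
    }
}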