Compare commits

1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | c1876c322f |  |

src/common/CMakeLists.txt

| @@ -86,6 +86,7 @@ add_library(citra_common STATIC | ||||
|     file_util.cpp | ||||
|     file_util.h | ||||
|     hash.h | ||||
|     intrusive_list.h | ||||
|     linear_disk_cache.h | ||||
|     literals.h | ||||
|     logging/backend.cpp | ||||
| @@ -107,8 +108,11 @@ add_library(citra_common STATIC | ||||
|     microprofile.h | ||||
|     microprofileui.h | ||||
|     misc.cpp | ||||
|     page_table.cpp | ||||
|     page_table.h | ||||
|     param_package.cpp | ||||
|     param_package.h | ||||
|     parent_of_member.h | ||||
|     polyfill_thread.h | ||||
|     precompiled_headers.h | ||||
|     quaternion.h | ||||
|   | ||||

src/common/common_funcs.h

| @@ -110,6 +110,14 @@ __declspec(dllimport) void __stdcall DebugBreak(void); | ||||
|         return static_cast<T>(key) == 0;                                                           \ | ||||
|     } | ||||
|  | ||||
| #define CITRA_NON_COPYABLE(cls)                                                                     \ | ||||
|     cls(const cls&) = delete;                                                                       \ | ||||
|     cls& operator=(const cls&) = delete | ||||
|  | ||||
| #define CITRA_NON_MOVEABLE(cls)                                                                     \ | ||||
|     cls(cls&&) = delete;                                                                            \ | ||||
|     cls& operator=(cls&&) = delete | ||||
|  | ||||
| // Generic function to get last error message. | ||||
| // Call directly after the command or use the error num. | ||||
| // This function might change the error code. | ||||
|   | ||||
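
The two macros above give a class an explicit way to opt out of copying and moving. A minimal usage sketch (the class name here is hypothetical, not part of the change):

```cpp
#include <utility>

#include "common/common_funcs.h"

// Hypothetical class: deleting the copy and move operations pins instances
// to the place they were constructed, which the intrusive containers added
// below rely on (a moved node would leave dangling prev/next pointers).
class ExampleResource {
    CITRA_NON_COPYABLE(ExampleResource);
    CITRA_NON_MOVEABLE(ExampleResource);

public:
    ExampleResource() = default;
};

// ExampleResource a;
// ExampleResource b = a;            // error: copy constructor is deleted
// ExampleResource c = std::move(a); // error: move constructor is deleted
```
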
							
								
								
									
src/common/intrusive_list.h (new file, 631 lines)

| @@ -0,0 +1,631 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2023 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <iterator> | ||||
| #include <memory> | ||||
| #include <type_traits> | ||||
|  | ||||
| #include "common/assert.h" | ||||
| #include "common/common_funcs.h" | ||||
| #include "common/parent_of_member.h" | ||||
|  | ||||
| namespace Common { | ||||
|  | ||||
| // Forward declare implementation class for Node. | ||||
| namespace impl { | ||||
|  | ||||
| class IntrusiveListImpl; | ||||
|  | ||||
| } | ||||
|  | ||||
| class IntrusiveListNode { | ||||
|     CITRA_NON_COPYABLE(IntrusiveListNode); | ||||
|  | ||||
| private: | ||||
|     friend class impl::IntrusiveListImpl; | ||||
|  | ||||
|     IntrusiveListNode* m_prev; | ||||
|     IntrusiveListNode* m_next; | ||||
|  | ||||
| public: | ||||
|     constexpr IntrusiveListNode() : m_prev(this), m_next(this) {} | ||||
|  | ||||
|     constexpr bool IsLinked() const { | ||||
|         return m_next != this; | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     constexpr void LinkPrev(IntrusiveListNode* node) { | ||||
|         // We can't link an already linked node. | ||||
|         ASSERT(!node->IsLinked()); | ||||
|         this->SplicePrev(node, node); | ||||
|     } | ||||
|  | ||||
|     constexpr void SplicePrev(IntrusiveListNode* first, IntrusiveListNode* last) { | ||||
|         // Splice a range into the list. | ||||
|         auto last_prev = last->m_prev; | ||||
|         first->m_prev = m_prev; | ||||
|         last_prev->m_next = this; | ||||
|         m_prev->m_next = first; | ||||
|         m_prev = last_prev; | ||||
|     } | ||||
|  | ||||
|     constexpr void LinkNext(IntrusiveListNode* node) { | ||||
|         // We can't link an already linked node. | ||||
|         ASSERT(!node->IsLinked()); | ||||
|         return this->SpliceNext(node, node); | ||||
|     } | ||||
|  | ||||
|     constexpr void SpliceNext(IntrusiveListNode* first, IntrusiveListNode* last) { | ||||
|         // Splice a range into the list. | ||||
|         auto last_prev = last->m_prev; | ||||
|         first->m_prev = this; | ||||
|         last_prev->m_next = m_next; | ||||
|         m_next->m_prev = last_prev; | ||||
|         m_next = first; | ||||
|     } | ||||
|  | ||||
|     constexpr void Unlink() { | ||||
|         this->Unlink(m_next); | ||||
|     } | ||||
|  | ||||
|     constexpr void Unlink(IntrusiveListNode* last) { | ||||
|         // Unlink the nodes in the range [this, last) from the list. | ||||
|         auto last_prev = last->m_prev; | ||||
|         m_prev->m_next = last; | ||||
|         last->m_prev = m_prev; | ||||
|         last_prev->m_next = this; | ||||
|         m_prev = last_prev; | ||||
|     } | ||||
|  | ||||
|     constexpr IntrusiveListNode* GetPrev() { | ||||
|         return m_prev; | ||||
|     } | ||||
|  | ||||
|     constexpr const IntrusiveListNode* GetPrev() const { | ||||
|         return m_prev; | ||||
|     } | ||||
|  | ||||
|     constexpr IntrusiveListNode* GetNext() { | ||||
|         return m_next; | ||||
|     } | ||||
|  | ||||
|     constexpr const IntrusiveListNode* GetNext() const { | ||||
|         return m_next; | ||||
|     } | ||||
| }; | ||||
| // DEPRECATED: static_assert(std::is_literal_type<IntrusiveListNode>::value); | ||||
|  | ||||
| namespace impl { | ||||
|  | ||||
| class IntrusiveListImpl { | ||||
|     CITRA_NON_COPYABLE(IntrusiveListImpl); | ||||
|  | ||||
| private: | ||||
|     IntrusiveListNode m_root_node; | ||||
|  | ||||
| public: | ||||
|     template <bool Const> | ||||
|     class Iterator; | ||||
|  | ||||
|     using value_type = IntrusiveListNode; | ||||
|     using size_type = size_t; | ||||
|     using difference_type = ptrdiff_t; | ||||
|     using pointer = value_type*; | ||||
|     using const_pointer = const value_type*; | ||||
|     using reference = value_type&; | ||||
|     using const_reference = const value_type&; | ||||
|     using iterator = Iterator<false>; | ||||
|     using const_iterator = Iterator<true>; | ||||
|     using reverse_iterator = std::reverse_iterator<iterator>; | ||||
|     using const_reverse_iterator = std::reverse_iterator<const_iterator>; | ||||
|  | ||||
|     template <bool Const> | ||||
|     class Iterator { | ||||
|     public: | ||||
|         using iterator_category = std::bidirectional_iterator_tag; | ||||
|         using value_type = typename IntrusiveListImpl::value_type; | ||||
|         using difference_type = typename IntrusiveListImpl::difference_type; | ||||
|         using pointer = | ||||
|             std::conditional_t<Const, IntrusiveListImpl::const_pointer, IntrusiveListImpl::pointer>; | ||||
|         using reference = std::conditional_t<Const, IntrusiveListImpl::const_reference, | ||||
|                                              IntrusiveListImpl::reference>; | ||||
|  | ||||
|     private: | ||||
|         pointer m_node; | ||||
|  | ||||
|     public: | ||||
|         constexpr explicit Iterator(pointer n) : m_node(n) {} | ||||
|  | ||||
|         constexpr bool operator==(const Iterator& rhs) const { | ||||
|             return m_node == rhs.m_node; | ||||
|         } | ||||
|  | ||||
|         constexpr pointer operator->() const { | ||||
|             return m_node; | ||||
|         } | ||||
|  | ||||
|         constexpr reference operator*() const { | ||||
|             return *m_node; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator& operator++() { | ||||
|             m_node = m_node->m_next; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator& operator--() { | ||||
|             m_node = m_node->m_prev; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator operator++(int) { | ||||
|             const Iterator it{*this}; | ||||
|             ++(*this); | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator operator--(int) { | ||||
|             const Iterator it{*this}; | ||||
|             --(*this); | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         constexpr operator Iterator<true>() const { | ||||
|             return Iterator<true>(m_node); | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator<false> GetNonConstIterator() const { | ||||
|             return Iterator<false>(const_cast<IntrusiveListImpl::pointer>(m_node)); | ||||
|         } | ||||
|     }; | ||||
|  | ||||
| public: | ||||
|     constexpr IntrusiveListImpl() : m_root_node() {} | ||||
|  | ||||
|     // Iterator accessors. | ||||
|     constexpr iterator begin() { | ||||
|         return iterator(m_root_node.GetNext()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator begin() const { | ||||
|         return const_iterator(m_root_node.GetNext()); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator end() { | ||||
|         return iterator(std::addressof(m_root_node)); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator end() const { | ||||
|         return const_iterator(std::addressof(m_root_node)); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator iterator_to(reference v) { | ||||
|         // Only allow iterator_to for values in lists. | ||||
|         ASSERT(v.IsLinked()); | ||||
|         return iterator(std::addressof(v)); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator iterator_to(const_reference v) const { | ||||
|         // Only allow iterator_to for values in lists. | ||||
|         ASSERT(v.IsLinked()); | ||||
|         return const_iterator(std::addressof(v)); | ||||
|     } | ||||
|  | ||||
|     // Content management. | ||||
|     constexpr bool empty() const { | ||||
|         return !m_root_node.IsLinked(); | ||||
|     } | ||||
|  | ||||
|     constexpr size_type size() const { | ||||
|         return static_cast<size_type>(std::distance(this->begin(), this->end())); | ||||
|     } | ||||
|  | ||||
|     constexpr reference back() { | ||||
|         return *m_root_node.GetPrev(); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reference back() const { | ||||
|         return *m_root_node.GetPrev(); | ||||
|     } | ||||
|  | ||||
|     constexpr reference front() { | ||||
|         return *m_root_node.GetNext(); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reference front() const { | ||||
|         return *m_root_node.GetNext(); | ||||
|     } | ||||
|  | ||||
|     constexpr void push_back(reference node) { | ||||
|         m_root_node.LinkPrev(std::addressof(node)); | ||||
|     } | ||||
|  | ||||
|     constexpr void push_front(reference node) { | ||||
|         m_root_node.LinkNext(std::addressof(node)); | ||||
|     } | ||||
|  | ||||
|     constexpr void pop_back() { | ||||
|         m_root_node.GetPrev()->Unlink(); | ||||
|     } | ||||
|  | ||||
|     constexpr void pop_front() { | ||||
|         m_root_node.GetNext()->Unlink(); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator insert(const_iterator pos, reference node) { | ||||
|         pos.GetNonConstIterator()->LinkPrev(std::addressof(node)); | ||||
|         return iterator(std::addressof(node)); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveListImpl& o) { | ||||
|         splice_impl(pos, o.begin(), o.end()); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveListImpl&, const_iterator first) { | ||||
|         const_iterator last(first); | ||||
|         std::advance(last, 1); | ||||
|         splice_impl(pos, first, last); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveListImpl&, const_iterator first, | ||||
|                           const_iterator last) { | ||||
|         splice_impl(pos, first, last); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator erase(const_iterator pos) { | ||||
|         if (pos == this->end()) { | ||||
|             return this->end(); | ||||
|         } | ||||
|         iterator it(pos.GetNonConstIterator()); | ||||
|         (it++)->Unlink(); | ||||
|         return it; | ||||
|     } | ||||
|  | ||||
|     constexpr void clear() { | ||||
|         while (!this->empty()) { | ||||
|             this->pop_front(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     constexpr void splice_impl(const_iterator _pos, const_iterator _first, const_iterator _last) { | ||||
|         if (_first == _last) { | ||||
|             return; | ||||
|         } | ||||
|         iterator pos(_pos.GetNonConstIterator()); | ||||
|         iterator first(_first.GetNonConstIterator()); | ||||
|         iterator last(_last.GetNonConstIterator()); | ||||
|         first->Unlink(std::addressof(*last)); | ||||
|         pos->SplicePrev(std::addressof(*first), std::addressof(*first)); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| } // namespace impl | ||||
|  | ||||
| template <class T, class Traits> | ||||
| class IntrusiveList { | ||||
|     CITRA_NON_COPYABLE(IntrusiveList); | ||||
|  | ||||
| private: | ||||
|     impl::IntrusiveListImpl m_impl; | ||||
|  | ||||
| public: | ||||
|     template <bool Const> | ||||
|     class Iterator; | ||||
|  | ||||
|     using value_type = T; | ||||
|     using size_type = size_t; | ||||
|     using difference_type = ptrdiff_t; | ||||
|     using pointer = value_type*; | ||||
|     using const_pointer = const value_type*; | ||||
|     using reference = value_type&; | ||||
|     using const_reference = const value_type&; | ||||
|     using iterator = Iterator<false>; | ||||
|     using const_iterator = Iterator<true>; | ||||
|     using reverse_iterator = std::reverse_iterator<iterator>; | ||||
|     using const_reverse_iterator = std::reverse_iterator<const_iterator>; | ||||
|  | ||||
|     template <bool Const> | ||||
|     class Iterator { | ||||
|     public: | ||||
|         friend class Common::IntrusiveList<T, Traits>; | ||||
|  | ||||
|         using ImplIterator = | ||||
|             std::conditional_t<Const, Common::impl::IntrusiveListImpl::const_iterator, | ||||
|                                Common::impl::IntrusiveListImpl::iterator>; | ||||
|  | ||||
|         using iterator_category = std::bidirectional_iterator_tag; | ||||
|         using value_type = typename IntrusiveList::value_type; | ||||
|         using difference_type = typename IntrusiveList::difference_type; | ||||
|         using pointer = | ||||
|             std::conditional_t<Const, IntrusiveList::const_pointer, IntrusiveList::pointer>; | ||||
|         using reference = | ||||
|             std::conditional_t<Const, IntrusiveList::const_reference, IntrusiveList::reference>; | ||||
|  | ||||
|     private: | ||||
|         ImplIterator m_iterator; | ||||
|  | ||||
|     private: | ||||
|         constexpr explicit Iterator(ImplIterator it) : m_iterator(it) {} | ||||
|  | ||||
|         constexpr ImplIterator GetImplIterator() const { | ||||
|             return m_iterator; | ||||
|         } | ||||
|  | ||||
|     public: | ||||
|         constexpr bool operator==(const Iterator& rhs) const { | ||||
|             return m_iterator == rhs.m_iterator; | ||||
|         } | ||||
|  | ||||
|         constexpr pointer operator->() const { | ||||
|             return std::addressof(Traits::GetParent(*m_iterator)); | ||||
|         } | ||||
|  | ||||
|         constexpr reference operator*() const { | ||||
|             return Traits::GetParent(*m_iterator); | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator& operator++() { | ||||
|             ++m_iterator; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator& operator--() { | ||||
|             --m_iterator; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator operator++(int) { | ||||
|             const Iterator it{*this}; | ||||
|             ++m_iterator; | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         constexpr Iterator operator--(int) { | ||||
|             const Iterator it{*this}; | ||||
|             --m_iterator; | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         constexpr operator Iterator<true>() const { | ||||
|             return Iterator<true>(m_iterator); | ||||
|         } | ||||
|     }; | ||||
|  | ||||
| private: | ||||
|     static constexpr IntrusiveListNode& GetNode(reference ref) { | ||||
|         return Traits::GetNode(ref); | ||||
|     } | ||||
|  | ||||
|     static constexpr IntrusiveListNode const& GetNode(const_reference ref) { | ||||
|         return Traits::GetNode(ref); | ||||
|     } | ||||
|  | ||||
|     static constexpr reference GetParent(IntrusiveListNode& node) { | ||||
|         return Traits::GetParent(node); | ||||
|     } | ||||
|  | ||||
|     static constexpr const_reference GetParent(IntrusiveListNode const& node) { | ||||
|         return Traits::GetParent(node); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     constexpr IntrusiveList() : m_impl() {} | ||||
|  | ||||
|     // Iterator accessors. | ||||
|     constexpr iterator begin() { | ||||
|         return iterator(m_impl.begin()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator begin() const { | ||||
|         return const_iterator(m_impl.begin()); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator end() { | ||||
|         return iterator(m_impl.end()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator end() const { | ||||
|         return const_iterator(m_impl.end()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator cbegin() const { | ||||
|         return this->begin(); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator cend() const { | ||||
|         return this->end(); | ||||
|     } | ||||
|  | ||||
|     constexpr reverse_iterator rbegin() { | ||||
|         return reverse_iterator(this->end()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reverse_iterator rbegin() const { | ||||
|         return const_reverse_iterator(this->end()); | ||||
|     } | ||||
|  | ||||
|     constexpr reverse_iterator rend() { | ||||
|         return reverse_iterator(this->begin()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reverse_iterator rend() const { | ||||
|         return const_reverse_iterator(this->begin()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reverse_iterator crbegin() const { | ||||
|         return this->rbegin(); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reverse_iterator crend() const { | ||||
|         return this->rend(); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator iterator_to(reference v) { | ||||
|         return iterator(m_impl.iterator_to(GetNode(v))); | ||||
|     } | ||||
|  | ||||
|     constexpr const_iterator iterator_to(const_reference v) const { | ||||
|         return const_iterator(m_impl.iterator_to(GetNode(v))); | ||||
|     } | ||||
|  | ||||
|     // Content management. | ||||
|     constexpr bool empty() const { | ||||
|         return m_impl.empty(); | ||||
|     } | ||||
|  | ||||
|     constexpr size_type size() const { | ||||
|         return m_impl.size(); | ||||
|     } | ||||
|  | ||||
|     constexpr reference back() { | ||||
|         return GetParent(m_impl.back()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reference back() const { | ||||
|         return GetParent(m_impl.back()); | ||||
|     } | ||||
|  | ||||
|     constexpr reference front() { | ||||
|         return GetParent(m_impl.front()); | ||||
|     } | ||||
|  | ||||
|     constexpr const_reference front() const { | ||||
|         return GetParent(m_impl.front()); | ||||
|     } | ||||
|  | ||||
|     constexpr void push_back(reference ref) { | ||||
|         m_impl.push_back(GetNode(ref)); | ||||
|     } | ||||
|  | ||||
|     constexpr void push_front(reference ref) { | ||||
|         m_impl.push_front(GetNode(ref)); | ||||
|     } | ||||
|  | ||||
|     constexpr void pop_back() { | ||||
|         m_impl.pop_back(); | ||||
|     } | ||||
|  | ||||
|     constexpr void pop_front() { | ||||
|         m_impl.pop_front(); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator insert(const_iterator pos, reference ref) { | ||||
|         return iterator(m_impl.insert(pos.GetImplIterator(), GetNode(ref))); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveList& o) { | ||||
|         m_impl.splice(pos.GetImplIterator(), o.m_impl); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveList& o, const_iterator first) { | ||||
|         m_impl.splice(pos.GetImplIterator(), o.m_impl, first.GetImplIterator()); | ||||
|     } | ||||
|  | ||||
|     constexpr void splice(const_iterator pos, IntrusiveList& o, const_iterator first, | ||||
|                           const_iterator last) { | ||||
|         m_impl.splice(pos.GetImplIterator(), o.m_impl, first.GetImplIterator(), | ||||
|                       last.GetImplIterator()); | ||||
|     } | ||||
|  | ||||
|     constexpr iterator erase(const_iterator pos) { | ||||
|         return iterator(m_impl.erase(pos.GetImplIterator())); | ||||
|     } | ||||
|  | ||||
|     constexpr void clear() { | ||||
|         m_impl.clear(); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <auto T, class Derived = Common::impl::GetParentType<T>> | ||||
| class IntrusiveListMemberTraits; | ||||
|  | ||||
| template <class Parent, IntrusiveListNode Parent::*Member, class Derived> | ||||
| class IntrusiveListMemberTraits<Member, Derived> { | ||||
| public: | ||||
|     using ListType = IntrusiveList<Derived, IntrusiveListMemberTraits>; | ||||
|  | ||||
| private: | ||||
|     friend class IntrusiveList<Derived, IntrusiveListMemberTraits>; | ||||
|  | ||||
|     static constexpr IntrusiveListNode& GetNode(Derived& parent) { | ||||
|         return parent.*Member; | ||||
|     } | ||||
|  | ||||
|     static constexpr IntrusiveListNode const& GetNode(Derived const& parent) { | ||||
|         return parent.*Member; | ||||
|     } | ||||
|  | ||||
|     static Derived& GetParent(IntrusiveListNode& node) { | ||||
|         return Common::GetParentReference<Member, Derived>(std::addressof(node)); | ||||
|     } | ||||
|  | ||||
|     static Derived const& GetParent(IntrusiveListNode const& node) { | ||||
|         return Common::GetParentReference<Member, Derived>(std::addressof(node)); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <auto T, class Derived = Common::impl::GetParentType<T>> | ||||
| class IntrusiveListMemberTraitsByNonConstexprOffsetOf; | ||||
|  | ||||
| template <class Parent, IntrusiveListNode Parent::*Member, class Derived> | ||||
| class IntrusiveListMemberTraitsByNonConstexprOffsetOf<Member, Derived> { | ||||
| public: | ||||
|     using ListType = IntrusiveList<Derived, IntrusiveListMemberTraitsByNonConstexprOffsetOf>; | ||||
|  | ||||
| private: | ||||
|     friend class IntrusiveList<Derived, IntrusiveListMemberTraitsByNonConstexprOffsetOf>; | ||||
|  | ||||
|     static constexpr IntrusiveListNode& GetNode(Derived& parent) { | ||||
|         return parent.*Member; | ||||
|     } | ||||
|  | ||||
|     static constexpr IntrusiveListNode const& GetNode(Derived const& parent) { | ||||
|         return parent.*Member; | ||||
|     } | ||||
|  | ||||
|     static Derived& GetParent(IntrusiveListNode& node) { | ||||
|         return *reinterpret_cast<Derived*>(reinterpret_cast<char*>(std::addressof(node)) - | ||||
|                                            GetOffset()); | ||||
|     } | ||||
|  | ||||
|     static Derived const& GetParent(IntrusiveListNode const& node) { | ||||
|         return *reinterpret_cast<const Derived*>( | ||||
|             reinterpret_cast<const char*>(std::addressof(node)) - GetOffset()); | ||||
|     } | ||||
|  | ||||
|     static uintptr_t GetOffset() { | ||||
|         return reinterpret_cast<uintptr_t>(std::addressof(reinterpret_cast<Derived*>(0)->*Member)); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <class Derived> | ||||
| class IntrusiveListBaseNode : public IntrusiveListNode {}; | ||||
|  | ||||
| template <class Derived> | ||||
| class IntrusiveListBaseTraits { | ||||
| public: | ||||
|     using ListType = IntrusiveList<Derived, IntrusiveListBaseTraits>; | ||||
|  | ||||
| private: | ||||
|     friend class IntrusiveList<Derived, IntrusiveListBaseTraits>; | ||||
|  | ||||
|     static constexpr IntrusiveListNode& GetNode(Derived& parent) { | ||||
|         return static_cast<IntrusiveListNode&>( | ||||
|             static_cast<IntrusiveListBaseNode<Derived>&>(parent)); | ||||
|     } | ||||
|  | ||||
|     static constexpr IntrusiveListNode const& GetNode(Derived const& parent) { | ||||
|         return static_cast<const IntrusiveListNode&>( | ||||
|             static_cast<const IntrusiveListBaseNode<Derived>&>(parent)); | ||||
|     } | ||||
|  | ||||
|     static constexpr Derived& GetParent(IntrusiveListNode& node) { | ||||
|         return static_cast<Derived&>(static_cast<IntrusiveListBaseNode<Derived>&>(node)); | ||||
|     } | ||||
|  | ||||
|     static constexpr Derived const& GetParent(IntrusiveListNode const& node) { | ||||
|         return static_cast<const Derived&>( | ||||
|             static_cast<const IntrusiveListBaseNode<Derived>&>(node)); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| } // namespace Common | ||||
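
To make the container concrete, here is a hedged usage sketch; the `Job` type and the function are illustrative only, but the pattern (derive from `IntrusiveListBaseNode`, build the list type from `IntrusiveListBaseTraits`) follows the traits classes defined above:

```cpp
#include "common/intrusive_list.h"

// Hypothetical element type: the list hooks are embedded in the object via
// the IntrusiveListBaseNode base, so insertion never allocates.
struct Job : Common::IntrusiveListBaseNode<Job> {
    int id{};
};

using JobList = Common::IntrusiveListBaseTraits<Job>::ListType;

int Example() {
    Job a{}, b{};
    a.id = 1;
    b.id = 2;

    JobList list;
    list.push_back(a); // links a's embedded node; a must outlive its list membership
    list.push_back(b);

    int sum = 0;
    for (Job& job : list) {
        sum += job.id; // visits jobs in insertion order: 1, then 2
    }

    list.erase(list.iterator_to(a)); // O(1) removal given only the element
    return sum;                      // 3
}
```

Because the nodes live inside the elements, the caller owns lifetime: an element must be unlinked (or the list discarded) before the element is destroyed.
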
							
								
								
									
src/common/page_table.cpp (new file, 64 lines)

| @@ -0,0 +1,64 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "common/page_table.h" | ||||
|  | ||||
| namespace Common { | ||||
|  | ||||
| PageTable::PageTable() = default; | ||||
|  | ||||
| PageTable::~PageTable() noexcept = default; | ||||
|  | ||||
| bool PageTable::BeginTraversal(TraversalEntry* out_entry, TraversalContext* out_context, | ||||
|                                VAddr address) const { | ||||
|     // Setup invalid defaults. | ||||
|     out_entry->phys_addr = 0; | ||||
|     out_entry->block_size = page_size; | ||||
|     out_context->next_page = 0; | ||||
|  | ||||
|     // Validate that we can read the actual entry. | ||||
|     const auto page = address / page_size; | ||||
|     if (page >= backing_addr.size()) { | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     // Validate that the entry is mapped. | ||||
|     const auto phys_addr = backing_addr[page]; | ||||
|     if (phys_addr == 0) { | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     // Populate the results. | ||||
|     out_entry->phys_addr = phys_addr + address; | ||||
|     out_context->next_page = page + 1; | ||||
|     out_context->next_offset = address + page_size; | ||||
|  | ||||
|     return true; | ||||
| } | ||||
|  | ||||
| bool PageTable::ContinueTraversal(TraversalEntry* out_entry, TraversalContext* context) const { | ||||
|     // Setup invalid defaults. | ||||
|     out_entry->phys_addr = 0; | ||||
|     out_entry->block_size = page_size; | ||||
|  | ||||
|     // Validate that we can read the actual entry. | ||||
|     const auto page = context->next_page; | ||||
|     if (page >= backing_addr.size()) { | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     // Validate that the entry is mapped. | ||||
|     const auto phys_addr = backing_addr[page]; | ||||
|     if (phys_addr == 0) { | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     // Populate the results. | ||||
|     out_entry->phys_addr = phys_addr + context->next_offset; | ||||
|     context->next_page = page + 1; | ||||
|     context->next_offset += page_size; | ||||
|  | ||||
|     return true; | ||||
| } | ||||
|  | ||||
| } // namespace Common | ||||
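
As a hedged illustration of how the two functions compose (the helper below is hypothetical): `BeginTraversal` validates and reports the first page, then each `ContinueTraversal` call advances the context by one page until an unmapped entry stops the walk.

```cpp
#include <cstddef>

#include "common/page_table.h"

// Hypothetical helper: counts how many contiguous bytes are mapped starting
// at `addr`, one page_size-sized block per successful traversal step.
std::size_t CountMappedBytes(const Common::PageTable& table, VAddr addr) {
    Common::PageTable::TraversalEntry entry{};
    Common::PageTable::TraversalContext context{};

    if (!table.BeginTraversal(&entry, &context, addr)) {
        return 0; // the first page is unmapped
    }

    std::size_t total = entry.block_size;
    while (table.ContinueTraversal(&entry, &context)) {
        total += entry.block_size; // block_size is always page_size here
    }
    return total;
}
```
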
							
								
								
									
src/common/page_table.h (new file, 116 lines)

| @@ -0,0 +1,116 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <array> | ||||
| #include <utility> | ||||
|  | ||||
| #include "common/common_types.h" | ||||
|  | ||||
| namespace Common { | ||||
|  | ||||
| enum class PageType : u8 { | ||||
|     /// Page is unmapped and should cause an access error. | ||||
|     Unmapped, | ||||
|     /// Page is mapped to regular memory. This is the only type you can get pointers to. | ||||
|     Memory, | ||||
|     /// Page is mapped to regular memory, but also needs to check for rasterizer cache flushing and | ||||
|     /// invalidation | ||||
|     RasterizerCachedMemory, | ||||
| }; | ||||
|  | ||||
| /** | ||||
|  * A (reasonably) fast way of allowing switchable and remappable process address spaces. It loosely | ||||
|  * mimics the way a real CPU page table works. | ||||
|  */ | ||||
| struct PageTable { | ||||
|     struct TraversalEntry { | ||||
|         u64 phys_addr{}; | ||||
|         std::size_t block_size{}; | ||||
|     }; | ||||
|  | ||||
|     struct TraversalContext { | ||||
|         u64 next_page{}; | ||||
|         u64 next_offset{}; | ||||
|     }; | ||||
|  | ||||
|     /// Number of bits reserved for attribute tagging. | ||||
|     /// This can be at most the guaranteed alignment of the pointers in the page table. | ||||
|     static constexpr int ATTRIBUTE_BITS = 2; | ||||
|     static constexpr size_t PAGE_BITS = 12; | ||||
|     static constexpr size_t NUM_ENTRIES = 1 << (32 - PAGE_BITS); | ||||
|  | ||||
|     /** | ||||
|      * Pair of host pointer and page type attribute. | ||||
|      * This uses the lower bits of a given pointer to store the attribute tag. | ||||
|      * Writing and reading the pointer attribute pair is guaranteed to be atomic for the same method | ||||
|      * call. In other words, they are guaranteed to be synchronized at all times. | ||||
|      */ | ||||
|     class PageInfo { | ||||
|     public: | ||||
|         /// Returns the page pointer | ||||
|         [[nodiscard]] uintptr_t Pointer() const noexcept { | ||||
|             return ExtractPointer(raw); | ||||
|         } | ||||
|  | ||||
|         /// Returns the page type attribute | ||||
|         [[nodiscard]] PageType Type() const noexcept { | ||||
|             return ExtractType(raw); | ||||
|         } | ||||
|  | ||||
|         /// Returns the page pointer and attribute pair, extracted from the same atomic read | ||||
|         [[nodiscard]] std::pair<uintptr_t, PageType> PointerType() const noexcept { | ||||
|             return {ExtractPointer(raw), ExtractType(raw)}; | ||||
|         } | ||||
|  | ||||
|         /// Returns the raw representation of the page information. | ||||
|         /// Use ExtractPointer and ExtractType to unpack the value. | ||||
|         [[nodiscard]] uintptr_t Raw() const noexcept { | ||||
|             return raw; | ||||
|         } | ||||
|  | ||||
|         /// Write a page pointer and type pair atomically | ||||
|         void Store(uintptr_t pointer, PageType type) noexcept { | ||||
|             raw = pointer | static_cast<uintptr_t>(type); | ||||
|         } | ||||
|  | ||||
|         /// Unpack a pointer from a page info raw representation | ||||
|         [[nodiscard]] static uintptr_t ExtractPointer(uintptr_t raw) noexcept { | ||||
|             return raw & (~uintptr_t{0} << ATTRIBUTE_BITS); | ||||
|         } | ||||
|  | ||||
|         /// Unpack a page type from a page info raw representation | ||||
|         [[nodiscard]] static PageType ExtractType(uintptr_t raw) noexcept { | ||||
|             return static_cast<PageType>(raw & ((uintptr_t{1} << ATTRIBUTE_BITS) - 1)); | ||||
|         } | ||||
|  | ||||
|     private: | ||||
|         uintptr_t raw; | ||||
|     }; | ||||
|  | ||||
|     PageTable(); | ||||
|     ~PageTable() noexcept; | ||||
|  | ||||
|     PageTable(const PageTable&) = delete; | ||||
|     PageTable& operator=(const PageTable&) = delete; | ||||
|  | ||||
|     PageTable(PageTable&&) noexcept = default; | ||||
|     PageTable& operator=(PageTable&&) noexcept = default; | ||||
|  | ||||
|     bool BeginTraversal(TraversalEntry* out_entry, TraversalContext* out_context, | ||||
|                         VAddr address) const; | ||||
|     bool ContinueTraversal(TraversalEntry* out_entry, TraversalContext* context) const; | ||||
|  | ||||
|     PAddr GetPhysicalAddress(VAddr virt_addr) const { | ||||
|         return backing_addr[virt_addr / page_size] + virt_addr; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Array of memory pointers backing each page. An entry can only be non-null if the | ||||
|      * corresponding attribute element is of type `Memory`. | ||||
|      */ | ||||
|     std::array<PageInfo, NUM_ENTRIES> pointers; | ||||
|     std::array<u64, NUM_ENTRIES> blocks; | ||||
|     std::array<u64, NUM_ENTRIES> backing_addr; | ||||
|     std::size_t page_size{}; | ||||
| }; | ||||
|  | ||||
| } // namespace Common | ||||
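
The `PageInfo` packing can be shown with a short round-trip sketch (the function is hypothetical); it relies on host page pointers being aligned to at least `1 << ATTRIBUTE_BITS` bytes so the low tag bits are otherwise zero:

```cpp
#include <cstdint>

#include "common/assert.h"
#include "common/page_table.h"

// Illustration only: store a pointer/type pair, then unpack both fields
// from the same raw word.
void PackingExample(u8* host_page) {
    Common::PageTable::PageInfo info{};
    info.Store(reinterpret_cast<uintptr_t>(host_page), Common::PageType::Memory);

    const auto [pointer, type] = info.PointerType();
    ASSERT(pointer == reinterpret_cast<uintptr_t>(host_page)); // tag bits masked off
    ASSERT(type == Common::PageType::Memory);                  // tag from the low bits
}
```
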
							
								
								
									
src/common/parent_of_member.h (new file, 190 lines)

| @@ -0,0 +1,190 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <type_traits> | ||||
|  | ||||
| #include "common/assert.h" | ||||
| #include "common/common_types.h" | ||||
|  | ||||
| namespace Common { | ||||
| namespace detail { | ||||
| template <typename T, size_t Size, size_t Align> | ||||
| struct TypedStorageImpl { | ||||
|     alignas(Align) u8 storage_[Size]; | ||||
| }; | ||||
| } // namespace detail | ||||
|  | ||||
| template <typename T> | ||||
| using TypedStorage = detail::TypedStorageImpl<T, sizeof(T), alignof(T)>; | ||||
|  | ||||
| template <typename T> | ||||
| static constexpr T* GetPointer(TypedStorage<T>& ts) { | ||||
|     return static_cast<T*>(static_cast<void*>(std::addressof(ts.storage_))); | ||||
| } | ||||
|  | ||||
| template <typename T> | ||||
| static constexpr const T* GetPointer(const TypedStorage<T>& ts) { | ||||
|     return static_cast<const T*>(static_cast<const void*>(std::addressof(ts.storage_))); | ||||
| } | ||||
|  | ||||
| namespace impl { | ||||
|  | ||||
| template <size_t MaxDepth> | ||||
| struct OffsetOfUnionHolder { | ||||
|     template <typename ParentType, typename MemberType, size_t Offset> | ||||
|     union UnionImpl { | ||||
|         using PaddingMember = char; | ||||
|         static constexpr size_t GetOffset() { | ||||
|             return Offset; | ||||
|         } | ||||
|  | ||||
| #pragma pack(push, 1) | ||||
|         struct { | ||||
|             PaddingMember padding[Offset]; | ||||
|             MemberType members[(sizeof(ParentType) / sizeof(MemberType)) + 1]; | ||||
|         } data; | ||||
| #pragma pack(pop) | ||||
|         UnionImpl<ParentType, MemberType, Offset + 1> next_union; | ||||
|     }; | ||||
|  | ||||
|     template <typename ParentType, typename MemberType> | ||||
|     union UnionImpl<ParentType, MemberType, 0> { | ||||
|         static constexpr size_t GetOffset() { | ||||
|             return 0; | ||||
|         } | ||||
|  | ||||
|         struct { | ||||
|             MemberType members[(sizeof(ParentType) / sizeof(MemberType)) + 1]; | ||||
|         } data; | ||||
|         UnionImpl<ParentType, MemberType, 1> next_union; | ||||
|     }; | ||||
|  | ||||
|     template <typename ParentType, typename MemberType> | ||||
|     union UnionImpl<ParentType, MemberType, MaxDepth> {}; | ||||
| }; | ||||
|  | ||||
| template <typename ParentType, typename MemberType> | ||||
| struct OffsetOfCalculator { | ||||
|     using UnionHolder = | ||||
|         typename OffsetOfUnionHolder<sizeof(MemberType)>::template UnionImpl<ParentType, MemberType, | ||||
|                                                                              0>; | ||||
|     union Union { | ||||
|         char c{}; | ||||
|         UnionHolder first_union; | ||||
|         TypedStorage<ParentType> parent; | ||||
|  | ||||
|         constexpr Union() : c() {} | ||||
|     }; | ||||
|     static constexpr Union U = {}; | ||||
|  | ||||
|     static constexpr const MemberType* GetNextAddress(const MemberType* start, | ||||
|                                                       const MemberType* target) { | ||||
|         while (start < target) { | ||||
|             start++; | ||||
|         } | ||||
|         return start; | ||||
|     } | ||||
|  | ||||
|     static constexpr std::ptrdiff_t GetDifference(const MemberType* start, | ||||
|                                                   const MemberType* target) { | ||||
|         return (target - start) * sizeof(MemberType); | ||||
|     } | ||||
|  | ||||
|     template <typename CurUnion> | ||||
|     static constexpr std::ptrdiff_t OffsetOfImpl(MemberType ParentType::*member, | ||||
|                                                  CurUnion& cur_union) { | ||||
|         constexpr size_t Offset = CurUnion::GetOffset(); | ||||
|         const auto target = std::addressof(GetPointer(U.parent)->*member); | ||||
|         const auto start = std::addressof(cur_union.data.members[0]); | ||||
|         const auto next = GetNextAddress(start, target); | ||||
|  | ||||
|         if (next != target) { | ||||
|             if constexpr (Offset < sizeof(MemberType) - 1) { | ||||
|                 return OffsetOfImpl(member, cur_union.next_union); | ||||
|             } else { | ||||
|                 UNREACHABLE(); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         return static_cast<ptrdiff_t>(static_cast<size_t>(next - start) * sizeof(MemberType) + | ||||
|                                       Offset); | ||||
|     } | ||||
|  | ||||
|     static constexpr std::ptrdiff_t OffsetOf(MemberType ParentType::*member) { | ||||
|         return OffsetOfImpl(member, U.first_union); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| struct GetMemberPointerTraits; | ||||
|  | ||||
| template <typename P, typename M> | ||||
| struct GetMemberPointerTraits<M P::*> { | ||||
|     using Parent = P; | ||||
|     using Member = M; | ||||
| }; | ||||
|  | ||||
| template <auto MemberPtr> | ||||
| using GetParentType = typename GetMemberPointerTraits<decltype(MemberPtr)>::Parent; | ||||
|  | ||||
| template <auto MemberPtr> | ||||
| using GetMemberType = typename GetMemberPointerTraits<decltype(MemberPtr)>::Member; | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = GetParentType<MemberPtr>> | ||||
| constexpr std::ptrdiff_t OffsetOf() { | ||||
|     using DeducedParentType = GetParentType<MemberPtr>; | ||||
|     using MemberType = GetMemberType<MemberPtr>; | ||||
|     static_assert(std::is_base_of<DeducedParentType, RealParentType>::value || | ||||
|                   std::is_same<RealParentType, DeducedParentType>::value); | ||||
|  | ||||
|     return OffsetOfCalculator<RealParentType, MemberType>::OffsetOf(MemberPtr); | ||||
| } | ||||
|  | ||||
| } // namespace impl | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType& GetParentReference(impl::GetMemberType<MemberPtr>* member) { | ||||
|     std::ptrdiff_t Offset = impl::OffsetOf<MemberPtr, RealParentType>(); | ||||
|     return *static_cast<RealParentType*>( | ||||
|         static_cast<void*>(static_cast<uint8_t*>(static_cast<void*>(member)) - Offset)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType const& GetParentReference(impl::GetMemberType<MemberPtr> const* member) { | ||||
|     std::ptrdiff_t Offset = impl::OffsetOf<MemberPtr, RealParentType>(); | ||||
|     return *static_cast<const RealParentType*>(static_cast<const void*>( | ||||
|         static_cast<const uint8_t*>(static_cast<const void*>(member)) - Offset)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType* GetParentPointer(impl::GetMemberType<MemberPtr>* member) { | ||||
|     return std::addressof(GetParentReference<MemberPtr, RealParentType>(member)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType const* GetParentPointer(impl::GetMemberType<MemberPtr> const* member) { | ||||
|     return std::addressof(GetParentReference<MemberPtr, RealParentType>(member)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType& GetParentReference(impl::GetMemberType<MemberPtr>& member) { | ||||
|     return GetParentReference<MemberPtr, RealParentType>(std::addressof(member)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType const& GetParentReference(impl::GetMemberType<MemberPtr> const& member) { | ||||
|     return GetParentReference<MemberPtr, RealParentType>(std::addressof(member)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType* GetParentPointer(impl::GetMemberType<MemberPtr>& member) { | ||||
|     return std::addressof(GetParentReference<MemberPtr, RealParentType>(member)); | ||||
| } | ||||
|  | ||||
| template <auto MemberPtr, typename RealParentType = impl::GetParentType<MemberPtr>> | ||||
| constexpr RealParentType const* GetParentPointer(impl::GetMemberType<MemberPtr> const& member) { | ||||
|     return std::addressof(GetParentReference<MemberPtr, RealParentType>(member)); | ||||
| } | ||||
|  | ||||
| } // namespace Common | ||||
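
A hedged sketch of what this header enables (the types below are hypothetical): recovering the enclosing object from a pointer to one of its members, without `offsetof`:

```cpp
#include "common/parent_of_member.h"

struct Widget {
    int id{};
    int node{}; // imagine this is the member some container points at
};

// Given only a pointer to Widget::node, recover the owning Widget.
int GetIdFromNode(int* node_ptr) {
    Widget& parent = Common::GetParentReference<&Widget::node>(node_ptr);
    return parent.id;
}
```

This is the same mechanism `IntrusiveListMemberTraits` uses to map an embedded `IntrusiveListNode` back to its parent object.
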
src/core/CMakeLists.txt

| @@ -149,6 +149,27 @@ add_library(citra_core STATIC | ||||
|     hle/kernel/ipc_debugger/recorder.h | ||||
|     hle/kernel/kernel.cpp | ||||
|     hle/kernel/kernel.h | ||||
|     hle/kernel/k_auto_object.cpp | ||||
|     hle/kernel/k_auto_object.h | ||||
|     hle/kernel/k_class_token.cpp | ||||
|     hle/kernel/k_class_token.h | ||||
|     hle/kernel/k_linked_list.cpp | ||||
|     hle/kernel/k_linked_list.h | ||||
|     hle/kernel/k_memory_block.cpp | ||||
|     hle/kernel/k_memory_block.h | ||||
|     hle/kernel/k_memory_block_manager.cpp | ||||
|     hle/kernel/k_memory_block_manager.h | ||||
|     hle/kernel/k_memory_manager.cpp | ||||
|     hle/kernel/k_memory_manager.h | ||||
|     hle/kernel/k_page_group.cpp | ||||
|     hle/kernel/k_page_group.h | ||||
|     hle/kernel/k_page_heap.cpp | ||||
|     hle/kernel/k_page_heap.h | ||||
|     hle/kernel/k_page_manager.cpp | ||||
|     hle/kernel/k_page_manager.h | ||||
|     hle/kernel/k_page_table.cpp | ||||
|     hle/kernel/k_page_table.h | ||||
|     hle/kernel/k_slab_heap.h | ||||
|     hle/kernel/memory.cpp | ||||
|     hle/kernel/memory.h | ||||
|     hle/kernel/mutex.cpp | ||||
| @@ -473,6 +494,7 @@ add_library(citra_core STATIC | ||||
|     tracer/citrace.h | ||||
|     tracer/recorder.cpp | ||||
|     tracer/recorder.h | ||||
|     hle/kernel/slab_helpers.h | ||||
| ) | ||||
|  | ||||
| create_target_directory_groups(citra_core) | ||||
|   | ||||
							
								
								
									
src/core/hle/kernel/k_auto_object.cpp (new file, 22 lines)

| @@ -0,0 +1,22 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #include "core/hle/kernel/k_auto_object.h" | ||||
| #include "core/hle/kernel/kernel.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| KAutoObject* KAutoObject::Create(KAutoObject* obj) { | ||||
|     obj->m_ref_count = 1; | ||||
|     return obj; | ||||
| } | ||||
|  | ||||
| void KAutoObject::RegisterWithKernel() { | ||||
|     m_kernel.RegisterKernelObject(this); | ||||
| } | ||||
|  | ||||
| void KAutoObject::UnregisterWithKernel(KernelSystem& kernel, KAutoObject* self) { | ||||
|     kernel.UnregisterKernelObject(self); | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
							
								
								
									
src/core/hle/kernel/k_auto_object.h (new file, 268 lines)

| @@ -0,0 +1,268 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <atomic> | ||||
| #include <concepts> | ||||
| #include <string> | ||||
| #include <type_traits> | ||||
| #include <utility> | ||||
|  | ||||
| #include "common/assert.h" | ||||
| #include "common/common_funcs.h" | ||||
| #include "common/common_types.h" | ||||
| #include "core/hle/kernel/k_class_token.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KernelSystem; | ||||
| class KProcess; | ||||
|  | ||||
| #define KERNEL_AUTOOBJECT_TRAITS_IMPL(CLASS, BASE_CLASS, ATTRIBUTE)                                \ | ||||
| private:                                                                                           \ | ||||
|     friend class ::Kernel::KClassTokenGenerator;                                                   \ | ||||
|     static constexpr inline auto ObjectType = ::Kernel::KClassTokenGenerator::ObjectType::CLASS;   \ | ||||
|     static constexpr inline const char* const TypeName = #CLASS;                                   \ | ||||
|     static constexpr inline ClassTokenType ClassToken() { return ::Kernel::ClassToken<CLASS>; }    \ | ||||
|                                                                                                    \ | ||||
| public:                                                                                            \ | ||||
|     CITRA_NON_COPYABLE(CLASS);                                                                     \ | ||||
|     CITRA_NON_MOVEABLE(CLASS);                                                                     \ | ||||
|                                                                                                    \ | ||||
|     using BaseClass = BASE_CLASS;                                                                  \ | ||||
|     static constexpr TypeObj GetStaticTypeObj() {                                                  \ | ||||
|         constexpr ClassTokenType Token = ClassToken();                                             \ | ||||
|         return TypeObj(TypeName, Token);                                                           \ | ||||
|     }                                                                                              \ | ||||
|     static constexpr const char* GetStaticTypeName() { return TypeName; }                          \ | ||||
|     virtual TypeObj GetTypeObj() ATTRIBUTE { return GetStaticTypeObj(); }                          \ | ||||
|     virtual const char* GetTypeName() ATTRIBUTE { return GetStaticTypeName(); }                    \ | ||||
|                                                                                                    \ | ||||
| private:                                                                                           \ | ||||
|     constexpr bool operator!=(const TypeObj& rhs) | ||||
|  | ||||
| #define KERNEL_AUTOOBJECT_TRAITS(CLASS, BASE_CLASS)                                                \ | ||||
|     KERNEL_AUTOOBJECT_TRAITS_IMPL(CLASS, BASE_CLASS, const override) | ||||
|  | ||||
| class KAutoObject { | ||||
| protected: | ||||
|     class TypeObj { | ||||
|     public: | ||||
|         constexpr explicit TypeObj(const char* n, ClassTokenType tok) | ||||
|             : m_name(n), m_class_token(tok) {} | ||||
|  | ||||
|         constexpr const char* GetName() const { | ||||
|             return m_name; | ||||
|         } | ||||
|         constexpr ClassTokenType GetClassToken() const { | ||||
|             return m_class_token; | ||||
|         } | ||||
|  | ||||
|         constexpr bool operator==(const TypeObj& rhs) const { | ||||
|             return this->GetClassToken() == rhs.GetClassToken(); | ||||
|         } | ||||
|  | ||||
|         constexpr bool operator!=(const TypeObj& rhs) const { | ||||
|             return this->GetClassToken() != rhs.GetClassToken(); | ||||
|         } | ||||
|  | ||||
|         constexpr bool IsDerivedFrom(const TypeObj& rhs) const { | ||||
|             return (this->GetClassToken() | rhs.GetClassToken()) == this->GetClassToken(); | ||||
|         } | ||||
|  | ||||
|     private: | ||||
|         const char* m_name; | ||||
|         ClassTokenType m_class_token; | ||||
|     }; | ||||
|  | ||||
| private: | ||||
|     KERNEL_AUTOOBJECT_TRAITS_IMPL(KAutoObject, KAutoObject, const); | ||||
|  | ||||
| public: | ||||
|     explicit KAutoObject(KernelSystem& kernel) : m_kernel(kernel) { | ||||
|         RegisterWithKernel(); | ||||
|     } | ||||
|     virtual ~KAutoObject() = default; | ||||
|  | ||||
|     static KAutoObject* Create(KAutoObject* ptr); | ||||
|  | ||||
|     // Destroy is responsible for destroying the auto object's resources when ref_count hits zero. | ||||
|     virtual void Destroy() { | ||||
|         UNIMPLEMENTED(); | ||||
|     } | ||||
|  | ||||
|     // Finalize is responsible for cleaning up resources, but does not destroy the object. | ||||
|     virtual void Finalize() {} | ||||
|  | ||||
|     virtual KProcess* GetOwner() const { | ||||
|         return nullptr; | ||||
|     } | ||||
|  | ||||
|     u32 GetReferenceCount() const { | ||||
|         return m_ref_count.load(); | ||||
|     } | ||||
|  | ||||
|     bool IsDerivedFrom(const TypeObj& rhs) const { | ||||
|         return this->GetTypeObj().IsDerivedFrom(rhs); | ||||
|     } | ||||
|  | ||||
|     bool IsDerivedFrom(const KAutoObject& rhs) const { | ||||
|         return this->IsDerivedFrom(rhs.GetTypeObj()); | ||||
|     } | ||||
|  | ||||
|     template <typename Derived> | ||||
|     Derived DynamicCast() { | ||||
|         static_assert(std::is_pointer_v<Derived>); | ||||
|         using DerivedType = std::remove_pointer_t<Derived>; | ||||
|  | ||||
|         if (this->IsDerivedFrom(DerivedType::GetStaticTypeObj())) { | ||||
|             return static_cast<Derived>(this); | ||||
|         } else { | ||||
|             return nullptr; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     template <typename Derived> | ||||
|     const Derived DynamicCast() const { | ||||
|         static_assert(std::is_pointer_v<Derived>); | ||||
|         using DerivedType = std::remove_pointer_t<Derived>; | ||||
|  | ||||
|         if (this->IsDerivedFrom(DerivedType::GetStaticTypeObj())) { | ||||
|             return static_cast<Derived>(this); | ||||
|         } else { | ||||
|             return nullptr; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     bool Open() { | ||||
|         // Atomically increment the reference count, only if it's positive. | ||||
|         u32 cur_ref_count = m_ref_count.load(std::memory_order_acquire); | ||||
|         do { | ||||
|             if (cur_ref_count == 0) { | ||||
|                 return false; | ||||
|             } | ||||
|             ASSERT(cur_ref_count < cur_ref_count + 1); | ||||
|         } while (!m_ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count + 1, | ||||
|                                                     std::memory_order_relaxed)); | ||||
|  | ||||
|         return true; | ||||
|     } | ||||
|  | ||||
|     void Close() { | ||||
|         // Atomically decrement the reference count, not allowing it to become negative. | ||||
|         u32 cur_ref_count = m_ref_count.load(std::memory_order_acquire); | ||||
|         do { | ||||
|             ASSERT(cur_ref_count > 0); | ||||
|         } while (!m_ref_count.compare_exchange_weak(cur_ref_count, cur_ref_count - 1, | ||||
|                                                     std::memory_order_acq_rel)); | ||||
|  | ||||
|         // If ref count hits zero, destroy the object. | ||||
|         if (cur_ref_count - 1 == 0) { | ||||
|             KernelSystem& kernel = m_kernel; | ||||
|             this->Destroy(); | ||||
|             KAutoObject::UnregisterWithKernel(kernel, this); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     void RegisterWithKernel(); | ||||
|     static void UnregisterWithKernel(KernelSystem& kernel, KAutoObject* self); | ||||
|  | ||||
| protected: | ||||
|     KernelSystem& m_kernel; | ||||
|  | ||||
| private: | ||||
|     std::atomic<u32> m_ref_count{}; | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| class KScopedAutoObject { | ||||
| public: | ||||
|     CITRA_NON_COPYABLE(KScopedAutoObject); | ||||
|  | ||||
|     constexpr KScopedAutoObject() = default; | ||||
|  | ||||
|     constexpr KScopedAutoObject(T* o) : m_obj(o) { | ||||
|         if (m_obj != nullptr) { | ||||
|             m_obj->Open(); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     ~KScopedAutoObject() { | ||||
|         if (m_obj != nullptr) { | ||||
|             m_obj->Close(); | ||||
|         } | ||||
|         m_obj = nullptr; | ||||
|     } | ||||
|  | ||||
|     template <typename U> | ||||
|         requires(std::derived_from<T, U> || std::derived_from<U, T>) | ||||
|     constexpr KScopedAutoObject(KScopedAutoObject<U>&& rhs) { | ||||
|         if constexpr (std::derived_from<U, T>) { | ||||
|             // Upcast. | ||||
|             m_obj = rhs.m_obj; | ||||
|             rhs.m_obj = nullptr; | ||||
|         } else { | ||||
|             // Downcast. | ||||
|             T* derived = nullptr; | ||||
|             if (rhs.m_obj != nullptr) { | ||||
|                 derived = rhs.m_obj->template DynamicCast<T*>(); | ||||
|                 if (derived == nullptr) { | ||||
|                     rhs.m_obj->Close(); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             m_obj = derived; | ||||
|             rhs.m_obj = nullptr; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     constexpr KScopedAutoObject<T>& operator=(KScopedAutoObject<T>&& rhs) { | ||||
|         rhs.Swap(*this); | ||||
|         return *this; | ||||
|     } | ||||
|  | ||||
|     constexpr T* operator->() { | ||||
|         return m_obj; | ||||
|     } | ||||
|     constexpr T& operator*() { | ||||
|         return *m_obj; | ||||
|     } | ||||
|  | ||||
|     constexpr void Reset(T* o) { | ||||
|         KScopedAutoObject(o).Swap(*this); | ||||
|     } | ||||
|  | ||||
|     constexpr T* GetPointerUnsafe() { | ||||
|         return m_obj; | ||||
|     } | ||||
|  | ||||
|     constexpr T* GetPointerUnsafe() const { | ||||
|         return m_obj; | ||||
|     } | ||||
|  | ||||
|     constexpr T* ReleasePointerUnsafe() { | ||||
|         T* ret = m_obj; | ||||
|         m_obj = nullptr; | ||||
|         return ret; | ||||
|     } | ||||
|  | ||||
|     constexpr bool IsNull() const { | ||||
|         return m_obj == nullptr; | ||||
|     } | ||||
|     constexpr bool IsNotNull() const { | ||||
|         return m_obj != nullptr; | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     template <typename U> | ||||
|     friend class KScopedAutoObject; | ||||
|  | ||||
| private: | ||||
|     T* m_obj{}; | ||||
|  | ||||
| private: | ||||
|     constexpr void Swap(KScopedAutoObject& rhs) noexcept { | ||||
|         std::swap(m_obj, rhs.m_obj); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
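KScopedAutoObject ties the reference count of a KAutoObject to a C++ scope: construction calls Open(), destruction calls Close(), and a failed downcast closes the source object so no reference leaks. A minimal usage sketch follows; KEvent is a real type in this branch, but Signal() and the call site are illustrative assumptions, not code from the diff:

    // Sketch only: Signal() is a hypothetical member used for illustration.
    void SignalExample(Kernel::KEvent* raw) {
        Kernel::KScopedAutoObject event{raw}; // Open() bumps the ref count
        if (event.IsNotNull()) {
            event->Signal(); // hypothetical member function
        }
    } // ~KScopedAutoObject() calls Close(); a count of zero triggers Destroy()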
src/core/hle/kernel/k_class_token.cpp (new file, 125 lines)
							| @@ -0,0 +1,125 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #include "core/hle/kernel/k_auto_object.h" | ||||
| #include "core/hle/kernel/k_class_token.h" | ||||
| #include "core/hle/kernel/k_client_port.h" | ||||
| #include "core/hle/kernel/k_client_session.h" | ||||
| #include "core/hle/kernel/k_code_memory.h" | ||||
| #include "core/hle/kernel/k_event.h" | ||||
| #include "core/hle/kernel/k_port.h" | ||||
| #include "core/hle/kernel/k_process.h" | ||||
| #include "core/hle/kernel/k_readable_event.h" | ||||
| #include "core/hle/kernel/k_resource_limit.h" | ||||
| #include "core/hle/kernel/k_server_port.h" | ||||
| #include "core/hle/kernel/k_server_session.h" | ||||
| #include "core/hle/kernel/k_session.h" | ||||
| #include "core/hle/kernel/k_shared_memory.h" | ||||
| #include "core/hle/kernel/k_synchronization_object.h" | ||||
| #include "core/hle/kernel/k_system_resource.h" | ||||
| #include "core/hle/kernel/k_thread.h" | ||||
| #include "core/hle/kernel/k_transfer_memory.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| // Ensure that we generate correct class tokens for all types. | ||||
|  | ||||
| // Ensure that the absolute token values are correct. | ||||
| static_assert(ClassToken<KAutoObject> == 0b00000000'00000000); | ||||
| static_assert(ClassToken<KSynchronizationObject> == 0b00000000'00000001); | ||||
| static_assert(ClassToken<KReadableEvent> == 0b00000000'00000011); | ||||
| // static_assert(ClassToken<KInterruptEvent> == 0b00000111'00000011); | ||||
| // static_assert(ClassToken<KDebug> == 0b00001011'00000001); | ||||
| static_assert(ClassToken<KThread> == 0b00010011'00000001); | ||||
| static_assert(ClassToken<KServerPort> == 0b00100011'00000001); | ||||
| static_assert(ClassToken<KServerSession> == 0b01000011'00000001); | ||||
| static_assert(ClassToken<KClientPort> == 0b10000011'00000001); | ||||
| static_assert(ClassToken<KClientSession> == 0b00001101'00000000); | ||||
| static_assert(ClassToken<KProcess> == 0b00010101'00000001); | ||||
| static_assert(ClassToken<KResourceLimit> == 0b00100101'00000000); | ||||
| // static_assert(ClassToken<KLightSession> == 0b01000101'00000000); | ||||
| static_assert(ClassToken<KPort> == 0b10000101'00000000); | ||||
| static_assert(ClassToken<KSession> == 0b00011001'00000000); | ||||
| static_assert(ClassToken<KSharedMemory> == 0b00101001'00000000); | ||||
| static_assert(ClassToken<KEvent> == 0b01001001'00000000); | ||||
| // static_assert(ClassToken<KLightClientSession> == 0b00110001'00000000); | ||||
| // static_assert(ClassToken<KLightServerSession> == 0b01010001'00000000); | ||||
| static_assert(ClassToken<KTransferMemory> == 0b01010001'00000000); | ||||
| // static_assert(ClassToken<KDeviceAddressSpace> == 0b01100001'00000000); | ||||
| // static_assert(ClassToken<KSessionRequest> == 0b10100001'00000000); | ||||
| static_assert(ClassToken<KCodeMemory> == 0b10100001'00000000); | ||||
|  | ||||
| // Ensure that the token hierarchy is correct. | ||||
|  | ||||
| // Base classes | ||||
| static_assert(ClassToken<KAutoObject> == (0b00000000)); | ||||
| static_assert(ClassToken<KSynchronizationObject> == (0b00000001 | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KReadableEvent> == (0b00000010 | ClassToken<KSynchronizationObject>)); | ||||
|  | ||||
| // Final classes | ||||
| // static_assert(ClassToken<KInterruptEvent> == ((0b00000111 << 8) | ClassToken<KReadableEvent>)); | ||||
| // static_assert(ClassToken<KDebug> == ((0b00001011 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KThread> == ((0b00010011 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KServerPort> == ((0b00100011 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KServerSession> == | ||||
|               ((0b01000011 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KClientPort> == ((0b10000011 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KClientSession> == ((0b00001101 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KProcess> == ((0b00010101 << 8) | ClassToken<KSynchronizationObject>)); | ||||
| static_assert(ClassToken<KResourceLimit> == ((0b00100101 << 8) | ClassToken<KAutoObject>)); | ||||
| // static_assert(ClassToken<KLightSession> == ((0b01000101 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KPort> == ((0b10000101 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KSession> == ((0b00011001 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KSharedMemory> == ((0b00101001 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KEvent> == ((0b01001001 << 8) | ClassToken<KAutoObject>)); | ||||
| // static_assert(ClassToken<KLightClientSession> == ((0b00110001 << 8) | ClassToken<KAutoObject>)); | ||||
| // static_assert(ClassToken<KLightServerSession> == ((0b01010001 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KTransferMemory> == ((0b01010001 << 8) | ClassToken<KAutoObject>)); | ||||
| // static_assert(ClassToken<KDeviceAddressSpace> == ((0b01100001 << 8) | ClassToken<KAutoObject>)); | ||||
| // static_assert(ClassToken<KSessionRequest> == ((0b10100001 << 8) | ClassToken<KAutoObject>)); | ||||
| static_assert(ClassToken<KCodeMemory> == ((0b10100001 << 8) | ClassToken<KAutoObject>)); | ||||
|  | ||||
| // Ensure that the token hierarchy reflects the class hierarchy. | ||||
|  | ||||
| // Base classes. | ||||
| static_assert(!std::is_final_v<KSynchronizationObject> && | ||||
|               std::is_base_of_v<KAutoObject, KSynchronizationObject>); | ||||
| static_assert(!std::is_final_v<KReadableEvent> && | ||||
|               std::is_base_of_v<KSynchronizationObject, KReadableEvent>); | ||||
|  | ||||
| // Final classes | ||||
| // static_assert(std::is_final_v<KInterruptEvent> && | ||||
| //              std::is_base_of_v<KReadableEvent, KInterruptEvent>); | ||||
| // static_assert(std::is_final_v<KDebug> && | ||||
| //              std::is_base_of_v<KSynchronizationObject, KDebug>); | ||||
| static_assert(std::is_final_v<KThread> && std::is_base_of_v<KSynchronizationObject, KThread>); | ||||
| static_assert(std::is_final_v<KServerPort> && | ||||
|               std::is_base_of_v<KSynchronizationObject, KServerPort>); | ||||
| static_assert(std::is_final_v<KServerSession> && | ||||
|               std::is_base_of_v<KSynchronizationObject, KServerSession>); | ||||
| static_assert(std::is_final_v<KClientPort> && | ||||
|               std::is_base_of_v<KSynchronizationObject, KClientPort>); | ||||
| static_assert(std::is_final_v<KClientSession> && std::is_base_of_v<KAutoObject, KClientSession>); | ||||
| static_assert(std::is_final_v<KProcess> && std::is_base_of_v<KSynchronizationObject, KProcess>); | ||||
| static_assert(std::is_final_v<KResourceLimit> && std::is_base_of_v<KAutoObject, KResourceLimit>); | ||||
| // static_assert(std::is_final_v<KLightSession> && | ||||
| //              std::is_base_of_v<KAutoObject, KLightSession>); | ||||
| static_assert(std::is_final_v<KPort> && std::is_base_of_v<KAutoObject, KPort>); | ||||
| static_assert(std::is_final_v<KSession> && std::is_base_of_v<KAutoObject, KSession>); | ||||
| static_assert(std::is_final_v<KSharedMemory> && std::is_base_of_v<KAutoObject, KSharedMemory>); | ||||
| static_assert(std::is_final_v<KEvent> && std::is_base_of_v<KAutoObject, KEvent>); | ||||
| // static_assert(std::is_final_v<KLightClientSession> && | ||||
| //              std::is_base_of_v<KAutoObject, KLightClientSession>); | ||||
| // static_assert(std::is_final_v<KLightServerSession> && | ||||
| //              std::is_base_of_v<KAutoObject, KLightServerSession>); | ||||
| static_assert(std::is_final_v<KTransferMemory> && std::is_base_of_v<KAutoObject, KTransferMemory>); | ||||
| // static_assert(std::is_final_v<KDeviceAddressSpace> && | ||||
| //              std::is_base_of_v<KAutoObject, KDeviceAddressSpace>); | ||||
| // static_assert(std::is_final_v<KSessionRequest> && | ||||
| //              std::is_base_of_v<KAutoObject, KSessionRequest>); | ||||
| // static_assert(std::is_final_v<KCodeMemory> && | ||||
| //              std::is_base_of_v<KAutoObject, KCodeMemory>); | ||||
|  | ||||
| static_assert(std::is_base_of_v<KAutoObject, KSystemResource>); | ||||
|  | ||||
| } // namespace Kernel | ||||
src/core/hle/kernel/k_class_token.h (new file, 111 lines)
							| @@ -0,0 +1,111 @@ | ||||
| // SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project | ||||
| // SPDX-License-Identifier: GPL-2.0-or-later | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "common/common_types.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KAutoObject; | ||||
| class KSynchronizationObject; | ||||
|  | ||||
| class KClassTokenGenerator { | ||||
| public: | ||||
|     using TokenBaseType = u8; | ||||
|  | ||||
| public: | ||||
|     static constexpr size_t BaseClassBits = 1; | ||||
|     static constexpr size_t FinalClassBits = (sizeof(TokenBaseType) * CHAR_BIT) - BaseClassBits - 1; | ||||
|     // One bit per base class. | ||||
|     static constexpr size_t NumBaseClasses = BaseClassBits; | ||||
|     // Final classes are permutations of three bits. | ||||
|     static constexpr size_t NumFinalClasses = [] { | ||||
|         TokenBaseType index = 0; | ||||
|         for (size_t i = 0; i < FinalClassBits; i++) { | ||||
|             for (size_t j = i + 1; j < FinalClassBits; j++) { | ||||
|                 for (size_t k = j + 1; k < FinalClassBits; k++) { | ||||
|                     index++; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         return index; | ||||
|     }(); | ||||
|  | ||||
| private: | ||||
|     template <TokenBaseType Index> | ||||
|     static constexpr inline TokenBaseType BaseClassToken = 1U << Index; | ||||
|  | ||||
|     template <TokenBaseType Index> | ||||
|     static constexpr inline TokenBaseType FinalClassToken = [] { | ||||
|         TokenBaseType index = 0; | ||||
|         for (size_t i = 0; i < FinalClassBits; i++) { | ||||
|             for (size_t j = i + 1; j < FinalClassBits; j++) { | ||||
|                 for (size_t k = j + 1; k < FinalClassBits; k++) { | ||||
|                     if ((index++) == Index) { | ||||
|                         return static_cast<TokenBaseType>(((1ULL << i) | (1ULL << j) | (1ULL << k)) | ||||
|                                                           << BaseClassBits); | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         UNREACHABLE(); | ||||
|     }(); | ||||
|  | ||||
|     template <typename T> | ||||
|     static constexpr inline TokenBaseType GetClassToken() { | ||||
|         static_assert(std::is_base_of<KAutoObject, T>::value); | ||||
|         if constexpr (std::is_same<T, KAutoObject>::value) { | ||||
|             static_assert(T::ObjectType == ObjectType::KAutoObject); | ||||
|             return 0; | ||||
|         } else if constexpr (std::is_same<T, KSynchronizationObject>::value) { | ||||
|             static_assert(T::ObjectType == ObjectType::KSynchronizationObject); | ||||
|             return 1; | ||||
|         } else if constexpr (ObjectType::FinalClassesStart <= T::ObjectType && | ||||
|                              T::ObjectType < ObjectType::FinalClassesEnd) { | ||||
|             constexpr auto ClassIndex = static_cast<TokenBaseType>(T::ObjectType) - | ||||
|                                         static_cast<TokenBaseType>(ObjectType::FinalClassesStart); | ||||
|             return FinalClassToken<ClassIndex> | GetClassToken<typename T::BaseClass>(); | ||||
|         } else { | ||||
|             static_assert(!std::is_same<T, T>::value, "GetClassToken: Invalid Type"); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     enum class ObjectType { | ||||
|         KAutoObject, | ||||
|         KSynchronizationObject, | ||||
|  | ||||
|         FinalClassesStart, | ||||
|         KSemaphore, | ||||
|         KEvent, | ||||
|         KTimer, | ||||
|         KMutex, | ||||
|         KDebug, | ||||
|         KServerPort, | ||||
|         KDmaObject, | ||||
|         KClientPort, | ||||
|         KCodeSet, | ||||
|         KSession, | ||||
|         KThread, | ||||
|         KServerSession, | ||||
|         KAddressArbiter, | ||||
|         KClientSession, | ||||
|         KPort, | ||||
|         KSharedMemory, | ||||
|         KProcess, | ||||
|         KResourceLimit, | ||||
|  | ||||
|         FinalClassesEnd = FinalClassesStart + NumFinalClasses, | ||||
|     }; | ||||
|  | ||||
|     template <typename T> | ||||
|     static constexpr inline TokenBaseType ClassToken = GetClassToken<T>(); | ||||
| }; | ||||
|  | ||||
| using ClassTokenType = KClassTokenGenerator::TokenBaseType; | ||||
|  | ||||
| template <typename T> | ||||
| static constexpr inline ClassTokenType ClassToken = KClassTokenGenerator::ClassToken<T>; | ||||
|  | ||||
| } // namespace Kernel | ||||
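The payoff of this bit layout is that an is-a test against any base class reduces to a single mask comparison on the 8-bit token, with no RTTI. A sketch of the property, relying only on the containment guaranteed by GetClassToken above; the GetClassToken() accessor in the commented runtime form is an assumed name:

    // Derived tokens OR-in every base token, so containment is a mask test.
    static_assert((Kernel::ClassToken<Kernel::KThread> &
                   Kernel::ClassToken<Kernel::KSynchronizationObject>) ==
                  Kernel::ClassToken<Kernel::KSynchronizationObject>);

    // Hypothetical runtime form of the same check:
    // bool IsSyncObject(const Kernel::KAutoObject* obj) {
    //     constexpr auto Mask = Kernel::ClassToken<Kernel::KSynchronizationObject>;
    //     return (obj->GetClassToken() & Mask) == Mask;
    // }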
src/core/hle/kernel/k_linked_list.cpp (new file, 0 lines)

src/core/hle/kernel/k_linked_list.h (new file, 237 lines)
							| @@ -0,0 +1,237 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "common/intrusive_list.h" | ||||
| #include "core/hle/kernel/slab_helpers.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KernelSystem; | ||||
|  | ||||
| class KLinkedListNode : public Common::IntrusiveListBaseNode<KLinkedListNode>, | ||||
|                         public KSlabAllocated<KLinkedListNode> { | ||||
|  | ||||
| public: | ||||
|     explicit KLinkedListNode(KernelSystem&) {} | ||||
|     KLinkedListNode() = default; | ||||
|  | ||||
|     void Initialize(void* it) { | ||||
|         m_item = it; | ||||
|     } | ||||
|  | ||||
|     void* GetItem() const { | ||||
|         return m_item; | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     void* m_item = nullptr; | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| class KLinkedList : private Common::IntrusiveListBaseTraits<KLinkedListNode>::ListType { | ||||
| private: | ||||
|     using BaseList = Common::IntrusiveListBaseTraits<KLinkedListNode>::ListType; | ||||
|  | ||||
| public: | ||||
|     template <bool Const> | ||||
|     class Iterator; | ||||
|  | ||||
|     using value_type = T; | ||||
|     using size_type = size_t; | ||||
|     using difference_type = ptrdiff_t; | ||||
|     using pointer = value_type*; | ||||
|     using const_pointer = const value_type*; | ||||
|     using reference = value_type&; | ||||
|     using const_reference = const value_type&; | ||||
|     using iterator = Iterator<false>; | ||||
|     using const_iterator = Iterator<true>; | ||||
|     using reverse_iterator = std::reverse_iterator<iterator>; | ||||
|     using const_reverse_iterator = std::reverse_iterator<const_iterator>; | ||||
|  | ||||
|     template <bool Const> | ||||
|     class Iterator { | ||||
|     private: | ||||
|         using BaseIterator = BaseList::iterator; | ||||
|         friend class KLinkedList; | ||||
|  | ||||
|     public: | ||||
|         using iterator_category = std::bidirectional_iterator_tag; | ||||
|         using value_type = typename KLinkedList::value_type; | ||||
|         using difference_type = typename KLinkedList::difference_type; | ||||
|         using pointer = std::conditional_t<Const, KLinkedList::const_pointer, KLinkedList::pointer>; | ||||
|         using reference = | ||||
|             std::conditional_t<Const, KLinkedList::const_reference, KLinkedList::reference>; | ||||
|  | ||||
|     public: | ||||
|         explicit Iterator(BaseIterator it) : m_base_it(it) {} | ||||
|  | ||||
|         pointer GetItem() const { | ||||
|             return static_cast<pointer>(m_base_it->GetItem()); | ||||
|         } | ||||
|  | ||||
|         bool operator==(const Iterator& rhs) const { | ||||
|             return m_base_it == rhs.m_base_it; | ||||
|         } | ||||
|  | ||||
|         bool operator!=(const Iterator& rhs) const { | ||||
|             return !(*this == rhs); | ||||
|         } | ||||
|  | ||||
|         pointer operator->() const { | ||||
|             return this->GetItem(); | ||||
|         } | ||||
|  | ||||
|         reference operator*() const { | ||||
|             return *this->GetItem(); | ||||
|         } | ||||
|  | ||||
|         Iterator& operator++() { | ||||
|             ++m_base_it; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         Iterator& operator--() { | ||||
|             --m_base_it; | ||||
|             return *this; | ||||
|         } | ||||
|  | ||||
|         Iterator operator++(int) { | ||||
|             const Iterator it{*this}; | ||||
|             ++(*this); | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         Iterator operator--(int) { | ||||
|             const Iterator it{*this}; | ||||
|             --(*this); | ||||
|             return it; | ||||
|         } | ||||
|  | ||||
|         operator Iterator<true>() const { | ||||
|             return Iterator<true>(m_base_it); | ||||
|         } | ||||
|  | ||||
|     private: | ||||
|         BaseIterator m_base_it; | ||||
|     }; | ||||
|  | ||||
| public: | ||||
|     constexpr KLinkedList(KernelSystem& kernel_) : BaseList(), kernel{kernel_} {} | ||||
|  | ||||
|     ~KLinkedList() { | ||||
|         // Erase all elements. | ||||
|         for (auto it = begin(); it != end(); it = erase(it)) { | ||||
|         } | ||||
|  | ||||
|         // Ensure we succeeded. | ||||
|         ASSERT(this->empty()); | ||||
|     } | ||||
|  | ||||
|     // Iterator accessors. | ||||
|     iterator begin() { | ||||
|         return iterator(BaseList::begin()); | ||||
|     } | ||||
|  | ||||
|     const_iterator begin() const { | ||||
|         return const_iterator(BaseList::begin()); | ||||
|     } | ||||
|  | ||||
|     iterator end() { | ||||
|         return iterator(BaseList::end()); | ||||
|     } | ||||
|  | ||||
|     const_iterator end() const { | ||||
|         return const_iterator(BaseList::end()); | ||||
|     } | ||||
|  | ||||
|     const_iterator cbegin() const { | ||||
|         return this->begin(); | ||||
|     } | ||||
|  | ||||
|     const_iterator cend() const { | ||||
|         return this->end(); | ||||
|     } | ||||
|  | ||||
|     reverse_iterator rbegin() { | ||||
|         return reverse_iterator(this->end()); | ||||
|     } | ||||
|  | ||||
|     const_reverse_iterator rbegin() const { | ||||
|         return const_reverse_iterator(this->end()); | ||||
|     } | ||||
|  | ||||
|     reverse_iterator rend() { | ||||
|         return reverse_iterator(this->begin()); | ||||
|     } | ||||
|  | ||||
|     const_reverse_iterator rend() const { | ||||
|         return const_reverse_iterator(this->begin()); | ||||
|     } | ||||
|  | ||||
|     const_reverse_iterator crbegin() const { | ||||
|         return this->rbegin(); | ||||
|     } | ||||
|  | ||||
|     const_reverse_iterator crend() const { | ||||
|         return this->rend(); | ||||
|     } | ||||
|  | ||||
|     // Content management. | ||||
|     using BaseList::empty; | ||||
|     using BaseList::size; | ||||
|  | ||||
|     reference back() { | ||||
|         return *(--this->end()); | ||||
|     } | ||||
|  | ||||
|     const_reference back() const { | ||||
|         return *(--this->end()); | ||||
|     } | ||||
|  | ||||
|     reference front() { | ||||
|         return *this->begin(); | ||||
|     } | ||||
|  | ||||
|     const_reference front() const { | ||||
|         return *this->begin(); | ||||
|     } | ||||
|  | ||||
|     iterator insert(const_iterator pos, reference ref) { | ||||
|         KLinkedListNode* new_node = KLinkedListNode::Allocate(kernel); | ||||
|         ASSERT(new_node != nullptr); | ||||
|         new_node->Initialize(std::addressof(ref)); | ||||
|         return iterator(BaseList::insert(pos.m_base_it, *new_node)); | ||||
|     } | ||||
|  | ||||
|     void push_back(reference ref) { | ||||
|         this->insert(this->end(), ref); | ||||
|     } | ||||
|  | ||||
|     void push_front(reference ref) { | ||||
|         this->insert(this->begin(), ref); | ||||
|     } | ||||
|  | ||||
|     void pop_back() { | ||||
|         this->erase(--this->end()); | ||||
|     } | ||||
|  | ||||
|     void pop_front() { | ||||
|         this->erase(this->begin()); | ||||
|     } | ||||
|  | ||||
|     iterator erase(const iterator pos) { | ||||
|         KLinkedListNode* freed_node = std::addressof(*pos.m_base_it); | ||||
|         iterator ret = iterator(BaseList::erase(pos.m_base_it)); | ||||
|         KLinkedListNode::Free(kernel, freed_node); | ||||
|  | ||||
|         return ret; | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     KernelSystem& kernel; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
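Unlike std::list, KLinkedList never owns or copies its elements: each insert takes a KLinkedListNode from the kernel slab and stores a pointer to the caller's object. A brief sketch under that assumption (the KernelSystem must have the node slab initialized, and items must outlive the list; Foo and the call site are illustrative):

    struct Foo { int value; };

    void Demo(Kernel::KernelSystem& kernel) {
        Kernel::KLinkedList<Foo> list{kernel};
        Foo a{1};
        Foo b{2};
        list.push_back(a); // slab-allocates a node holding &a; no copy
        list.push_back(b);
        for (Foo& f : list) {
            f.value += 1; // iterates by reference over a, then b
        }
    } // the destructor erases every node and returns it to the slab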
src/core/hle/kernel/k_memory_block.cpp (new file, 41 lines)
							| @@ -0,0 +1,41 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "core/hle/kernel/k_memory_block.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| void KMemoryBlock::ShrinkBlock(VAddr addr, u32 num_pages) { | ||||
|     const VAddr end_addr = addr + (num_pages << Memory::CITRA_PAGE_BITS) - 1; | ||||
|     const VAddr last_addr = this->GetLastAddress(); | ||||
|     if (m_base_addr < end_addr && end_addr < last_addr) { | ||||
|         m_base_addr = end_addr + 1; | ||||
|         m_num_pages = (last_addr - end_addr) >> Memory::CITRA_PAGE_BITS; | ||||
|         return; | ||||
|     } | ||||
|     if (m_base_addr < addr && addr < last_addr) { | ||||
|         m_num_pages = (addr - m_base_addr) >> Memory::CITRA_PAGE_BITS; | ||||
|         return; | ||||
|     } | ||||
| } | ||||
|  | ||||
| void KMemoryBlock::GrowBlock(VAddr addr, u32 num_pages) { | ||||
|     const u32 end_addr = addr + (num_pages << Memory::CITRA_PAGE_BITS) - 1; | ||||
|     const u32 last_addr = this->GetLastAddress(); | ||||
|     if (addr < m_base_addr) { | ||||
|         m_base_addr = addr; | ||||
|         m_num_pages = (last_addr - addr + 1) >> Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|     if (last_addr < end_addr) { | ||||
|         m_num_pages = (end_addr - m_base_addr + 1) >> Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
| } | ||||
|  | ||||
| bool KMemoryBlock::IncludesRange(VAddr addr, u32 num_pages) { | ||||
|     const u32 end_addr = addr + (num_pages << Memory::CITRA_PAGE_BITS) - 1; | ||||
|     const u32 last_addr = this->GetLastAddress(); | ||||
|     // Note: true when this block lies entirely within [addr, end_addr]. | ||||
|     return m_base_addr >= addr && last_addr <= end_addr; | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
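To make the shrink arithmetic concrete, here is a worked example assuming 4 KiB pages (Memory::CITRA_PAGE_BITS == 12); the addresses are illustrative, not taken from a real mapping:

    // Block covers [0x08000000, 0x0800FFFF] (16 pages).
    KMemoryBlock block;
    block.Initialize(0x08000000, 16, 0, KMemoryState::Private,
                     KMemoryPermission::UserReadWrite);

    // Case 1: the carved range ends inside the block, so keep the tail.
    block.ShrinkBlock(0x08002000, 4); // range ends at 0x08005FFF
    // block is now [0x08006000, 0x0800FFFF], 10 pages.

    // Case 2: the carved range starts inside the block, so keep the head.
    block.ShrinkBlock(0x0800A000, 24); // range extends past the block
    // block is now [0x08006000, 0x08009FFF], 4 pages.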
src/core/hle/kernel/k_memory_block.h (new file, 187 lines)
							| @@ -0,0 +1,187 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "common/common_funcs.h" | ||||
| #include "core/hle/kernel/slab_helpers.h" | ||||
| #include "core/memory.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| enum class KMemoryPermission : u32 { | ||||
|     None = 0x0, | ||||
|     UserRead = 0x1, | ||||
|     UserWrite = 0x2, | ||||
|     UserReadWrite = UserRead | UserWrite, | ||||
|     UserExecute = 0x4, | ||||
|     UserReadExecute = UserRead | UserExecute, | ||||
|     KernelRead = 0x8, | ||||
|     KernelWrite = 0x10, | ||||
|     KernelExecute = 0x20, | ||||
|     KernelReadWrite = KernelRead | KernelWrite, | ||||
|     DontCare = 0x10000000, | ||||
| }; | ||||
| DECLARE_ENUM_FLAG_OPERATORS(KMemoryPermission) | ||||
|  | ||||
| enum class KMemoryState : u32 { | ||||
|     Free = 0x0, | ||||
|     Reserved = 0x1, | ||||
|     Io = 0x2, | ||||
|     Static = 0x3, | ||||
|     Code = 0x4, | ||||
|     Private = 0x5, | ||||
|     Shared = 0x6, | ||||
|     Continuous = 0x7, | ||||
|     Aliased = 0x8, | ||||
|     Alias = 0x9, | ||||
|     Aliascode = 0xA, | ||||
|     Locked = 0xB, | ||||
|     KernelMask = 0xFF, | ||||
|  | ||||
|     FlagDeallocatable = 0x100, | ||||
|     FlagProtectible = 0x200, | ||||
|     FlagDebuggable = 0x400, | ||||
|     FlagIpcAllowed = 0x800, | ||||
|     FlagMapped = 0x1000, | ||||
|     FlagPrivate = 0x2000, | ||||
|     FlagShared = 0x4000, | ||||
|     FlagsPrivateOrShared = 0x6000, | ||||
|     FlagCodeAllowed = 0x8000, | ||||
|     FlagsIpc = 0x1800, | ||||
|     FlagsPrivateData = 0x3800, | ||||
|     FlagsPrivateCodeAllowed = 0xB800, | ||||
|     FlagsPrivateCode = 0xBC00, | ||||
|     FlagsCode = 0x9C00, | ||||
|  | ||||
|     KernelIo = 0x1002, | ||||
|     KernelStatic = 0x1003, | ||||
|     KernelShared = 0x5806, | ||||
|     KernelLinear = 0x3907, | ||||
|     KernelAliased = 0x3A08, | ||||
|     KernelAlias = 0x1A09, | ||||
|     KernelAliasCode = 0x9C0A, | ||||
|     PrivateAliasCode = 0xBC0A, | ||||
|     PrivateCode = 0xBC04, | ||||
|     PrivateData = 0xBB05, | ||||
|     KernelLocked = 0x380B, | ||||
|     FlagsAny = 0xFFFFFFFF, | ||||
| }; | ||||
| DECLARE_ENUM_FLAG_OPERATORS(KMemoryState) | ||||
|  | ||||
| struct KMemoryInfo { | ||||
|     VAddr m_base_address; | ||||
|     u32 m_size; | ||||
|     KMemoryPermission m_perms; | ||||
|     KMemoryState m_state; | ||||
|  | ||||
|     constexpr VAddr GetAddress() const { | ||||
|         return m_base_address; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetSize() const { | ||||
|         return m_size; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetNumPages() const { | ||||
|         return this->GetSize() >> Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     constexpr VAddr GetEndAddress() const { | ||||
|         return this->GetAddress() + this->GetSize(); | ||||
|     } | ||||
|  | ||||
|     constexpr VAddr GetLastAddress() const { | ||||
|         return this->GetEndAddress() - 1; | ||||
|     } | ||||
|  | ||||
|     constexpr KMemoryPermission GetPerms() const { | ||||
|         return m_perms; | ||||
|     } | ||||
|  | ||||
|     constexpr KMemoryState GetState() const { | ||||
|         return m_state; | ||||
|     } | ||||
| }; | ||||
|  | ||||
| struct KMemoryBlock : public KSlabAllocated<KMemoryBlock> { | ||||
| public: | ||||
|     explicit KMemoryBlock() = default; | ||||
|  | ||||
|     constexpr void Initialize(VAddr base_addr, u32 num_pages, u32 tag, KMemoryState state, | ||||
|                               KMemoryPermission perms) { | ||||
|         m_base_addr = base_addr; | ||||
|         m_num_pages = num_pages; | ||||
|         m_permission = perms; | ||||
|         m_memory_state = state; | ||||
|         m_tag = tag; | ||||
|     } | ||||
|  | ||||
|     constexpr bool Contains(VAddr addr) const { | ||||
|         return this->GetAddress() <= addr && addr <= this->GetLastAddress(); | ||||
|     } | ||||
|  | ||||
|     constexpr KMemoryInfo GetInfo() const { | ||||
|         return { | ||||
|             .m_base_address = m_base_addr, | ||||
|             .m_size = this->GetSize(), | ||||
|             .m_perms = m_permission, | ||||
|             .m_state = m_memory_state, | ||||
|         }; | ||||
|     } | ||||
|  | ||||
|     constexpr bool HasProperties(KMemoryState s, KMemoryPermission p, u32 t) const { | ||||
|         return m_memory_state == s && m_permission == p && m_tag == t; | ||||
|     } | ||||
|  | ||||
|     constexpr bool HasSameProperties(const KMemoryBlock& rhs) const { | ||||
|         return m_memory_state == rhs.m_memory_state && m_permission == rhs.m_permission && | ||||
|                m_tag == rhs.m_tag; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetSize() const { | ||||
|         return m_num_pages << Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetEndAddress() const { | ||||
|         return this->GetAddress() + this->GetSize(); | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetLastAddress() const { | ||||
|         return this->GetEndAddress() - 1; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetAddress() const { | ||||
|         return m_base_addr; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetNumPages() const { | ||||
|         return m_num_pages; | ||||
|     } | ||||
|  | ||||
|     constexpr KMemoryPermission GetPermission() const { | ||||
|         return m_permission; | ||||
|     } | ||||
|  | ||||
|     constexpr KMemoryState GetState() const { | ||||
|         return m_memory_state; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetTag() const { | ||||
|         return m_tag; | ||||
|     } | ||||
|  | ||||
|     void ShrinkBlock(VAddr addr, u32 num_pages); | ||||
|     void GrowBlock(VAddr addr, u32 num_pages); | ||||
|     bool IncludesRange(VAddr addr, u32 num_pages); | ||||
|  | ||||
| private: | ||||
|     u32 m_base_addr{}; | ||||
|     u32 m_num_pages{}; | ||||
|     KMemoryPermission m_permission{}; | ||||
|     KMemoryState m_memory_state{}; | ||||
|     u32 m_tag{}; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
src/core/hle/kernel/k_memory_block_manager.cpp (new file, 199 lines)
							| @@ -0,0 +1,199 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "common/scope_exit.h" | ||||
| #include "core/hle/kernel/k_memory_block_manager.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| void KMemoryBlockManager::Initialize(u32 addr_space_start, u32 addr_space_end) { | ||||
|     const u32 num_pages = (addr_space_end - addr_space_start) >> Memory::CITRA_PAGE_BITS; | ||||
|     KMemoryBlock* block = KMemoryBlock::Allocate(m_kernel); | ||||
|     block->Initialize(addr_space_start, num_pages, 0, KMemoryState::Free, KMemoryPermission::None); | ||||
|     m_blocks.push_back(*block); | ||||
| } | ||||
|  | ||||
| s64 KMemoryBlockManager::GetTotalCommittedMemory() { | ||||
|     s64 total_committed_memory{}; | ||||
|     for (const auto& block : m_blocks) { | ||||
|         const KMemoryInfo info = block.GetInfo(); | ||||
|         if (info.GetAddress() - 0x1C000000 >= 0x4000000 && | ||||
|             True(info.GetState() & KMemoryState::Private)) { | ||||
|             total_committed_memory += info.GetSize(); | ||||
|         } | ||||
|     } | ||||
|     return total_committed_memory; | ||||
| } | ||||
|  | ||||
| KMemoryBlock* KMemoryBlockManager::FindFreeBlockInRegion(VAddr start, u32 num_pages, | ||||
|                                                          u32 block_num_pages) { | ||||
|     const VAddr end = start + (num_pages << Memory::CITRA_PAGE_BITS); | ||||
|     const u32 block_size = block_num_pages << Memory::CITRA_PAGE_BITS; | ||||
|     for (auto& block : m_blocks) { | ||||
|         const KMemoryInfo info = block.GetInfo(); | ||||
|         if (info.GetState() != KMemoryState::Free) { | ||||
|             continue; | ||||
|         } | ||||
|         const VAddr block_start = std::max(info.GetAddress(), start); | ||||
|         const VAddr block_end = block_start + block_size; | ||||
|         if (block_end <= end && block_end <= info.GetEndAddress()) { | ||||
|             return std::addressof(block); | ||||
|         } | ||||
|     } | ||||
|     return nullptr; | ||||
| } | ||||
|  | ||||
| void KMemoryBlockManager::CoalesceBlocks() { | ||||
|     auto it = m_blocks.begin(); | ||||
|     while (true) { | ||||
|         iterator prev = it++; | ||||
|         if (it == m_blocks.end()) { | ||||
|             break; | ||||
|         } | ||||
|  | ||||
|         // Merge adjacent blocks with the same properties. | ||||
|         if (prev->HasSameProperties(*it)) { | ||||
|             KMemoryBlock* block = std::addressof(*it); | ||||
|             const KMemoryInfo info = block->GetInfo(); | ||||
|             prev->GrowBlock(info.GetAddress(), info.GetNumPages()); | ||||
|             KMemoryBlock::Free(m_kernel, block); | ||||
|             m_blocks.erase(it); | ||||
|             it = prev; | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| ResultCode KMemoryBlockManager::MutateRange(VAddr addr, u32 num_pages, KMemoryState state, | ||||
|                                             KMemoryPermission perms, u32 tag) { | ||||
|     // Initialize iterators. | ||||
|     const VAddr last_addr = addr + (num_pages << Memory::CITRA_PAGE_BITS) - 1; | ||||
|     iterator begin = FindIterator(addr); | ||||
|     iterator end = FindIterator(last_addr); | ||||
|  | ||||
|     // Before returning we have to coalesce. | ||||
|     SCOPE_EXIT({ this->CoalesceBlocks(); }); | ||||
|  | ||||
|     // Begin and end addresses are in different blocks. We need to shrink/remove | ||||
|     // any blocks in that range and insert a new one with the new attributes. | ||||
|     if (begin != end) { | ||||
|         // Any blocks in-between begin and end can be completely erased. | ||||
|         for (auto it = std::next(begin); it != end;) { | ||||
|             KMemoryBlock::Free(m_kernel, std::addressof(*it)); | ||||
|             it = m_blocks.erase(it); | ||||
|         } | ||||
|  | ||||
|         // If the begin block has the same properties, grow it to accommodate the range. | ||||
|         if (begin->HasProperties(state, perms, tag)) { | ||||
|             begin->GrowBlock(addr, num_pages); | ||||
|             // If the end block is fully overwritten, remove it. | ||||
|             if (end->GetLastAddress() == last_addr) { | ||||
|                 KMemoryBlock::Free(m_kernel, std::addressof(*end)); | ||||
|                 m_blocks.erase(end); | ||||
|                 R_SUCCEED(); | ||||
|             } | ||||
|         } else if (end->HasProperties(state, perms, tag)) { | ||||
|             // If the end block has the same properties, grow it to accommodate the range. | ||||
|             end->GrowBlock(addr, num_pages); | ||||
|  | ||||
|             // Remove start block if fully overwritten | ||||
|             if (begin->GetAddress() == addr) { | ||||
|                 KMemoryBlock::Free(m_kernel, std::addressof(*begin)); | ||||
|                 m_blocks.erase(begin); | ||||
|                 R_SUCCEED(); | ||||
|             } | ||||
|         } else { | ||||
|             // Neither the begin nor the end block has the required properties. | ||||
|             // Shrink them both and create a new block in-between. | ||||
|             if (begin->IncludesRange(addr, num_pages)) { | ||||
|                 KMemoryBlock::Free(m_kernel, std::addressof(*begin)); | ||||
|                 begin = m_blocks.erase(begin); | ||||
|             } else { | ||||
|                 // Otherwise cut off the part that is inside our range. | ||||
|                 begin->ShrinkBlock(addr, num_pages); | ||||
|             } | ||||
|  | ||||
|             // If the end block is fully inside the range, remove it | ||||
|             if (end->IncludesRange(addr, num_pages)) { | ||||
|                 KMemoryBlock::Free(m_kernel, std::addressof(*end)); | ||||
|                 end = m_blocks.erase(end); | ||||
|             } else { | ||||
|                 // Otherwise cut off the part that is inside our range. | ||||
|                 end->ShrinkBlock(addr, num_pages); | ||||
|             } | ||||
|  | ||||
|             // The range [addr, last_addr] is now vacant; create a new block in its place. | ||||
|             KMemoryBlock* block = KMemoryBlock::Allocate(m_kernel); | ||||
|             block->Initialize(addr, num_pages, 0, state, perms); | ||||
|  | ||||
|             // Insert it to the block list | ||||
|             m_blocks.insert(end, *block); | ||||
|             R_SUCCEED(); | ||||
|         } | ||||
|  | ||||
|         // Shrink the block containing the start va | ||||
|         begin->ShrinkBlock(addr, num_pages); | ||||
|         R_SUCCEED(); | ||||
|     } | ||||
|  | ||||
|     // Start and end address are in same block, we have to split that. | ||||
|     if (!begin->HasProperties(state, perms, tag)) { | ||||
|         const KMemoryInfo info = begin->GetInfo(); | ||||
|         const u32 pages_in_block = (addr - info.GetAddress()) >> Memory::CITRA_PAGE_BITS; | ||||
|  | ||||
|         // Block has same starting address, we can just adjust the size. | ||||
|         if (info.GetAddress() == addr) { | ||||
|             // Block size matches, simply change attributes. | ||||
|             if (info.GetSize() == num_pages << Memory::CITRA_PAGE_BITS) { | ||||
|                 begin->Initialize(addr, num_pages, tag, state, perms); | ||||
|                 R_SUCCEED(); | ||||
|             } | ||||
|             // Block size is bigger, split, insert new block after and update | ||||
|             begin->ShrinkBlock(addr, num_pages); | ||||
|             KMemoryBlock* block = KMemoryBlock::Allocate(m_kernel); | ||||
|             block->Initialize(addr, num_pages, tag, state, perms); | ||||
|  | ||||
|             // Insert it to the block list. | ||||
|             m_blocks.insert(begin, *block); | ||||
|             R_SUCCEED(); | ||||
|         } | ||||
|  | ||||
|         // Same end address, but different base addr. | ||||
|         if (info.GetLastAddress() == last_addr) { | ||||
|             begin->ShrinkBlock(addr, num_pages); | ||||
|             KMemoryBlock* block = KMemoryBlock::Allocate(m_kernel); | ||||
|             block->Initialize(addr, num_pages, tag, state, perms); | ||||
|  | ||||
|             // Insert it to the block list | ||||
|             m_blocks.insert(++begin, *block); | ||||
|             R_SUCCEED(); | ||||
|         } | ||||
|  | ||||
|         // Block fully contains the start and end addresses. Shrink it to the (last_addr, block_end] range. | ||||
|         begin->ShrinkBlock(0, num_pages + (addr >> Memory::CITRA_PAGE_BITS)); | ||||
|  | ||||
|         // Create a new block for [addr, last_addr] with the provided attributes. | ||||
|         KMemoryBlock* middle_block = KMemoryBlock::Allocate(m_kernel); | ||||
|         middle_block->Initialize(addr, num_pages, tag, state, perms); | ||||
|         begin = m_blocks.insert(begin, *middle_block); | ||||
|  | ||||
|         // Create another block for the third range [block_addr, addr]. | ||||
|         KMemoryBlock* start_block = KMemoryBlock::Allocate(m_kernel); | ||||
|         start_block->Initialize(info.GetAddress(), pages_in_block, 0, info.GetState(), | ||||
|                                 info.GetPerms()); | ||||
|         m_blocks.insert(begin, *start_block); | ||||
|     } | ||||
|  | ||||
|     // We are done :) | ||||
|     R_SUCCEED(); | ||||
| } | ||||
|  | ||||
| void KMemoryBlockManager::Finalize() { | ||||
|     auto it = m_blocks.begin(); | ||||
|     while (it != m_blocks.end()) { | ||||
|         KMemoryBlock::Free(m_kernel, std::addressof(*it)); | ||||
|         it = m_blocks.erase(it); | ||||
|     } | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
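When the mutated range falls strictly inside a single block whose properties differ, the final branch above performs a three-way split. An illustrative call (manager initialization elided; the addresses, sizes, and states are made up for the example):

    // Before: a single Free block covers [0x08000000, 0x0BFFFFFF].
    manager.MutateRange(0x08100000, /*num_pages=*/16,
                        KMemoryState::PrivateData,
                        KMemoryPermission::UserReadWrite, /*tag=*/0);
    // After: [0x08000000, 0x080FFFFF] Free
    //        [0x08100000, 0x0810FFFF] PrivateData, UserReadWrite
    //        [0x08110000, 0x0BFFFFFF] Free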
src/core/hle/kernel/k_memory_block_manager.h (new file, 41 lines)
							| @@ -0,0 +1,41 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "core/hle/kernel/k_linked_list.h" | ||||
| #include "core/hle/kernel/k_memory_block.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KMemoryBlockManager final { | ||||
|     using BlockList = KLinkedList<KMemoryBlock>; | ||||
|     using iterator = BlockList::iterator; | ||||
|  | ||||
| public: | ||||
|     explicit KMemoryBlockManager(KernelSystem& kernel) : m_kernel{kernel}, m_blocks{kernel} {} | ||||
|     ~KMemoryBlockManager() = default; | ||||
|  | ||||
|     void Initialize(u32 addr_space_start, u32 addr_space_end); | ||||
|     void Finalize(); | ||||
|  | ||||
|     void CoalesceBlocks(); | ||||
|     s64 GetTotalCommittedMemory(); | ||||
|     ResultCode MutateRange(VAddr addr, u32 num_pages, KMemoryState state, KMemoryPermission perms, | ||||
|                            u32 tag); | ||||
|  | ||||
|     KMemoryBlock* GetMemoryBlockContainingAddr(u32 addr); | ||||
|     KMemoryBlock* FindFreeBlockInRegion(VAddr start, u32 num_pages, u32 block_num_pages); | ||||
|  | ||||
|     iterator FindIterator(VAddr address) { | ||||
|         return std::find_if(m_blocks.begin(), m_blocks.end(), | ||||
|                             [address](auto& block) { return block.Contains(address); }); | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     KernelSystem& m_kernel; | ||||
|     BlockList m_blocks; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
src/core/hle/kernel/k_memory_manager.cpp (new file, 73 lines)
							| @@ -0,0 +1,73 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "core/hle/kernel/k_memory_manager.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| void KMemoryManager::Initialize(FcramLayout* layout, u32 fcram_addr, u32 fcram_size) { | ||||
|     m_application_heap.Initialize(layout->application_addr, layout->application_size); | ||||
|     m_system_heap.Initialize(layout->system_addr, layout->system_size); | ||||
|     m_base_heap.Initialize(layout->base_addr, layout->base_size); | ||||
|     m_page_manager.Initialize(fcram_addr, fcram_size >> Memory::CITRA_PAGE_BITS); | ||||
| } | ||||
|  | ||||
| u32 KMemoryManager::ConvertSharedMemPaLinearWithAppMemType(PAddr addr) { | ||||
|     const u32 fcram_offset = addr - Memory::FCRAM_PADDR; | ||||
|     // App memory type 7 selects the extended linear heap base. | ||||
|     const u32 linear_base = | ||||
|         g_kernelSharedConfigPagePtr->appMemType == 7 ? 0x30000000 : 0x14000000; | ||||
|     return fcram_offset + linear_base; | ||||
| } | ||||
|  | ||||
| VAddr KMemoryManager::AllocateContiguous(u32 num_pages, u32 page_alignment, MemoryOperation op) { | ||||
|     // KLightScopedMutex m{m_page_manager.GetMutex()}; | ||||
|  | ||||
|     if (True(op & MemoryOperation::Kernel)) { | ||||
|         m_page_manager.GetKernelMemoryUsage() += num_pages << Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     switch (op & MemoryOperation::RegionMask) { | ||||
|     case MemoryOperation::RegionApplication: | ||||
|         return m_application_heap.AllocateContiguous(num_pages, page_alignment); | ||||
|     case MemoryOperation::RegionSystem: | ||||
|         return m_system_heap.AllocateContiguous(num_pages, page_alignment); | ||||
|     case MemoryOperation::RegionBase: | ||||
|         return m_base_heap.AllocateContiguous(num_pages, page_alignment); | ||||
|     default: | ||||
|         UNREACHABLE(); | ||||
|         return 0; | ||||
|     } | ||||
| } | ||||
|  | ||||
| VAddr KMemoryManager::AllocateContiguousBackwards(u32 num_pages, MemoryOperation op) { | ||||
|     // KLightScopedMutex m{m_page_manager.GetMutex()}; | ||||
|  | ||||
|     if (True(op & MemoryOperation::Kernel)) { | ||||
|         m_page_manager.GetKernelMemoryUsage() += num_pages << Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     switch (op & MemoryOperation::RegionMask) { | ||||
|     case MemoryOperation::RegionApplication: | ||||
|         return m_application_heap.AllocateBackwards(num_pages); | ||||
|     case MemoryOperation::RegionSystem: | ||||
|         return m_system_heap.AllocateBackwards(num_pages); | ||||
|     case MemoryOperation::RegionBase: | ||||
|         return m_base_heap.AllocateBackwards(num_pages); | ||||
|     default: | ||||
|         UNREACHABLE(); | ||||
|         return 0; | ||||
|     } | ||||
| } | ||||
|  | ||||
| void KMemoryManager::FreeContiguousLocked(u32 addr, u32 num_pages) { | ||||
|     // KLightScopedMutex m{m_page_manager.GetMutex()}; | ||||
|     m_page_manager.FreeContiguous(addr, num_pages, MemoryOperation::None); | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
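The conversion above is a plain rebase from FCRAM physical space into the linear heap. A standalone restatement of the math (not Citra API), assuming Memory::FCRAM_PADDR == 0x20000000, the 3DS FCRAM physical base:

    #include <cstdint>

    // extended == true models appMemType == 7.
    constexpr std::uint32_t ToLinear(std::uint32_t paddr, bool extended) {
        const std::uint32_t offset = paddr - 0x20000000; // offset into FCRAM
        return offset + (extended ? 0x30000000u : 0x14000000u);
    }
    static_assert(ToLinear(0x20100000, false) == 0x14100000);
    static_assert(ToLinear(0x20100000, true) == 0x30100000);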
src/core/hle/kernel/k_memory_manager.h (new file, 60 lines)
							| @@ -0,0 +1,60 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "core/hle/kernel/k_page_heap.h" | ||||
| #include "core/hle/kernel/k_page_manager.h" | ||||
|  | ||||
| namespace Memory { | ||||
| class MemorySystem; | ||||
| } | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| struct FcramLayout { | ||||
|     u32 application_addr; | ||||
|     u32 application_size; | ||||
|     u32 system_addr; | ||||
|     u32 system_size; | ||||
|     u32 base_addr; | ||||
|     u32 base_size; | ||||
| }; | ||||
|  | ||||
| class KMemoryManager { | ||||
| public: | ||||
|     explicit KMemoryManager(Memory::MemorySystem& memory) | ||||
|         : m_application_heap{memory}, m_system_heap{memory}, m_base_heap{memory}, m_page_manager{ | ||||
|                                                                                       memory, | ||||
|                                                                                       this} {} | ||||
|     ~KMemoryManager() = default; | ||||
|  | ||||
|     void Initialize(FcramLayout* layout, u32 fcram_addr, u32 fcram_size); | ||||
|  | ||||
|     u32 ConvertSharedMemPaLinearWithAppMemType(PAddr addr); | ||||
|  | ||||
|     KPageHeap& GetApplicationHeap() noexcept { | ||||
|         return m_application_heap; | ||||
|     } | ||||
|  | ||||
|     KPageHeap& GetSystemHeap() noexcept { | ||||
|         return m_system_heap; | ||||
|     } | ||||
|  | ||||
|     KPageHeap& GetBaseHeap() noexcept { | ||||
|         return m_base_heap; | ||||
|     } | ||||
|  | ||||
|     VAddr AllocateContiguous(u32 num_pages, u32 page_alignment, MemoryOperation op); | ||||
|     VAddr AllocateContiguousBackwards(u32 num_pages, MemoryOperation op); | ||||
|     void FreeContiguousLocked(u32 addr, u32 num_pages); | ||||
|  | ||||
| private: | ||||
|     KPageHeap m_application_heap; | ||||
|     KPageHeap m_system_heap; | ||||
|     KPageHeap m_base_heap; | ||||
|     KPageManager m_page_manager; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
src/core/hle/kernel/k_page_group.cpp (new file, 82 lines)
							| @@ -0,0 +1,82 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "core/hle/kernel/k_page_group.h" | ||||
| #include "core/hle/kernel/k_page_manager.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| KPageGroup::~KPageGroup() { | ||||
|     EraseAll(); | ||||
| } | ||||
|  | ||||
| void KPageGroup::AddRange(u32 addr, u32 num_pages) { | ||||
|     // If the provided range is empty (or wraps around the address space), there is nothing to do. | ||||
|     if (num_pages == 0 || addr + (num_pages << Memory::CITRA_PAGE_BITS) == 0) { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     // KScopedSchedulerLock lk{m_kernel}; | ||||
|  | ||||
|     // Attempt to coalesce with the last block if possible. | ||||
|     if (!m_blocks.empty()) { | ||||
|         KBlockInfo& last = m_blocks.back(); | ||||
|         if (addr != 0 && addr == last.GetEndAddress()) { | ||||
|             last.m_num_pages += num_pages; | ||||
|             return; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Allocate and initialize the new block. | ||||
|     KBlockInfo* new_block = KBlockInfo::Allocate(m_kernel); | ||||
|     new_block->Initialize(addr, num_pages); | ||||
|  | ||||
|     // Push the block to the list. | ||||
|     m_blocks.push_back(*new_block); | ||||
| } | ||||
|  | ||||
| void KPageGroup::IncrefPages() { | ||||
|     // Iterate over block list and increment page reference counts. | ||||
|     for (const auto& block : m_blocks) { | ||||
|         m_page_manager->IncrefPages(block.GetAddress(), block.GetNumPages()); | ||||
|     } | ||||
| } | ||||
|  | ||||
| u32 KPageGroup::GetTotalNumPages() { | ||||
|     // Iterate over block list and count number of pages. | ||||
|     u32 total_num_pages{}; | ||||
|     for (const auto& block : m_blocks) { | ||||
|         total_num_pages += block.GetNumPages(); | ||||
|     } | ||||
|     return total_num_pages; | ||||
| } | ||||
|  | ||||
| void KPageGroup::EraseAll() { | ||||
|     // Free all blocks referenced in the linked list. | ||||
|     auto it = m_blocks.begin(); | ||||
|     while (it != m_blocks.end()) { | ||||
|         KBlockInfo::Free(m_kernel, std::addressof(*it)); | ||||
|         it = m_blocks.erase(it); | ||||
|     } | ||||
| } | ||||
|  | ||||
| bool KPageGroup::IsEquivalentTo(const KPageGroup& rhs) const { | ||||
|     auto lit = m_blocks.begin(); | ||||
|     auto rit = rhs.m_blocks.begin(); | ||||
|     auto lend = m_blocks.end(); | ||||
|     auto rend = rhs.m_blocks.end(); | ||||
|  | ||||
|     while (lit != lend && rit != rend) { | ||||
|         if (*lit != *rit) { | ||||
|             return false; | ||||
|         } | ||||
|  | ||||
|         ++lit; | ||||
|         ++rit; | ||||
|     } | ||||
|  | ||||
|     return lit == lend && rit == rend; | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
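AddRange only allocates a node when the new range is not contiguous with the last block, so building a group from consecutive pages stays at a single KBlockInfo. A sketch, assuming an initialized KernelSystem and KPageManager and 4 KiB pages:

    Kernel::KPageGroup group{kernel, std::addressof(page_manager)};
    group.AddRange(0x20000000, 2); // first block: 2 pages at 0x20000000
    group.AddRange(0x20002000, 3); // contiguous: the last block grows to 5 pages
    group.AddRange(0x20010000, 1); // gap: a second block is allocated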
src/core/hle/kernel/k_page_group.h (new file, 102 lines)
							| @@ -0,0 +1,102 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "core/hle/kernel/k_linked_list.h" | ||||
| #include "core/hle/kernel/slab_helpers.h" | ||||
| #include "core/memory.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| struct KBlockInfo final : public KSlabAllocated<KBlockInfo> { | ||||
| public: | ||||
|     explicit KBlockInfo() = default; | ||||
|     ~KBlockInfo() = default; | ||||
|  | ||||
|     void Initialize(u32 address, u32 num_pages) { | ||||
|         m_base_address = address; | ||||
|         m_num_pages = num_pages; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetAddress() const { | ||||
|         return m_base_address; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetEndAddress() const { | ||||
|         return this->GetAddress() + this->GetSize(); | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetSize() const { | ||||
|         return m_num_pages << Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetNumPages() const { | ||||
|         return m_num_pages; | ||||
|     } | ||||
|  | ||||
|     constexpr bool IsEquivalentTo(const KBlockInfo& rhs) const { | ||||
|         return m_base_address == rhs.m_base_address && m_num_pages == rhs.m_num_pages; | ||||
|     } | ||||
|  | ||||
|     constexpr bool operator==(const KBlockInfo& rhs) const { | ||||
|         return this->IsEquivalentTo(rhs); | ||||
|     } | ||||
|  | ||||
|     constexpr bool operator!=(const KBlockInfo& rhs) const { | ||||
|         return !(*this == rhs); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     u32 m_base_address; | ||||
|     u32 m_num_pages; | ||||
| }; | ||||
|  | ||||
| class KPageManager; | ||||
| class KernelSystem; | ||||
|  | ||||
| class KPageGroup { | ||||
|     using BlockInfoList = KLinkedList<KBlockInfo>; | ||||
|     using iterator = BlockInfoList::const_iterator; | ||||
|  | ||||
| public: | ||||
|     explicit KPageGroup(KernelSystem& kernel, KPageManager* page_manager) | ||||
|         : m_kernel{kernel}, m_page_manager{page_manager}, m_blocks{kernel} {} | ||||
|     ~KPageGroup(); | ||||
|  | ||||
|     iterator begin() const { | ||||
|         return this->m_blocks.begin(); | ||||
|     } | ||||
|     iterator end() const { | ||||
|         return this->m_blocks.end(); | ||||
|     } | ||||
|     bool empty() const { | ||||
|         return this->m_blocks.empty(); | ||||
|     } | ||||
|  | ||||
|     void AddRange(u32 addr, u32 num_pages); | ||||
|     void IncrefPages(); | ||||
|  | ||||
|     void EraseAll(); | ||||
|     void FreeMemory(); | ||||
|  | ||||
|     u32 GetTotalNumPages(); | ||||
|  | ||||
|     bool IsEquivalentTo(const KPageGroup& rhs) const; | ||||
|  | ||||
|     bool operator==(const KPageGroup& rhs) const { | ||||
|         return this->IsEquivalentTo(rhs); | ||||
|     } | ||||
|  | ||||
|     bool operator!=(const KPageGroup& rhs) const { | ||||
|         return !(*this == rhs); | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     KernelSystem& m_kernel; | ||||
|     KPageManager* m_page_manager{}; | ||||
|     BlockInfoList m_blocks; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
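A minimal usage sketch of the page-group API above (hypothetical call site: `kernel`, `page_manager`, and the addresses are illustrative stand-ins, not values from this PR):

    // Sketch only: collect two contiguous ranges and pin them.
    KPageGroup group{kernel, &page_manager};
    group.AddRange(0x08000000, 4); // four 0x1000-byte pages
    group.AddRange(0x08004000, 2); // two more, directly after the first range
    ASSERT(!group.empty());
    ASSERT(group.GetTotalNumPages() == 6);
    group.IncrefPages(); // bump the reference count of every page via the page manager

Equality between two groups compares the underlying block lists element by element, which is what the list-walking helper at the top of this section implements.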
408 src/core/hle/kernel/k_page_heap.cpp Normal file
							| @@ -0,0 +1,408 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "core/hle/kernel/k_page_heap.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| void KPageHeap::Initialize(VAddr region_start, u32 region_size) { | ||||
|     m_region_start = region_start; | ||||
|     m_region_size = region_size; | ||||
|  | ||||
|     // Retrieve the first block in the provided region. | ||||
|     Block* first_block = m_memory.GetPointer<Block>(m_region_start); | ||||
|     ASSERT(first_block); | ||||
|  | ||||
|     // Initialize the block. | ||||
|     first_block->num_pages = this->GetNumPages(); | ||||
|     first_block->current = first_block; | ||||
|  | ||||
|     // Insert the block to our block list. | ||||
|     m_blocks.push_front(*first_block); | ||||
| } | ||||
|  | ||||
| u32 KPageHeap::GetTotalNumPages() { | ||||
|     // Iterate over the blocks. | ||||
|     u32 total_num_pages{}; | ||||
|     for (const auto& block : m_blocks) { | ||||
|         total_num_pages += block.num_pages; | ||||
|     } | ||||
|     return total_num_pages; | ||||
| } | ||||
|  | ||||
| void KPageHeap::FreeBlock(u32 addr, u32 num_pages) { | ||||
|     // Return if there are no pages to free. | ||||
|     if (num_pages == 0) { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     // Attempt to insert the block before the current list head. | ||||
|     Block* start_block = m_blocks.empty() ? nullptr : std::addressof(m_blocks.front()); | ||||
|     if (this->TryInsert(addr, num_pages, nullptr, start_block)) { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     // Otherwise, attempt to insert between each pair of neighboring blocks. | ||||
|     for (auto it = m_blocks.begin(); it != m_blocks.end();) { | ||||
|         Block* block = std::addressof(*it++); | ||||
|         Block* next_block = it != m_blocks.end() ? std::addressof(*it) : nullptr; | ||||
|         if (this->TryInsert(addr, num_pages, block, next_block)) { | ||||
|             break; | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| void* KPageHeap::AllocateBackwards(u32 size) { | ||||
|     // Ensure allocation is possible. | ||||
|     if (size == 0) [[unlikely]] { | ||||
|         return nullptr; | ||||
|     } | ||||
|  | ||||
|     // Iterate over block list backwards. | ||||
|     u32 remaining = size; | ||||
|     for (auto it = m_blocks.rbegin(); it != m_blocks.rend(); it++) { | ||||
|         // If block does not cover remaining pages continue. | ||||
|         auto block = std::addressof(*it); | ||||
|         const u32 num_pages = block->num_pages; | ||||
|         if (remaining > num_pages) { | ||||
|             remaining -= num_pages; | ||||
|             continue; | ||||
|         } | ||||
|  | ||||
|         // Split the last block at our boundary; the tail holds the remaining pages. | ||||
|         const u32 new_block_pages = num_pages - remaining; | ||||
|         auto new_block = this->SplitBlock(block, new_block_pages); | ||||
|         ASSERT(new_block && new_block->num_pages == remaining); | ||||
|  | ||||
|         // new_block.prev = 0; | ||||
|         this->SetLastBlock(block); | ||||
|  | ||||
|         // Return final block which points to our allocated memory. | ||||
|         return new_block; | ||||
|     } | ||||
|  | ||||
|     return nullptr; | ||||
| } | ||||
|  | ||||
| void* KPageHeap::AllocateContiguous(u32 size, u32 page_alignment) { | ||||
|     // Ensure allocation is possible. | ||||
|     if (m_blocks.empty() || size == 0) [[unlikely]] { | ||||
|         return nullptr; | ||||
|     } | ||||
|  | ||||
|     // Validate every block before searching. | ||||
|     for (auto it = m_blocks.begin(); it != m_blocks.end(); it++) { | ||||
|         this->ValidateBlock(std::addressof(*it)); | ||||
|     } | ||||
|  | ||||
|     for (Block* current_node = m_link.next; current_node; | ||||
|          current_node = current_node->link.next) { | ||||
|         this->ValidateBlock(current_node); | ||||
|         const u32 num_pages = current_node->num_pages; | ||||
|         // if (current_node->num_pages > this->GetNumPages() || this->GetRegionEnd() < (unsigned | ||||
|         // int)current_node + 4096 * num_pages) { | ||||
|         //     UNREACHABLE(); | ||||
|         // } | ||||
|  | ||||
|         // Compute the number of pages needed to reach the requested alignment. | ||||
|         u32 misalignment = 0; | ||||
|         if (page_alignment > 1) { | ||||
|             const u32 rem = ((u32)current_node >> Memory::CITRA_PAGE_BITS) % page_alignment; | ||||
|             if (rem) { | ||||
|                 misalignment = page_alignment - rem; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // Skip blocks that cannot fit the aligned allocation. | ||||
|         if (size + misalignment > num_pages) { | ||||
|             continue; | ||||
|         } | ||||
|  | ||||
|         // Split off the misaligned prefix, then the tail past the requested size. | ||||
|         Block* block = current_node; | ||||
|         if (misalignment) { | ||||
|             block = this->SplitBlock(current_node, misalignment); | ||||
|         } | ||||
|         this->SplitBlock(block, size); | ||||
|         this->ValidateBlock(block); | ||||
|  | ||||
|         // Unlink the allocated block from the free list. | ||||
|         Block* prev = block->link.prev; | ||||
|         Block* next = block->link.next; | ||||
|         this->ValidateBlock(prev); | ||||
|         this->ValidateBlock(next); | ||||
|         if (prev) { | ||||
|             prev->link.next = next; | ||||
|             this->UpdateBlockMac(prev); | ||||
|         } else { | ||||
|             m_link.next = next; | ||||
|             if (next) { | ||||
|                 next->link.prev = nullptr; | ||||
|                 this->UpdateBlockMac(next); | ||||
|             } | ||||
|         } | ||||
|         if (next) { | ||||
|             next->link.prev = prev; | ||||
|             this->UpdateBlockMac(next); | ||||
|         } else { | ||||
|             this->SetLastBlock(prev); | ||||
|         } | ||||
|  | ||||
|         if (block->num_pages != size) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|         return block; | ||||
|     } | ||||
|  | ||||
|     // No block could satisfy the request; validate the list and fail. | ||||
|     for (Block* block = m_link.next; block; block = block->link.next) { | ||||
|         this->ValidateBlock(block); | ||||
|     } | ||||
|     return nullptr; | ||||
| } | ||||
|  | ||||
| void KPageHeap::SetLastBlock(Block* block) { | ||||
|     m_link.prev = block; | ||||
|     if (!block) [[unlikely]] { | ||||
|         m_link.next = nullptr; | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     /*u32 v2 = m_key[0]; | ||||
|     u32 v3 = m_key[1]; | ||||
|     u32 v4 = m_key[2]; | ||||
|     u32 v5 = m_key[3]; | ||||
|     for (int i = 0; i < 2; i++) { | ||||
|         int v7 = 0; | ||||
|         do { | ||||
|             v2 -= *(u32 *)((char *)&block->num_pages + v7) - __ROR4__(v3, 3); | ||||
|             v7 += 4; | ||||
|             v3 -= __ROR4__(v4, (v5 & 0xF) + 3) ^ __ROR4__(v5, (v2 & 0xF) + 13); | ||||
|             v4 -= __ROR4__(v5, v2) * v3; | ||||
|             v5 -= __ROR4__(v2, v3) * v4; | ||||
|         } while ( v7 < 20 ); | ||||
|     } | ||||
|  | ||||
|     if ((v2 ^ v3) != block->mac) { | ||||
|         UNREACHABLE(); | ||||
|     }*/ | ||||
|  | ||||
|     m_link.prev->link.next = nullptr; | ||||
| } | ||||
|  | ||||
| KPageHeap::Block* KPageHeap::SplitBlock(Block* block, u32 new_block_size) { | ||||
|     const u32 num_pages = block->num_pages; | ||||
|     ASSERT(block->num_pages <= this->GetNumPages()); | ||||
|     // if (block->num_pages > this->GetNumPages() || this->GetRegionEnd() < (unsigned int)block + | ||||
|     // 4096 * num_pages) { | ||||
|     //     UNREACHABLE(); | ||||
|     // } | ||||
|  | ||||
|     if (!new_block_size || num_pages == new_block_size) [[unlikely]] { | ||||
|         return nullptr; | ||||
|     } | ||||
|  | ||||
|     // Carve the tail of the block into a new block. | ||||
|     Block* new_block = (Block*)((char*)block + Memory::CITRA_PAGE_SIZE * new_block_size); | ||||
|     Block* next = block->link.next; | ||||
|     const u32 remaining_pages = num_pages - new_block_size; | ||||
|     new_block->nonce = 0; | ||||
|     new_block->num_pages = remaining_pages; | ||||
|     new_block->mac = 0; | ||||
|     new_block->link.next = next; | ||||
|     new_block->link.prev = block; | ||||
|     new_block->current = new_block; | ||||
|  | ||||
|     if (new_block->num_pages != remaining_pages) { | ||||
|         UNREACHABLE(); | ||||
|     } | ||||
|  | ||||
|     block->link.next = new_block; | ||||
|     block->num_pages = new_block_size; | ||||
|  | ||||
|     if (block->num_pages != new_block_size) { | ||||
|         UNREACHABLE(); | ||||
|     } | ||||
|  | ||||
|     // Fix up the next block's back-link, or mark the new block as the list tail. | ||||
|     Block* after = new_block->link.next; | ||||
|     this->ValidateBlock(after); | ||||
|     if (after) { | ||||
|         after->link.prev = new_block; | ||||
|         this->UpdateBlockMac(after); | ||||
|     } else { | ||||
|         this->SetLastBlock(new_block); | ||||
|     } | ||||
|  | ||||
|     return new_block; | ||||
| } | ||||
|  | ||||
| bool KPageHeap::TryInsert(u32 freed_addr, u32 num_freed_pages, Block* prev, Block* next) { | ||||
|     Block* block = (Block*)freed_addr; | ||||
|     const u32 freed_addr_end = freed_addr + (num_freed_pages << Memory::CITRA_PAGE_BITS); | ||||
|  | ||||
|     // Ensure the freed range lies within the managed region. | ||||
|     if (num_freed_pages > (m_region_size >> Memory::CITRA_PAGE_BITS) || | ||||
|         m_region_start + m_region_size < freed_addr_end) { | ||||
|         UNREACHABLE(); | ||||
|     } | ||||
|  | ||||
|     // Validate the neighboring blocks and remember their page counts. | ||||
|     u32 prev_num_pages = 0; | ||||
|     u32 next_num_pages = 0; | ||||
|     if (prev) { | ||||
|         this->ValidateBlock(prev); | ||||
|         prev_num_pages = prev->num_pages; | ||||
|     } | ||||
|     if (next) { | ||||
|         this->ValidateBlock(next); | ||||
|         next_num_pages = next->num_pages; | ||||
|     } | ||||
|  | ||||
|     // Compute the bounds of the gap between prev and next. | ||||
|     u32 prev_end = m_region_start; | ||||
|     if (prev) { | ||||
|         if ((Block*)((char*)prev + Memory::CITRA_PAGE_SIZE * prev->num_pages - 1) < prev) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|         prev_end = (u32)prev + Memory::CITRA_PAGE_SIZE * prev->num_pages; | ||||
|     } | ||||
|     const u32 next_start = next ? (u32)next : m_region_start + m_region_size; | ||||
|     if (m_region_start > prev_end || m_region_start + m_region_size < next_start) { | ||||
|         UNREACHABLE(); | ||||
|     } | ||||
|  | ||||
|     // The freed range must fit entirely between prev and next. | ||||
|     if (prev_end > freed_addr || freed_addr_end > next_start) { | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     // Initialize the new free block and link it between prev and next. | ||||
|     block->nonce = 0; | ||||
|     block->mac = 0; | ||||
|     block->num_pages = num_freed_pages; | ||||
|     block->link.prev = prev; | ||||
|     block->link.next = next; | ||||
|     block->current = block; | ||||
|     this->UpdateBlockMac(block); | ||||
|     if (block->num_pages != num_freed_pages) { | ||||
|         UNREACHABLE(); | ||||
|     } | ||||
|  | ||||
|     if (prev) { | ||||
|         prev->link.next = block; | ||||
|         this->UpdateBlockMac(prev); | ||||
|         if (prev->num_pages != prev_num_pages) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|     } else { | ||||
|         // The block becomes the new list head; verify its MAC with the heap key first. | ||||
|         m_link.next = block; | ||||
|         if (block) { | ||||
|             u32 k0 = m_key[0]; | ||||
|             u32 k1 = m_key[1]; | ||||
|             u32 k2 = m_key[2]; | ||||
|             u32 k3 = m_key[3]; | ||||
|             for (int i = 0; i < 2; ++i) { | ||||
|                 int offset = 0; | ||||
|                 do { | ||||
|                     const u32 word = *(u32*)((char*)&block->num_pages + offset); | ||||
|                     offset += 4; | ||||
|                     k0 -= word - __ROR4__(k1, 3); | ||||
|                     k1 -= __ROR4__(k2, (k3 & 0xF) + 3) ^ __ROR4__(k3, (k0 & 0xF) + 13); | ||||
|                     k2 -= __ROR4__(k3, k0) * k1; | ||||
|                     k3 -= __ROR4__(k0, k1) * k2; | ||||
|                 } while (offset < 20); | ||||
|             } | ||||
|             if ((k0 ^ k1) != block->mac) { | ||||
|                 UNREACHABLE(); | ||||
|             } | ||||
|             m_link.next->link.prev = nullptr; | ||||
|             this->UpdateBlockMac(m_link.next); | ||||
|         } else { | ||||
|             m_link.prev = nullptr; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     if (next) { | ||||
|         next->link.prev = block; | ||||
|         this->UpdateBlockMac(next); | ||||
|         if (next->num_pages != next_num_pages) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|     } else { | ||||
|         this->SetLastBlock(block); | ||||
|     } | ||||
|  | ||||
|     // Coalesce with the previous block if the two are contiguous. | ||||
|     if (prev && block && | ||||
|         (Block*)((char*)prev + Memory::CITRA_PAGE_SIZE * prev->num_pages) == block) { | ||||
|         this->ValidateBlock(prev); | ||||
|         const u32 pages_before_merge = prev->num_pages; | ||||
|         this->ValidateBlock(block); | ||||
|         const u32 merged_pages = block->num_pages; | ||||
|         prev->link.next = block->link.next; | ||||
|         prev->num_pages += merged_pages; | ||||
|         this->UpdateBlockMac(prev); | ||||
|         if (prev->num_pages != pages_before_merge + merged_pages) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|         next = block->link.next; | ||||
|         this->ValidateBlock(next); | ||||
|         if (next) { | ||||
|             next->link.prev = prev; | ||||
|             this->UpdateBlockMac(next); | ||||
|         } else { | ||||
|             this->SetLastBlock(prev); | ||||
|         } | ||||
|         block = prev; | ||||
|     } | ||||
|  | ||||
|     // Coalesce with the next block if the two are contiguous. | ||||
|     if (block && next && | ||||
|         (Block*)((char*)block + Memory::CITRA_PAGE_SIZE * block->num_pages) == next) { | ||||
|         this->ValidateBlock(block); | ||||
|         const u32 pages_before_merge = block->num_pages; | ||||
|         this->ValidateBlock(next); | ||||
|         const u32 merged_pages = next->num_pages; | ||||
|         block->link.next = next->link.next; | ||||
|         block->num_pages += merged_pages; | ||||
|         this->UpdateBlockMac(block); | ||||
|         if (block->num_pages != pages_before_merge + merged_pages) { | ||||
|             UNREACHABLE(); | ||||
|         } | ||||
|         Block* after = next->link.next; | ||||
|         this->ValidateBlock(after); | ||||
|         if (after) { | ||||
|             after->link.prev = block; | ||||
|             this->UpdateBlockMac(after); | ||||
|         } else { | ||||
|             this->SetLastBlock(block); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     return true; | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
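The alignment handling in AllocateContiguous reduces to one small page computation; a standalone sketch of just that step (assuming the usual 0x1000-byte 3DS page size; MisalignmentPages is a hypothetical helper name, not part of this PR):

    #include <cassert>
    #include <cstdint>

    // Number of pages needed to advance `addr` to the next boundary of
    // `page_alignment` pages, mirroring KPageHeap::AllocateContiguous.
    static std::uint32_t MisalignmentPages(std::uint32_t addr, std::uint32_t page_alignment) {
        if (page_alignment <= 1) {
            return 0;
        }
        const std::uint32_t rem = (addr >> 12) % page_alignment;
        return rem ? page_alignment - rem : 0;
    }

    int main() {
        assert(MisalignmentPages(0x1000, 4) == 3); // page 1 -> next 4-page boundary is page 4
        assert(MisalignmentPages(0x4000, 4) == 0); // already aligned
    }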
64 src/core/hle/kernel/k_page_heap.h Normal file
							| @@ -0,0 +1,64 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "common/intrusive_list.h" | ||||
| #include "core/memory.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KPageHeap final { | ||||
| public: | ||||
|     explicit KPageHeap(Memory::MemorySystem& memory) : m_memory{memory} {} | ||||
|     ~KPageHeap() = default; | ||||
|  | ||||
|     constexpr u32 GetNumPages() const { | ||||
|         return m_region_size >> Memory::CITRA_PAGE_BITS; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetRegionStart() const { | ||||
|         return m_region_start; | ||||
|     } | ||||
|  | ||||
|     constexpr u32 GetRegionEnd() const { | ||||
|         return m_region_start + m_region_size; | ||||
|     } | ||||
|  | ||||
|     constexpr bool Contains(u32 addr) const { | ||||
|         return this->GetRegionStart() <= addr && addr < this->GetRegionEnd(); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     void Initialize(VAddr region_start, u32 region_size); | ||||
|     u32 GetTotalNumPages(); | ||||
|  | ||||
|     VAddr AllocateBackwards(u32 size); | ||||
|     VAddr AllocateContiguous(u32 size, u32 page_alignment); | ||||
|     void FreeBlock(u32 addr, u32 num_pages); | ||||
|  | ||||
| private: | ||||
|     struct Block final : public Common::IntrusiveListBaseNode<Block> { | ||||
|         u32 num_pages; | ||||
|         Block* current; | ||||
|         u32 nonce; | ||||
|         u32 mac; | ||||
|     }; | ||||
|  | ||||
|     using BlockList = Common::IntrusiveListBaseTraits<Block>::ListType; | ||||
|     using iterator = BlockList::iterator; | ||||
|  | ||||
|     Block* SplitBlock(Block* block, u32 new_block_size); | ||||
|     bool TryInsert(u32 freed_addr, u32 num_freed_pages, Block* prev_block, Block* next_block); | ||||
|     void SetLastBlock(Block* block); | ||||
|     void ValidateBlock(Block* block); | ||||
|     void UpdateBlockMac(Block* block); | ||||
|  | ||||
| private: | ||||
|     BlockList m_blocks{}; | ||||
|     Memory::MemorySystem& m_memory; | ||||
|     u32 m_region_start{}; | ||||
|     u32 m_region_size{}; | ||||
|     std::array<u32, 4> m_key{}; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
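A short sketch of the region math this header exposes (assuming `memory` is a live Memory::MemorySystem and a 0x1000-byte page size; the region base and size are illustrative):

    // Sketch only: a 0x100-page heap region.
    KPageHeap heap{memory};
    heap.Initialize(0x30000000, 0x100000);
    ASSERT(heap.GetNumPages() == 0x100);
    ASSERT(heap.Contains(0x30000000));           // the region start is inside
    ASSERT(!heap.Contains(heap.GetRegionEnd())); // the end is one past the last byte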
107 src/core/hle/kernel/k_page_manager.cpp Normal file
							| @@ -0,0 +1,107 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #include "core/hle/kernel/k_memory_manager.h" | ||||
| #include "core/hle/kernel/k_page_manager.h" | ||||
| #include "core/memory.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| void KPageManager::Initialize(u32 start_addr, u32 num_pages) { | ||||
|     // Initialize page manager address range. | ||||
|     m_start_addr = start_addr; | ||||
|     m_num_pages = num_pages; | ||||
|  | ||||
|     // Compute the number of pages to allocate from the base heap. | ||||
|     const u32 num_ref_counts_pages = ((sizeof(u32) * num_pages - 1) >> Memory::CITRA_PAGE_BITS) + 1; | ||||
|     auto& base_heap = m_memory_manager->GetBaseHeap(); | ||||
|  | ||||
|     // Allocate page refcounting memory. | ||||
|     u32 ref_counts_addr{}; | ||||
|     { | ||||
|         // KLightScopedMutex m{m_mutex}; | ||||
|         m_kernel_memory_usage += num_ref_counts_pages << Memory::CITRA_PAGE_BITS; | ||||
|         ref_counts_addr = base_heap.AllocateContiguous(num_ref_counts_pages, 0); | ||||
|         m_page_ref_counts = m_memory.GetPointer<u32>(ref_counts_addr); | ||||
|         ASSERT(m_page_ref_counts); | ||||
|     } | ||||
|  | ||||
|     // Zero-initialize reference counts. | ||||
|     if (num_pages) { | ||||
|         std::memset(m_page_ref_counts, 0, num_ref_counts_pages << Memory::CITRA_PAGE_BITS); | ||||
|     } | ||||
|  | ||||
|     // Track allocated pages. | ||||
|     this->IncrefPages(ref_counts_addr, num_ref_counts_pages); | ||||
| } | ||||
|  | ||||
| void KPageManager::IncrefPages(u32 addr, u32 num_pages) { | ||||
|     // KLightScopedMutex m{m_mutex}; | ||||
|  | ||||
|     // Increment page reference counts. | ||||
|     const u32 page_start = (addr - m_start_addr) >> Memory::CITRA_PAGE_BITS; | ||||
|     const u32 page_end = num_pages + page_start; | ||||
|     for (u32 page = page_start; page < page_end; page++) { | ||||
|         m_page_ref_counts[page]++; | ||||
|     } | ||||
| } | ||||
|  | ||||
| void KPageManager::FreeContiguous(u32 addr, u32 num_pages, MemoryOperation op) { | ||||
|     // Compute the page range to free; return early if it is empty. | ||||
|     const u32 page_start = (addr - m_start_addr) >> Memory::CITRA_PAGE_BITS; | ||||
|     const u32 page_end = page_start + num_pages; | ||||
|     if (page_start >= page_end) [[unlikely]] { | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     // Retrieve page heaps from the memory manager. | ||||
|     auto& application_heap = m_memory_manager->GetApplicationHeap(); | ||||
|     auto& base_heap = m_memory_manager->GetBaseHeap(); | ||||
|     auto& system_heap = m_memory_manager->GetSystemHeap(); | ||||
|  | ||||
|     // Frees the range of pages provided from the appropriate heap. | ||||
|     const auto FreePages = [&](u32 start_page, u32 num_pages) { | ||||
|         const u32 current_addr = m_start_addr + (start_page << Memory::CITRA_PAGE_BITS); | ||||
|         if (base_heap.Contains(current_addr)) { | ||||
|             base_heap.FreeBlock(current_addr, num_pages); | ||||
|         } else if (system_heap.Contains(current_addr)) { | ||||
|             system_heap.FreeBlock(current_addr, num_pages); | ||||
|         } else { | ||||
|             application_heap.FreeBlock(current_addr, num_pages); | ||||
|         } | ||||
|         // Update kernel memory usage if requested. | ||||
|         if (True(op & MemoryOperation::Kernel)) { | ||||
|             m_kernel_memory_usage -= num_pages << Memory::CITRA_PAGE_BITS; | ||||
|         } | ||||
|     }; | ||||
|  | ||||
|     // Iterate over the range of pages to free. | ||||
|     u32 start_free_page = 0; | ||||
|     u32 num_pages_to_free = 0; | ||||
|     for (u32 page = page_start; page < page_end; page++) { | ||||
|         const u32 new_count = --m_page_ref_counts[page]; | ||||
|         if (new_count) { | ||||
|             // Nothing to free, continue to next page. | ||||
|             if (num_pages_to_free == 0) { | ||||
|                 continue; | ||||
|             } | ||||
|             // Free accumulated pages and reset. | ||||
|             FreePages(start_free_page, num_pages_to_free); | ||||
|             num_pages_to_free = 0; | ||||
|         } else if (num_pages_to_free == 0) { | ||||
|             start_free_page = page; | ||||
|             num_pages_to_free = 1; | ||||
|         } else { | ||||
|             // Advance number of pages to free. | ||||
|             num_pages_to_free++; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Free any remaining pages. | ||||
|     if (num_pages_to_free > 0) { | ||||
|         FreePages(start_free_page, num_pages_to_free); | ||||
|     } | ||||
| } | ||||
|  | ||||
| } // namespace Kernel | ||||
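The freeing loop above is a run-length pass over the reference counts: consecutive pages whose count drops to zero are batched into a single FreePages call. A standalone sketch of the same grouping (CollectFreeRuns is a hypothetical helper, not part of this PR):

    #include <cstdint>
    #include <utility>
    #include <vector>

    // Decrement each count in [first, last) and collect maximal runs that hit
    // zero, mirroring the batching in KPageManager::FreeContiguous.
    static std::vector<std::pair<std::uint32_t, std::uint32_t>> CollectFreeRuns(
        std::vector<std::uint32_t>& counts, std::uint32_t first, std::uint32_t last) {
        std::vector<std::pair<std::uint32_t, std::uint32_t>> runs; // {start, length}
        std::uint32_t start = 0;
        std::uint32_t len = 0;
        for (std::uint32_t i = first; i < last; i++) {
            if (--counts[i]) {
                // Page is still referenced: flush any accumulated run.
                if (len) {
                    runs.emplace_back(start, len);
                    len = 0;
                }
            } else if (len == 0) {
                start = i; // begin a new run
                len = 1;
            } else {
                len++; // extend the current run
            }
        }
        if (len) {
            runs.emplace_back(start, len);
        }
        return runs;
    }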
63 src/core/hle/kernel/k_page_manager.h Normal file
							| @@ -0,0 +1,63 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <atomic> | ||||
|  | ||||
| #include "common/common_funcs.h" | ||||
| #include "common/common_types.h" | ||||
|  | ||||
| namespace Memory { | ||||
| class MemorySystem; | ||||
| } | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| enum class MemoryOperation : u32 { | ||||
|     None = 0x0, | ||||
|     RegionApplication = 0x100, | ||||
|     RegionSystem = 0x200, | ||||
|     RegionBase = 0x300, | ||||
|     Kernel = 0x80000000, | ||||
|     RegionBaseKernel = Kernel | RegionBase, | ||||
|     Free = 0x1, | ||||
|     Reserve = 0x2, | ||||
|     Alloc = 0x3, | ||||
|     Map = 0x4, | ||||
|     Unmap = 0x5, | ||||
|     Prot = 0x6, | ||||
|     OpMask = 0xFF, | ||||
|     RegionMask = 0xF00, | ||||
|     LinearFlag = 0x10000, | ||||
| }; | ||||
| DECLARE_ENUM_FLAG_OPERATORS(MemoryOperation) | ||||
|  | ||||
| class KMemoryManager; | ||||
|  | ||||
| class KPageManager { | ||||
| public: | ||||
|     explicit KPageManager(Memory::MemorySystem& memory, KMemoryManager* memory_manager) | ||||
|         : m_memory{memory}, m_memory_manager{memory_manager} {} | ||||
|     ~KPageManager() = default; | ||||
|  | ||||
|     std::atomic<u32>& GetKernelMemoryUsage() noexcept { | ||||
|         return m_kernel_memory_usage; | ||||
|     } | ||||
|  | ||||
|     void Initialize(u32 start_addr, u32 num_pages); | ||||
|     void IncrefPages(u32 addr, u32 num_pages); | ||||
|     void FreeContiguous(u32 addr, u32 num_pages, MemoryOperation op); | ||||
|  | ||||
| private: | ||||
|     Memory::MemorySystem& m_memory; | ||||
|     KMemoryManager* m_memory_manager{}; | ||||
|     u32 m_start_addr{}; | ||||
|     u32 m_num_pages{}; | ||||
|     u32* m_page_ref_counts{}; | ||||
|     std::atomic<u32> m_kernel_memory_usage{}; | ||||
|     // KLightMutex m_mutex; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
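MemoryOperation packs three independent fields into one value: an operation code in the low byte (OpMask), a region selector in bits 8-11 (RegionMask), and a kernel-ownership bit at the top. A decomposition sketch (True() is the flag helper already used by KPageManager::FreeContiguous above):

    // Sketch only: build a combined value, then split it with the declared masks.
    const auto op = MemoryOperation::RegionBaseKernel | MemoryOperation::Alloc;
    const auto region = op & MemoryOperation::RegionMask; // MemoryOperation::RegionBase
    const auto action = op & MemoryOperation::OpMask;     // MemoryOperation::Alloc
    const bool kernel_owned = True(op & MemoryOperation::Kernel); // true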
1511 src/core/hle/kernel/k_page_table.cpp Normal file (file diff suppressed because it is too large)
134 src/core/hle/kernel/k_page_table.h Normal file
							| @@ -0,0 +1,134 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "common/common_funcs.h" | ||||
| #include "core/hle/kernel/k_memory_block_manager.h" | ||||
| #include "core/hle/result.h" | ||||
|  | ||||
| namespace Common { | ||||
| class PageTable; | ||||
| } | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| enum class KMemoryUpdateFlags { | ||||
|     None = 0x0, | ||||
|     State = 0x1, | ||||
|     Perms = 0x100, | ||||
|     StateAndPerms = State | Perms, | ||||
| }; | ||||
| DECLARE_ENUM_FLAG_OPERATORS(KMemoryUpdateFlags) | ||||
|  | ||||
| enum class MemoryOperation : u32; | ||||
|  | ||||
| class KPageGroup; | ||||
| class KPageManager; | ||||
|  | ||||
| class KPageTable { | ||||
| public: | ||||
|     explicit KPageTable(KernelSystem& kernel, KPageManager* page_manager) | ||||
|         : m_kernel{kernel}, m_page_manager{page_manager}, m_memory_block_manager{kernel} {} | ||||
|     ~KPageTable() = default; | ||||
|  | ||||
|     Common::PageTable& GetImpl() { | ||||
|         return *m_impl; | ||||
|     } | ||||
|  | ||||
|     void InitializeL1Table(u32** outL1TablePtr, u32* L1Table); | ||||
|  | ||||
|     ResultCode CheckAndUpdateAddrRangeMaskedStateAndPerms( | ||||
|         u32 addr, u32 num_pages, KMemoryState state_mask, KMemoryState expected_state, | ||||
|         KMemoryPermission min_perms, KMemoryState new_state, KMemoryPermission new_perms); | ||||
|     ResultCode CheckAddressRangeSizeAndState(u32 addr, u32 size, KMemoryState state); | ||||
|     ResultCode CheckAddressRangeSizeAndStateFlags(u32 addr, u32 size, KMemoryState stateMask, | ||||
|                                                   KMemoryState expectedStateFlags); | ||||
|     ResultCode CheckMemoryBlockAttributes(u32 addr, u32 size, KMemoryState state, | ||||
|                                           KMemoryPermission perms); | ||||
|     ResultCode CheckAddrRangeMaskedStateAndPerms(u32 addr, u32 size, KMemoryState stateMask, | ||||
|                                                  KMemoryState expectedState, | ||||
|                                                  KMemoryPermission minPerms); | ||||
|     ResultCode CheckAndChangeGroupStateAndPerms(u32 addr, KPageGroup* pgGroup, | ||||
|                                                 KMemoryState stateMask, KMemoryState expectedState, | ||||
|                                                 KMemoryPermission minPerms, KMemoryState newState, | ||||
|                                                 KMemoryPermission newPerms); | ||||
|  | ||||
|     ResultCode MapL2Entries(u32 va, u32 pa, u32 numPages_reused, u32* attribsPtr, bool isLarge); | ||||
|     ResultCode MapL1Entries(u32 va, u32 pa, u32 numPages, u32* attribsPtr, bool isLarge); | ||||
|     ResultCode MapContiguousPhysicalAddressRange(u32 va, u32 pa, u32 numPages, u32* mmuAttribs); | ||||
|     ResultCode MergeContiguousEntries(u32 va); | ||||
|     ResultCode MapNewlyAllocatedPhysicalAddressRange(u32 va, u32 pa, u32 numPages, u32* mmuAttribs); | ||||
|  | ||||
|     ResultCode RemapMemoryInterprocess(KPageTable* dstPgTbl, KPageTable* srcPgTbl, u32 dstAddr, | ||||
|                                        u32 srcAddr, u32 numPages, KMemoryState dstMemState, | ||||
|                                        KMemoryPermission dstMemPerms); | ||||
|  | ||||
|     ResultCode ChangePageAttributes(u32 addr, u32 size, u32* mmuAttribs); | ||||
|     ResultCode CheckAndUnmapPageGroup(u32 addr, KPageGroup* pgGroup); | ||||
|  | ||||
|     ResultCode CreateAlias(u32 srcAddr, u32 dstAddr, u32 numPages, KMemoryState expectedStateSrc, | ||||
|                            KMemoryPermission expectedMinPermsSrc, KMemoryState newStateSrc, | ||||
|                            KMemoryPermission newPermsSrc, KMemoryState newStateDst, | ||||
|                            KMemoryPermission newPermsDst); | ||||
|     ResultCode DestroyAlias(u32 srcAddr, u32 dstAddr, u32 numPages, KMemoryState expectedStateSrc, | ||||
|                             KMemoryPermission expectedMinPermsSrc, KMemoryState expectedStateDst, | ||||
|                             KMemoryPermission expectedMinPermsDst, KMemoryState newStateSrc, | ||||
|                             KMemoryPermission newPermsSrc); | ||||
|  | ||||
|     void Unmap(u32 addr, u32 numPages); | ||||
|     void UnmapEntries(u32 currentVa, u32 numPages, KPageGroup* outPgGroupUnmapped); | ||||
|  | ||||
|     ResultCode OperateOnGroup(u32 addr, KPageGroup* pgGroup, KMemoryState state, | ||||
|                               KMemoryPermission perms, KMemoryUpdateFlags updateFlags); | ||||
|     ResultCode OperateOnAnyFreeBlockInRegionWithGuardPage(u32* outAddr, u32 blockNumPages, | ||||
|                                                           u32 regionStart, u32 regionNumPages, | ||||
|                                                           u32 pa, KMemoryState state, | ||||
|                                                           KMemoryPermission perms, | ||||
|                                                           KMemoryUpdateFlags updateFlags, | ||||
|                                                           MemoryOperation region); | ||||
|     ResultCode Operate(u32 va, u32 numPages, u32 pa, KMemoryState state, KMemoryPermission perms, | ||||
|                        KMemoryUpdateFlags updateFlags, MemoryOperation region); | ||||
|  | ||||
|     ResultCode MakePageGroup(KPageGroup& pg, VAddr addr, u32 num_pages); | ||||
|     ResultCode QueryInfo(KMemoryInfo* outMemoryInfo, u32* pageInfo, u32 addr); | ||||
|     ResultCode CopyMemoryInterprocessForIpc(u32 dstAddr, KPageTable* srcPgTbl, u32 srcAddr, | ||||
|                                             u32 size); | ||||
|     ResultCode SplitContiguousEntries(u32 va, u32 size); | ||||
|  | ||||
|     u32 ConvertVaToPa(u32** L1TablePtr, u32 va); | ||||
|  | ||||
|     void InvalidateAllTlbEntries(); | ||||
|     void InvalidateEntireInstructionCache(); | ||||
|     void InvalidateEntireInstructionCacheLocal(); | ||||
|     void InvalidateTlbEntryByMva(u32 addr); | ||||
|     void InvalidateDataCacheRange(u32 addr, u32 size); | ||||
|     void InvalidateDataCacheRangeLocal(u32 addr, u32 size); | ||||
|  | ||||
|     void CleanInvalidateEntireDataCacheLocal(); | ||||
|     void CleanInvalidateDataCacheRangeLocal(u32 addr, u32 size); | ||||
|     void CleanInvalidateDataCacheRange(u32 addr, u32 size); | ||||
|     void CleanInvalidateInstructionCacheRange(u32 addr, u32 size); | ||||
|     void CleanInvalidateEntireDataCache(); | ||||
|     void CleanDataCacheRange(u32 addr, u32 size); | ||||
|  | ||||
| private: | ||||
|     KernelSystem& m_kernel; | ||||
|     KPageManager* m_page_manager; | ||||
|     // KLightMutex mutex; | ||||
|     std::unique_ptr<Common::PageTable> m_impl{}; | ||||
|     std::array<bool, 4> m_tlb_needs_invalidating{}; | ||||
|     KMemoryBlockManager m_memory_block_manager; | ||||
|     u32 m_translation_table_base{}; | ||||
|     u8 m_asid{}; | ||||
|     bool m_is_kernel{}; | ||||
|     bool m_use_small_pages{}; | ||||
|     u32 m_address_space_start{}; | ||||
|     u32 m_address_space_end{}; | ||||
|     u32 m_linear_address_range_start{}; | ||||
|     u32 m_translation_table_size{}; | ||||
|     u32* m_l1_table{}; | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
191 src/core/hle/kernel/k_slab_heap.h Normal file
							| @@ -0,0 +1,191 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include <atomic> | ||||
| #include <memory> | ||||
|  | ||||
| #include "common/assert.h" | ||||
| #include "common/atomic_ops.h" | ||||
| #include "common/common_funcs.h" | ||||
| #include "common/common_types.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| class KernelSystem; | ||||
|  | ||||
| namespace impl { | ||||
|  | ||||
| class KSlabHeapImpl { | ||||
|     CITRA_NON_COPYABLE(KSlabHeapImpl); | ||||
|     CITRA_NON_MOVEABLE(KSlabHeapImpl); | ||||
|  | ||||
| public: | ||||
|     struct Node { | ||||
|         Node* next{}; | ||||
|     }; | ||||
|  | ||||
| public: | ||||
|     constexpr KSlabHeapImpl() = default; | ||||
|  | ||||
|     void Initialize() { | ||||
|         ASSERT(m_head == nullptr); | ||||
|     } | ||||
|  | ||||
|     Node* GetHead() const { | ||||
|         return m_head; | ||||
|     } | ||||
|  | ||||
|     void* Allocate() { | ||||
|         Node* ret = m_head; | ||||
|         if (ret != nullptr) [[likely]] { | ||||
|             m_head = ret->next; | ||||
|         } | ||||
|         return ret; | ||||
|     } | ||||
|  | ||||
|     void Free(void* obj) { | ||||
|         Node* node = static_cast<Node*>(obj); | ||||
|         node->next = m_head; | ||||
|         m_head = node; | ||||
|     } | ||||
|  | ||||
| private: | ||||
|     std::atomic<Node*> m_head{}; | ||||
| }; | ||||
|  | ||||
| } // namespace impl | ||||
|  | ||||
| class KSlabHeapBase : protected impl::KSlabHeapImpl { | ||||
|     CITRA_NON_COPYABLE(KSlabHeapBase); | ||||
|     CITRA_NON_MOVEABLE(KSlabHeapBase); | ||||
|  | ||||
| private: | ||||
|     size_t m_obj_size{}; | ||||
|     uintptr_t m_peak{}; | ||||
|     uintptr_t m_start{}; | ||||
|     uintptr_t m_end{}; | ||||
|  | ||||
| private: | ||||
|     void UpdatePeakImpl(uintptr_t obj) { | ||||
|         const uintptr_t alloc_peak = obj + this->GetObjectSize(); | ||||
|         uintptr_t cur_peak = m_peak; | ||||
|         do { | ||||
|             if (alloc_peak <= cur_peak) { | ||||
|                 break; | ||||
|             } | ||||
|         } while ( | ||||
|             !Common::AtomicCompareAndSwap(std::addressof(m_peak), alloc_peak, cur_peak, cur_peak)); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     constexpr KSlabHeapBase() = default; | ||||
|  | ||||
|     bool Contains(uintptr_t address) const { | ||||
|         return m_start <= address && address < m_end; | ||||
|     } | ||||
|  | ||||
|     void Initialize(size_t obj_size, void* memory, size_t memory_size) { | ||||
|         // Ensure we don't initialize a slab using null memory. | ||||
|         ASSERT(memory != nullptr); | ||||
|  | ||||
|         // Set our object size. | ||||
|         m_obj_size = obj_size; | ||||
|  | ||||
|         // Initialize the base allocator. | ||||
|         KSlabHeapImpl::Initialize(); | ||||
|  | ||||
|         // Set our tracking variables. | ||||
|         const size_t num_obj = (memory_size / obj_size); | ||||
|         m_start = reinterpret_cast<uintptr_t>(memory); | ||||
|         m_end = m_start + num_obj * obj_size; | ||||
|         m_peak = m_start; | ||||
|  | ||||
|         // Free the objects. | ||||
|         u8* cur = reinterpret_cast<u8*>(m_end); | ||||
|  | ||||
|         for (size_t i = 0; i < num_obj; i++) { | ||||
|             cur -= obj_size; | ||||
|             KSlabHeapImpl::Free(cur); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     size_t GetSlabHeapSize() const { | ||||
|         return (m_end - m_start) / this->GetObjectSize(); | ||||
|     } | ||||
|  | ||||
|     size_t GetObjectSize() const { | ||||
|         return m_obj_size; | ||||
|     } | ||||
|  | ||||
|     void* Allocate() { | ||||
|         void* obj = KSlabHeapImpl::Allocate(); | ||||
|         return obj; | ||||
|     } | ||||
|  | ||||
|     void Free(void* obj) { | ||||
|         // Don't allow freeing an object that wasn't allocated from this heap. | ||||
|         const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj)); | ||||
|         ASSERT(contained); | ||||
|         KSlabHeapImpl::Free(obj); | ||||
|     } | ||||
|  | ||||
|     size_t GetObjectIndex(const void* obj) const { | ||||
|         return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize(); | ||||
|     } | ||||
|  | ||||
|     size_t GetPeakIndex() const { | ||||
|         return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak)); | ||||
|     } | ||||
|  | ||||
|     uintptr_t GetSlabHeapAddress() const { | ||||
|         return m_start; | ||||
|     } | ||||
|  | ||||
|     size_t GetNumRemaining() const { | ||||
|         // Counting the remaining objects is only supported under debug configurations, | ||||
|         // so this implementation reports zero. | ||||
|         return 0; | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| class KSlabHeap final : public KSlabHeapBase { | ||||
| private: | ||||
|     using BaseHeap = KSlabHeapBase; | ||||
|  | ||||
| public: | ||||
|     constexpr KSlabHeap() = default; | ||||
|  | ||||
|     void Initialize(void* memory, size_t memory_size) { | ||||
|         BaseHeap::Initialize(sizeof(T), memory, memory_size); | ||||
|     } | ||||
|  | ||||
|     T* Allocate() { | ||||
|         T* obj = static_cast<T*>(BaseHeap::Allocate()); | ||||
|  | ||||
|         if (obj != nullptr) [[likely]] { | ||||
|             std::construct_at(obj); | ||||
|         } | ||||
|         return obj; | ||||
|     } | ||||
|  | ||||
|     T* Allocate(KernelSystem& kernel) { | ||||
|         T* obj = static_cast<T*>(BaseHeap::Allocate()); | ||||
|  | ||||
|         if (obj != nullptr) [[likely]] { | ||||
|             std::construct_at(obj, kernel); | ||||
|         } | ||||
|         return obj; | ||||
|     } | ||||
|  | ||||
|     void Free(T* obj) { | ||||
|         BaseHeap::Free(obj); | ||||
|     } | ||||
|  | ||||
|     size_t GetObjectIndex(const T* obj) const { | ||||
|         return BaseHeap::GetObjectIndex(obj); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
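A usage sketch for the slab heap (the Dummy type and its static backing buffer are hypothetical; in the kernel the backing memory comes from reserved slab regions). Note that a free slot stores a Node* in place of the object, so object types must be at least pointer-sized:

    // Sketch only: carve sixteen objects out of a static buffer.
    struct Dummy {
        u64 a{};
        u64 b{};
    };

    alignas(Dummy) u8 storage[sizeof(Dummy) * 16];
    KSlabHeap<Dummy> heap;
    heap.Initialize(storage, sizeof(storage)); // builds the free list back to front
    Dummy* obj = heap.Allocate();              // pops the free-list head, constructs in place
    heap.Free(obj);                            // pushes the slot back onto the free list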
| @@ -8,9 +8,11 @@ | ||||
| #include "common/archives.h" | ||||
| #include "common/serialization/atomic.h" | ||||
| #include "core/hle/kernel/client_port.h" | ||||
| #include "core/hle/kernel/config_mem.h" | ||||
| #include "core/hle/kernel/handle_table.h" | ||||
| #include "core/hle/kernel/ipc_debugger/recorder.h" | ||||
| #include "core/hle/kernel/k_linked_list.h" | ||||
| #include "core/hle/kernel/k_memory_block.h" | ||||
| #include "core/hle/kernel/k_page_group.h" | ||||
| #include "core/hle/kernel/k_slab_heap.h" | ||||
| #include "core/hle/kernel/kernel.h" | ||||
| #include "core/hle/kernel/memory.h" | ||||
| #include "core/hle/kernel/process.h" | ||||
| @@ -29,6 +31,7 @@ KernelSystem::KernelSystem(Memory::MemorySystem& memory, Core::Timing& timing, | ||||
|     : memory(memory), timing(timing), | ||||
|       prepare_reschedule_callback(std::move(prepare_reschedule_callback)), memory_mode(memory_mode), | ||||
|       n3ds_hw_caps(n3ds_hw_caps) { | ||||
|     slab_heap_container = std::make_unique<SlabHeapContainer>(); | ||||
|     std::generate(memory_regions.begin(), memory_regions.end(), | ||||
|                   [] { return std::make_shared<MemoryRegionInfo>(); }); | ||||
|     MemoryInit(memory_mode, n3ds_hw_caps.memory_mode, override_init_time); | ||||
| @@ -192,6 +195,27 @@ void KernelSystem::serialize(Archive& ar, const unsigned int file_version) { | ||||
|     } | ||||
| } | ||||
|  | ||||
| struct KernelSystem::SlabHeapContainer { | ||||
|     KSlabHeap<KLinkedListNode> linked_list_node; | ||||
|     KSlabHeap<KBlockInfo> block_info; | ||||
|     KSlabHeap<KMemoryBlock> memory_block; | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| KSlabHeap<T>& KernelSystem::SlabHeap() { | ||||
|     if constexpr (std::is_same_v<T, KLinkedListNode>) { | ||||
|         return slab_heap_container->linked_list_node; | ||||
|     } else if constexpr (std::is_same_v<T, KBlockInfo>) { | ||||
|         return slab_heap_container->block_info; | ||||
|     } else if constexpr (std::is_same_v<T, KMemoryBlock>) { | ||||
|         return slab_heap_container->memory_block; | ||||
|     } else { | ||||
|         static_assert(!sizeof(T), "No slab heap is defined for this type"); | ||||
|     } | ||||
| } | ||||
|  | ||||
| template KSlabHeap<KLinkedListNode>& KernelSystem::SlabHeap(); | ||||
| template KSlabHeap<KBlockInfo>& KernelSystem::SlabHeap(); | ||||
| template KSlabHeap<KMemoryBlock>& KernelSystem::SlabHeap(); | ||||
|  | ||||
| SERIALIZE_IMPL(KernelSystem) | ||||
|  | ||||
| } // namespace Kernel | ||||
|   | ||||
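Extending this dispatch to another slab-allocated type follows one fixed pattern; a sketch for a hypothetical KNewType (not part of this PR):

    // 1. Add a member to SlabHeapContainer:   KSlabHeap<KNewType> new_type;
    // 2. Add a branch to KernelSystem::SlabHeap():
    //        } else if constexpr (std::is_same_v<T, KNewType>) {
    //            return slab_heap_container->new_type;
    //        }
    // 3. Add the explicit instantiation:
    //        template KSlabHeap<KNewType>& KernelSystem::SlabHeap();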
| @@ -130,6 +130,9 @@ private: | ||||
|     friend class boost::serialization::access; | ||||
| }; | ||||
|  | ||||
| template <typename T> | ||||
| class KSlabHeap; | ||||
|  | ||||
| class KernelSystem { | ||||
| public: | ||||
|     explicit KernelSystem(Memory::MemorySystem& memory, Core::Timing& timing, | ||||
| @@ -260,6 +263,10 @@ public: | ||||
|                                                               MemoryPermission other_permissions, | ||||
|                                                               std::string name = "Unknown Applet"); | ||||
|  | ||||
|     /// Gets the slab heap for the specified kernel object type. | ||||
|     template <typename T> | ||||
|     KSlabHeap<T>& SlabHeap(); | ||||
|  | ||||
|     u32 GenerateObjectID(); | ||||
|  | ||||
|     /// Retrieves a process from the current list of processes. | ||||
| @@ -369,6 +376,10 @@ private: | ||||
|     MemoryMode memory_mode; | ||||
|     New3dsHwCapabilities n3ds_hw_caps; | ||||
|  | ||||
|     /// Helper to encapsulate all slab heaps in a single heap allocated container | ||||
|     struct SlabHeapContainer; | ||||
|     std::unique_ptr<SlabHeapContainer> slab_heap_container; | ||||
|  | ||||
|     friend class boost::serialization::access; | ||||
|     template <class Archive> | ||||
|     void serialize(Archive& ar, const unsigned int file_version); | ||||
|   | ||||
130 src/core/hle/kernel/slab_helpers.h Normal file
							| @@ -0,0 +1,130 @@ | ||||
| // Copyright 2023 Citra Emulator Project | ||||
| // Licensed under GPLv2 or any later version | ||||
| // Refer to the license.txt file included. | ||||
|  | ||||
| #pragma once | ||||
|  | ||||
| #include "core/hle/kernel/k_auto_object.h" | ||||
| #include "core/hle/kernel/kernel.h" | ||||
|  | ||||
| namespace Kernel { | ||||
|  | ||||
| template <class Derived> | ||||
| class KSlabAllocated { | ||||
| public: | ||||
|     constexpr KSlabAllocated() = default; | ||||
|  | ||||
|     size_t GetSlabIndex(KernelSystem& kernel) const { | ||||
|         return kernel.SlabHeap<Derived>().GetObjectIndex(static_cast<const Derived*>(this)); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     static void InitializeSlabHeap(KernelSystem& kernel, void* memory, size_t memory_size) { | ||||
|         kernel.SlabHeap<Derived>().Initialize(memory, memory_size); | ||||
|     } | ||||
|  | ||||
|     static Derived* Allocate(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().Allocate(kernel); | ||||
|     } | ||||
|  | ||||
|     static void Free(KernelSystem& kernel, Derived* obj) { | ||||
|         kernel.SlabHeap<Derived>().Free(obj); | ||||
|     } | ||||
|  | ||||
|     static size_t GetObjectSize(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetObjectSize(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetSlabHeapSize(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetSlabHeapSize(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetPeakIndex(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetPeakIndex(); | ||||
|     } | ||||
|  | ||||
|     static uintptr_t GetSlabHeapAddress(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetSlabHeapAddress(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetNumRemaining(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetNumRemaining(); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <typename Derived, typename Base> | ||||
| class KAutoObjectWithSlabHeap : public Base { | ||||
|     static_assert(std::is_base_of<KAutoObject, Base>::value); | ||||
|  | ||||
| private: | ||||
|     static Derived* Allocate(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().Allocate(kernel); | ||||
|     } | ||||
|  | ||||
|     static void Free(KernelSystem& kernel, Derived* obj) { | ||||
|         kernel.SlabHeap<Derived>().Free(obj); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     explicit KAutoObjectWithSlabHeap(KernelSystem& kernel) : Base(kernel) {} | ||||
|     virtual ~KAutoObjectWithSlabHeap() = default; | ||||
|  | ||||
|     virtual void Destroy() override { | ||||
|         const bool is_initialized = this->IsInitialized(); | ||||
|         uintptr_t arg = 0; | ||||
|         if (is_initialized) { | ||||
|             arg = this->GetPostDestroyArgument(); | ||||
|             this->Finalize(); | ||||
|         } | ||||
|         Free(Base::m_kernel, static_cast<Derived*>(this)); | ||||
|         if (is_initialized) { | ||||
|             Derived::PostDestroy(arg); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     virtual bool IsInitialized() const { | ||||
|         return true; | ||||
|     } | ||||
|     virtual uintptr_t GetPostDestroyArgument() const { | ||||
|         return 0; | ||||
|     } | ||||
|  | ||||
|     size_t GetSlabIndex() const { | ||||
|         return Base::m_kernel.SlabHeap<Derived>().GetObjectIndex( | ||||
|             static_cast<const Derived*>(this)); | ||||
|     } | ||||
|  | ||||
| public: | ||||
|     static void InitializeSlabHeap(KernelSystem& kernel, void* memory, size_t memory_size) { | ||||
|         kernel.SlabHeap<Derived>().Initialize(memory, memory_size); | ||||
|     } | ||||
|  | ||||
|     static Derived* Create(KernelSystem& kernel) { | ||||
|         Derived* obj = Allocate(kernel); | ||||
|         if (obj != nullptr) { | ||||
|             KAutoObject::Create(obj); | ||||
|         } | ||||
|         return obj; | ||||
|     } | ||||
|  | ||||
|     static size_t GetObjectSize(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetObjectSize(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetSlabHeapSize(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetSlabHeapSize(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetPeakIndex(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetPeakIndex(); | ||||
|     } | ||||
|  | ||||
|     static uintptr_t GetSlabHeapAddress(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetSlabHeapAddress(); | ||||
|     } | ||||
|  | ||||
|     static size_t GetNumRemaining(KernelSystem& kernel) { | ||||
|         return kernel.SlabHeap<Derived>().GetNumRemaining(); | ||||
|     } | ||||
| }; | ||||
|  | ||||
| } // namespace Kernel | ||||
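Because KBlockInfo earlier in this PR derives from KSlabAllocated<KBlockInfo>, its storage comes from the kernel slab heap registered in KernelSystem. A short sketch (the address and page count are illustrative; `kernel` is an assumed live KernelSystem):

    // Sketch only: allocate, initialize, and release one KBlockInfo.
    KBlockInfo* info = kernel.SlabHeap<KBlockInfo>().Allocate();
    if (info) {
        info->Initialize(0x08000000, 4);
        kernel.SlabHeap<KBlockInfo>().Free(info);
    }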
| @@ -408,3 +408,130 @@ private: | ||||
|     auto CONCAT2(check_result_L, __LINE__) = source;                                               \ | ||||
|     if (CONCAT2(check_result_L, __LINE__).IsError())                                               \ | ||||
|         return CONCAT2(check_result_L, __LINE__); | ||||
|  | ||||
| #define R_SUCCEEDED(res) (static_cast<ResultCode>(res).IsSuccess()) | ||||
| #define R_FAILED(res) (!static_cast<ResultCode>(res).IsSuccess()) | ||||
|  | ||||
| namespace ResultImpl { | ||||
| template <auto EvaluateResult, class F> | ||||
| class ScopedResultGuard { | ||||
| private: | ||||
|     ResultCode& m_ref; | ||||
|     F m_f; | ||||
|  | ||||
| public: | ||||
|     constexpr ScopedResultGuard(ResultCode& ref, F f) : m_ref(ref), m_f(std::move(f)) {} | ||||
|     constexpr ~ScopedResultGuard() { | ||||
|         if (EvaluateResult(m_ref)) { | ||||
|             m_f(); | ||||
|         } | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <auto EvaluateResult> | ||||
| class ResultReferenceForScopedResultGuard { | ||||
| private: | ||||
|     ResultCode& m_ref; | ||||
|  | ||||
| public: | ||||
|     constexpr ResultReferenceForScopedResultGuard(ResultCode& r) : m_ref(r) {} | ||||
|     constexpr operator ResultCode&() const { | ||||
|         return m_ref; | ||||
|     } | ||||
| }; | ||||
|  | ||||
| template <auto EvaluateResult, typename F> | ||||
| constexpr ScopedResultGuard<EvaluateResult, F> operator+( | ||||
|     ResultReferenceForScopedResultGuard<EvaluateResult> ref, F&& f) { | ||||
|     return ScopedResultGuard<EvaluateResult, F>(static_cast<ResultCode&>(ref), std::forward<F>(f)); | ||||
| } | ||||
|  | ||||
| constexpr bool EvaluateResultSuccess(const ResultCode& r) { | ||||
|     return R_SUCCEEDED(r); | ||||
| } | ||||
| constexpr bool EvaluateResultFailure(const ResultCode& r) { | ||||
|     return R_FAILED(r); | ||||
| } | ||||
|  | ||||
| template <typename T> | ||||
| constexpr void UpdateCurrentResultReference(T result_reference, ResultCode result) = delete; | ||||
| // Intentionally not defined | ||||
|  | ||||
| template <> | ||||
| constexpr void UpdateCurrentResultReference<ResultCode&>(ResultCode& result_reference, | ||||
|                                                          ResultCode result) { | ||||
|     result_reference = result; | ||||
| } | ||||
|  | ||||
| template <> | ||||
| constexpr void UpdateCurrentResultReference<const ResultCode>(ResultCode result_reference, | ||||
|                                                               ResultCode result) {} | ||||
| } // namespace ResultImpl | ||||
|  | ||||
| #define DECLARE_CURRENT_RESULT_REFERENCE_AND_STORAGE(COUNTER_VALUE)                                \ | ||||
|     [[maybe_unused]] constexpr bool CONCAT2(HasPrevRef_, COUNTER_VALUE) =                          \ | ||||
|         std::same_as<decltype(__TmpCurrentResultReference), ResultCode&>;                          \ | ||||
|     [[maybe_unused]] ResultCode CONCAT2(PrevRef_, COUNTER_VALUE) = __TmpCurrentResultReference;   \ | ||||
|     [[maybe_unused]] ResultCode CONCAT2(__tmp_result_, COUNTER_VALUE) = RESULT_SUCCESS;            \ | ||||
|     ResultCode& __TmpCurrentResultReference = CONCAT2(HasPrevRef_, COUNTER_VALUE)                  \ | ||||
|                                               ? CONCAT2(PrevRef_, COUNTER_VALUE)                   \ | ||||
|                                               : CONCAT2(__tmp_result_, COUNTER_VALUE) | ||||
|  | ||||
| #define ON_RESULT_RETURN_IMPL(...)                                                                 \ | ||||
|     static_assert(std::same_as<decltype(__TmpCurrentResultReference), ResultCode&>);               \ | ||||
|     auto CONCAT2(RESULT_GUARD_STATE_, __COUNTER__) =                                               \ | ||||
|         ResultImpl::ResultReferenceForScopedResultGuard<__VA_ARGS__>(                              \ | ||||
|             __TmpCurrentResultReference) +                                                         \ | ||||
|         [&]() | ||||
|  | ||||
| #define ON_RESULT_FAILURE_2 ON_RESULT_RETURN_IMPL(ResultImpl::EvaluateResultFailure) | ||||
|  | ||||
| #define ON_RESULT_FAILURE                                                                          \ | ||||
|     DECLARE_CURRENT_RESULT_REFERENCE_AND_STORAGE(__COUNTER__);                                     \ | ||||
|     ON_RESULT_FAILURE_2 | ||||
|  | ||||
| #define ON_RESULT_SUCCESS_2 ON_RESULT_RETURN_IMPL(ResultImpl::EvaluateResultSuccess) | ||||
|  | ||||
| #define ON_RESULT_SUCCESS                                                                          \ | ||||
|     DECLARE_CURRENT_RESULT_REFERENCE_AND_STORAGE(__COUNTER__);                                     \ | ||||
|     ON_RESULT_SUCCESS_2 | ||||
|  | ||||
| constexpr inline ResultCode __TmpCurrentResultReference = RESULT_SUCCESS; | ||||
|  | ||||
| /// Returns a result. | ||||
| #define R_RETURN(res_expr)                                                                         \ | ||||
|     {                                                                                              \ | ||||
|         const ResultCode _tmp_r_throw_rc = (res_expr);                                             \ | ||||
|         ResultImpl::UpdateCurrentResultReference<decltype(__TmpCurrentResultReference)>(           \ | ||||
|             __TmpCurrentResultReference, _tmp_r_throw_rc);                                         \ | ||||
|         return _tmp_r_throw_rc;                                                                    \ | ||||
|     } | ||||
|  | ||||
| /// Returns RESULT_SUCCESS. | ||||
| #define R_SUCCEED() R_RETURN(RESULT_SUCCESS) | ||||
|  | ||||
| /// Throws a result. | ||||
| #define R_THROW(res_expr) R_RETURN(res_expr) | ||||
|  | ||||
| /// Evaluates a boolean expression, and returns a result unless that expression is true. | ||||
| #define R_UNLESS(expr, res)                                                                        \ | ||||
|     {                                                                                              \ | ||||
|         if (!(expr)) {                                                                             \ | ||||
|             R_THROW(res);                                                                          \ | ||||
|         }                                                                                          \ | ||||
|     } | ||||
|  | ||||
| /// Evaluates an expression that returns a result, and returns the result if it would fail. | ||||
| #define R_TRY(res_expr)                                                                            \ | ||||
|     {                                                                                              \ | ||||
|         const auto _tmp_r_try_rc = (res_expr);                                                     \ | ||||
|         if (R_FAILED(_tmp_r_try_rc)) {                                                             \ | ||||
|             R_THROW(_tmp_r_try_rc);                                                                \ | ||||
|         }                                                                                          \ | ||||
|     } | ||||
|  | ||||
| /// Evaluates a boolean expression, and succeeds if that expression is true. | ||||
| #define R_SUCCEED_IF(expr) R_UNLESS(!(expr), RESULT_SUCCESS) | ||||
|  | ||||
| /// Evaluates a boolean expression, and asserts if that expression is false. | ||||
| #define R_ASSERT(expr) ASSERT(R_SUCCEEDED(expr)) | ||||
|   | ||||
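A sketch of how the guard and propagation macros compose (Step1/Step2 are hypothetical ResultCode-returning helpers, not part of this PR):

    ResultCode Step1();
    ResultCode Step2();

    ResultCode Example() {
        ON_RESULT_FAILURE {
            // Cleanup that must run only if a failing code propagates out below.
        };
        R_TRY(Step1()); // early-returns the failing code, firing the guard
        R_TRY(Step2());
        R_SUCCEED();    // returns RESULT_SUCCESS; the failure guard does not fire
    }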
| @@ -329,6 +329,11 @@ public: | ||||
|      */ | ||||
|     u8* GetPointer(VAddr vaddr); | ||||
|  | ||||
|     template <typename T> | ||||
|     T* GetPointer(VAddr vaddr) { | ||||
|         return reinterpret_cast<T*>(GetPointer(vaddr)); | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|      * Gets a pointer to the given address. | ||||
|      * | ||||
|   | ||||
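A one-line usage sketch for the typed overload (assuming `memory` is a Memory::MemorySystem and `vaddr` is a mapped virtual address):

    // Sketch only: write one 32-bit word through the typed pointer.
    if (u32* word = memory.GetPointer<u32>(vaddr)) {
        *word = 0;
    }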