Commit 15d9b041, authored 3 years ago by bunnei
core: hle: kernel: k_slab_heap: Refresh to use guest allocations.
Parent: a25cd4bb
Showing 2 changed files, with 107 additions and 125 deletions:

  src/core/hle/kernel/k_slab_heap.h   +106 −124
  src/core/hle/kernel/slab_helpers.h  +1 −1
src/core/hle/kernel/k_slab_heap.h  (+106 −124)
@@ -16,39 +16,34 @@ class KernelCore;
 namespace impl {
 
-class KSlabHeapImpl final {
-public:
+class KSlabHeapImpl {
+    YUZU_NON_COPYABLE(KSlabHeapImpl);
+    YUZU_NON_MOVEABLE(KSlabHeapImpl);
+
+public:
     struct Node {
         Node* next{};
     };
 
 public:
     constexpr KSlabHeapImpl() = default;
-    constexpr ~KSlabHeapImpl() = default;
 
-    void Initialize(std::size_t size) {
-        ASSERT(head == nullptr);
-        obj_size = size;
-    }
-
-    constexpr std::size_t GetObjectSize() const {
-        return obj_size;
+    void Initialize() {
+        ASSERT(m_head == nullptr);
     }
 
     Node* GetHead() const {
-        return head;
+        return m_head;
     }
 
     void* Allocate() {
-        Node* ret = head.load();
+        Node* ret = m_head.load();
 
         do {
             if (ret == nullptr) {
                 break;
             }
-        } while (!head.compare_exchange_weak(ret, ret->next));
+        } while (!m_head.compare_exchange_weak(ret, ret->next));
 
         return ret;
     }
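The hunk above is the heart of the allocator: an intrusive, lock-free LIFO free list. Allocate() pops the head node with a compare-and-swap loop, and Free() (in the next hunk) pushes a node back the same way. A minimal standalone sketch of the pattern, illustrative only and not suyu code (a general-purpose version would also need to address the ABA problem, which a CAS on a bare pointer does not rule out):

#include <atomic>
#include <cstdio>

struct Node {
    Node* next{};
};

std::atomic<Node*> head{};

// Pop the current head; compare_exchange_weak reloads `ret` on failure,
// so the loop simply retries against the newest head.
void* Pop() {
    Node* ret = head.load();
    do {
        if (ret == nullptr) {
            break; // free list exhausted
        }
    } while (!head.compare_exchange_weak(ret, ret->next));
    return ret;
}

// Push a node back onto the list head.
void Push(void* obj) {
    Node* node = static_cast<Node*>(obj);
    Node* cur_head = head.load();
    do {
        node->next = cur_head;
    } while (!head.compare_exchange_weak(cur_head, node));
}

int main() {
    Node a, b;
    Push(&a);
    Push(&b);
    void* first = Pop();  // &b: last pushed, first popped
    void* second = Pop(); // &a
    void* third = Pop();  // nullptr: list exhausted
    std::printf("%p %p %p\n", first, second, third);
}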
@@ -56,170 +51,157 @@ public:
     void Free(void* obj) {
         Node* node = static_cast<Node*>(obj);
 
-        Node* cur_head = head.load();
+        Node* cur_head = m_head.load();
         do {
             node->next = cur_head;
-        } while (!head.compare_exchange_weak(cur_head, node));
+        } while (!m_head.compare_exchange_weak(cur_head, node));
     }
 
 private:
-    std::atomic<Node*> head{};
-    std::size_t obj_size{};
+    std::atomic<Node*> m_head{};
 };
 
 } // namespace impl
 
-class KSlabHeapBase {
-public:
-    constexpr KSlabHeapBase() = default;
-    constexpr ~KSlabHeapBase() = default;
+template <bool SupportDynamicExpansion>
+class KSlabHeapBase : protected impl::KSlabHeapImpl {
+    YUZU_NON_COPYABLE(KSlabHeapBase);
+    YUZU_NON_MOVEABLE(KSlabHeapBase);
 
-    constexpr bool Contains(uintptr_t addr) const {
-        return start <= addr && addr < end;
-    }
+private:
+    size_t m_obj_size{};
+    uintptr_t m_peak{};
+    uintptr_t m_start{};
+    uintptr_t m_end{};
 
-    constexpr std::size_t GetSlabHeapSize() const {
-        return (end - start) / GetObjectSize();
-    }
+private:
+    void UpdatePeakImpl(uintptr_t obj) {
+        static_assert(std::atomic_ref<uintptr_t>::is_always_lock_free);
+        std::atomic_ref<uintptr_t> peak_ref(m_peak);
+
+        const uintptr_t alloc_peak = obj + this->GetObjectSize();
+        uintptr_t cur_peak = m_peak;
+        do {
+            if (alloc_peak <= cur_peak) {
+                break;
+            }
+        } while (!peak_ref.compare_exchange_strong(cur_peak, alloc_peak));
+    }
 
-    constexpr std::size_t GetObjectSize() const {
-        return impl.GetObjectSize();
-    }
+public:
+    constexpr KSlabHeapBase() = default;
 
-    constexpr uintptr_t GetSlabHeapAddress() const {
-        return start;
-    }
+    bool Contains(uintptr_t address) const {
+        return m_start <= address && address < m_end;
+    }
 
-    std::size_t GetObjectIndexImpl(const void* obj) const {
-        return (reinterpret_cast<uintptr_t>(obj) - start) / GetObjectSize();
-    }
+    void Initialize(size_t obj_size, void* memory, size_t memory_size) {
+        // Ensure we don't initialize a slab using null memory.
+        ASSERT(memory != nullptr);
+
+        // Set our object size.
+        m_obj_size = obj_size;
+
+        // Initialize the base allocator.
+        KSlabHeapImpl::Initialize();
+
+        // Set our tracking variables.
+        const size_t num_obj = (memory_size / obj_size);
+        m_start = reinterpret_cast<uintptr_t>(memory);
+        m_end = m_start + num_obj * obj_size;
+        m_peak = m_start;
+
+        // Free the objects.
+        u8* cur = reinterpret_cast<u8*>(m_end);
+
+        for (size_t i = 0; i < num_obj; i++) {
+            cur -= obj_size;
+            KSlabHeapImpl::Free(cur);
+        }
+    }
 
-    std::size_t GetPeakIndex() const {
-        return GetObjectIndexImpl(reinterpret_cast<const void*>(peak));
-    }
+    size_t GetSlabHeapSize() const {
+        return (m_end - m_start) / this->GetObjectSize();
+    }
 
-    void* AllocateImpl() {
-        return impl.Allocate();
-    }
+    size_t GetObjectSize() const {
+        return m_obj_size;
+    }
 
-    void FreeImpl(void* obj) {
-        // Don't allow freeing an object that wasn't allocated from this heap
-        ASSERT(Contains(reinterpret_cast<uintptr_t>(obj)));
+    void* Allocate() {
+        void* obj = KSlabHeapImpl::Allocate();
 
-        impl.Free(obj);
-    }
+        return obj;
+    }
 
-    void InitializeImpl(std::size_t obj_size, void* memory, std::size_t memory_size) {
-        // Ensure we don't initialize a slab using null memory
-        ASSERT(memory != nullptr);
+    void Free(void* obj) {
+        // Don't allow freeing an object that wasn't allocated from this heap.
+        const bool contained = this->Contains(reinterpret_cast<uintptr_t>(obj));
+        ASSERT(contained);
+        KSlabHeapImpl::Free(obj);
+    }
 
-        // Initialize the base allocator
-        impl.Initialize(obj_size);
+    size_t GetObjectIndex(const void* obj) const {
+        if constexpr (SupportDynamicExpansion) {
+            if (!this->Contains(reinterpret_cast<uintptr_t>(obj))) {
+                return std::numeric_limits<size_t>::max();
+            }
+        }
 
-        // Set our tracking variables
-        const std::size_t num_obj = (memory_size / obj_size);
-        start = reinterpret_cast<uintptr_t>(memory);
-        end = start + num_obj * obj_size;
-        peak = start;
+        return (reinterpret_cast<uintptr_t>(obj) - m_start) / this->GetObjectSize();
+    }
 
-        // Free the objects
-        u8* cur = reinterpret_cast<u8*>(end);
+    size_t GetPeakIndex() const {
+        return this->GetObjectIndex(reinterpret_cast<const void*>(m_peak));
+    }
 
-        for (std::size_t i{}; i < num_obj; i++) {
-            cur -= obj_size;
-            impl.Free(cur);
-        }
+    uintptr_t GetSlabHeapAddress() const {
+        return m_start;
     }
 
-private:
-    using Impl = impl::KSlabHeapImpl;
-
-    Impl impl;
-    uintptr_t peak{};
-    uintptr_t start{};
-    uintptr_t end{};
+    size_t GetNumRemaining() const {
+        // Only calculate the number of remaining objects under debug configuration.
+        return 0;
+    }
 };
 
 template <typename T>
-class KSlabHeap final : public KSlabHeapBase {
-public:
-    enum class AllocationType {
-        Host,
-        Guest,
-    };
+class KSlabHeap final : public KSlabHeapBase<false> {
+private:
+    using BaseHeap = KSlabHeapBase<false>;
 
-    explicit constexpr KSlabHeap(AllocationType allocation_type_ = AllocationType::Host)
-        : KSlabHeapBase(), allocation_type{allocation_type_} {}
+public:
+    constexpr KSlabHeap() = default;
 
-    void Initialize(void* memory, std::size_t memory_size) {
-        if (allocation_type == AllocationType::Guest) {
-            InitializeImpl(sizeof(T), memory, memory_size);
-        }
+    void Initialize(void* memory, size_t memory_size) {
+        BaseHeap::Initialize(sizeof(T), memory, memory_size);
     }
 
     T* Allocate() {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T;
-
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T();
-            }
-            return obj;
-        }
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj);
+        }
+        return obj;
     }
 
-    T* AllocateWithKernel(KernelCore& kernel) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            return new T(kernel);
+    T* Allocate(KernelCore& kernel) {
+        T* obj = static_cast<T*>(BaseHeap::Allocate());
 
-        case AllocationType::Guest:
-            T* obj = static_cast<T*>(AllocateImpl());
-            if (obj != nullptr) {
-                new (obj) T(kernel);
-            }
-            return obj;
-        }
-
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
-        return nullptr;
+        if (obj != nullptr) [[likely]] {
+            std::construct_at(obj, kernel);
+        }
+        return obj;
     }
 
     void Free(T* obj) {
-        switch (allocation_type) {
-        case AllocationType::Host:
-            // Fallback for cases where we do not yet support allocating guest memory from the slab
-            // heap, such as for kernel memory regions.
-            delete obj;
-            return;
-
-        case AllocationType::Guest:
-            FreeImpl(obj);
-            return;
-        }
-
-        UNREACHABLE_MSG("Invalid AllocationType {}", allocation_type);
+        BaseHeap::Free(obj);
     }
 
-    constexpr std::size_t GetObjectIndex(const T* obj) const {
-        return GetObjectIndexImpl(obj);
+    size_t GetObjectIndex(const T* obj) const {
+        return BaseHeap::GetObjectIndex(obj);
     }
-
-private:
-    const AllocationType allocation_type;
 };
 
 } // namespace Kernel
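Taken together, the rework replaces the old Host/Guest AllocationType switch with a single path: KSlabHeapBase carves one caller-supplied memory block into fixed-size slots at Initialize() time, and the typed KSlabHeap<T> wrapper constructs objects in place with std::construct_at (C++20) rather than placement new. A compact single-threaded model of that flow; MiniTypedSlab and Dummy are hypothetical stand-ins, and a std::vector stands in for the lock-free list:

#include <cstddef>
#include <cstdio>
#include <memory>
#include <vector>

template <typename T>
class MiniTypedSlab {
public:
    void Initialize(void* memory, std::size_t memory_size) {
        // Carve the block back-to-front, as the new KSlabHeapBase::Initialize()
        // does, so the lowest address ends up on top of the free list.
        const std::size_t num_obj = memory_size / sizeof(T);
        auto* cur = static_cast<std::byte*>(memory) + num_obj * sizeof(T);
        for (std::size_t i = 0; i < num_obj; i++) {
            cur -= sizeof(T);
            m_free.push_back(cur);
        }
    }

    T* Allocate() {
        if (m_free.empty()) {
            return nullptr;
        }
        T* obj = static_cast<T*>(m_free.back());
        m_free.pop_back();
        if (obj != nullptr) [[likely]] {
            std::construct_at(obj); // replaces the old `new (obj) T()`
        }
        return obj;
    }

    void Free(T* obj) {
        std::destroy_at(obj); // the real Free() only recycles storage and asserts containment
        m_free.push_back(obj);
    }

private:
    std::vector<void*> m_free; // stand-in for impl::KSlabHeapImpl's atomic list
};

struct Dummy {
    int value = 1234;
};

int main() {
    alignas(Dummy) std::byte buffer[sizeof(Dummy) * 8];
    MiniTypedSlab<Dummy> slab;
    slab.Initialize(buffer, sizeof(buffer));
    Dummy* d = slab.Allocate();
    std::printf("%d\n", d->value); // 1234
    slab.Free(d);
}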
src/core/hle/kernel/slab_helpers.h  (+1 −1)
@@ -59,7 +59,7 @@ class KAutoObjectWithSlabHeapAndContainer : public Base {
 private:
     static Derived* Allocate(KernelCore& kernel) {
-        return kernel.SlabHeap<Derived>().AllocateWithKernel(kernel);
+        return kernel.SlabHeap<Derived>().Allocate(kernel);
     }
 
     static void Free(KernelCore& kernel, Derived* obj) {
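In slab_helpers.h, the only caller adapts to the rename: the kernel-aware allocation is now just an overload, so AllocateWithKernel(kernel) becomes Allocate(kernel). A toy illustration of that overload resolution, with KernelCore and SlabHeapSketch as stand-ins for the real types:

#include <cstdio>

struct KernelCore {};

template <typename T>
struct SlabHeapSketch {
    T* Allocate() { return new T{}; }                          // default path
    T* Allocate(KernelCore& kernel) { return new T{&kernel}; } // kernel-aware overload
};

struct Derived {
    KernelCore* owner{};
};

int main() {
    KernelCore kernel;
    SlabHeapSketch<Derived> heap;
    Derived* obj = heap.Allocate(kernel); // was: AllocateWithKernel(kernel)
    std::printf("%d\n", obj->owner == &kernel);
    delete obj;
}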