path: root/crates/alloc_buddy/src/lib.rs
author    Nathan Ringo <nathan@remexre.com>  2024-09-01 16:09:51 -0500
committer Nathan Ringo <nathan@remexre.com>  2024-09-01 16:09:51 -0500
commit    dfaaf221460f1270d198e7efca97f3cd43fae188 (patch)
tree      896d7bd2753e9738b164c3f69166e792ada1a987 /crates/alloc_buddy/src/lib.rs
parent    db8181101d52da0d138b7109f4aac2ff722e288a (diff)
Implements block merging and fixes a provenance bug.
Diffstat (limited to 'crates/alloc_buddy/src/lib.rs')
-rw-r--r--  crates/alloc_buddy/src/lib.rs  82
1 file changed, 64 insertions(+), 18 deletions(-)
diff --git a/crates/alloc_buddy/src/lib.rs b/crates/alloc_buddy/src/lib.rs
index 0ace886..02d45f8 100644
--- a/crates/alloc_buddy/src/lib.rs
+++ b/crates/alloc_buddy/src/lib.rs
@@ -69,7 +69,7 @@ pub struct BuddyAllocator<
free_list_sentinels: NonNull<FreeListNode>,
/// The metadata associated with each tree.
- trees: &'allocator mut [Tree<'allocator, PAGE_SIZE, PAGE_SIZE_BITS>],
+ trees: &'allocator [Tree<PAGE_SIZE, PAGE_SIZE_BITS>],
/// The shared bitset.
bitset: &'allocator mut Bitset,
@@ -251,7 +251,7 @@ impl<
// Fast-path: try allocating from the right size class's free list.
// Find the corresponding tree and the offset of the subregion in the tree.
- let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let (tree_index, offset, ptr) = self.tree_index_offset_and_ptr(ptr);
// Mark the pointer as no longer being in the tree.
let Ok(()) =
@@ -271,7 +271,7 @@ impl<
let ptr = self.free_list(size_class_to_split).pop().unwrap();
// Find the corresponding tree and the offset of the subregion in the tree.
- let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let (tree_index, offset, ptr) = self.tree_index_offset_and_ptr(ptr);
// Mark the pointer as no longer being in the tree.
let Ok(()) = self.trees[tree_index].bitset_mark_as_absent(
@@ -282,10 +282,12 @@ impl<
panic!("the bit for tree {tree_index}, offset {offset} was not set in {self:#?}")
};
- // For each size class between the one we found and the one we want, split the page and
- // add the other half to the free list (and mark it as present in the bitset).
+ // For each size class between the one we found and the one we want, split the
+ // subregion and add the other half to the free list (and mark it as present in the
+ // bitset).
while size_class_to_split != size_class {
- // Move to the next size class.
+ // Move to the next size class, since that's the size class of the subregion we'll
+ // be putting back.
size_class_to_split -= 1;
let offset_to_other_half = PAGE_SIZE << size_class_to_split;
@@ -295,13 +297,13 @@ impl<
let other_half_ptr = unsafe { ptr.add(offset_to_other_half) };
let other_half_offset = offset + offset_to_other_half;
- // Mark the pointer as being in the tree.
+ // Mark the pointer as being in the free list.
let Ok(()) = self.trees[tree_index].bitset_mark_as_present(
self.bitset,
size_class_to_split,
other_half_offset,
) else {
- panic!("the bit for tree {tree_index}, offset {offset} was already set in {self:#?}")
+ panic!("the bit for tree {tree_index}, offset {other_half_offset} was already set in {self:#?}")
};
// Return the pointer to the appropriate free list.
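
As a rough standalone illustration of the split loop above (PAGE_SIZE = 4096 and the size classes are assumed here; this is a sketch, not the crate's API), each iteration halves the subregion and the upper half goes back on a free list at an offset of PAGE_SIZE << size_class_to_split:

    const PAGE_SIZE: usize = 4096; // assumed page size for the example

    /// Offsets (relative to the subregion being split) of the upper halves that
    /// are returned to the free lists when splitting from `from` down to `to`.
    fn split_offsets(mut from: usize, to: usize) -> Vec<usize> {
        let mut offsets = Vec::new();
        while from != to {
            from -= 1;
            offsets.push(PAGE_SIZE << from);
        }
        offsets
    }

    fn main() {
        // Splitting a size-class-2 subregion down to size class 0 puts the
        // halves at byte offsets 8192 and 4096 back on the free lists; the
        // allocation keeps the subregion at offset 0.
        assert_eq!(split_offsets(2, 0), vec![8192, 4096]);
    }
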
@@ -326,10 +328,10 @@ impl<
/// - `ptr` must have been returned by a call to `alloc` with the same `size_class`.
/// - `ptr` must convey ownership over the entire region.
#[requires(size_class < SIZE_CLASS_COUNT)]
- pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, size_class: usize) {
+ pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, mut size_class: usize) {
// Find the corresponding tree and the offset of the subregion in the tree.
- let (tree_index, offset) = self.tree_index_and_offset(ptr);
- let tree = &mut self.trees[tree_index];
+ let (tree_index, mut offset, mut ptr) = self.tree_index_offset_and_ptr(ptr);
+ let tree = &self.trees[tree_index];
// Ensure that the memory is marked as not being in the free list.
assert_eq!(
@@ -341,7 +343,33 @@ impl<
assert!(size_class <= tree.size_class);
while size_class < tree.size_class {
let buddy_offset = offset ^ (PAGE_SIZE << size_class);
- todo!("{buddy_offset:#x} {offset:#x}");
+ match tree.bitset_get(self.bitset, size_class, buddy_offset) {
+ SubregionStatus::InFreeList => {
+ // If the buddy was present, we can remove it from the bitset and free list and
+ // keep merging.
+ let buddy_node = tree
+ .base_ptr
+ .unwrap()
+ .cast::<FreeListNode>()
+ .byte_add(buddy_offset);
+ let buddy_ptr = FreeListNode::unlink(buddy_node);
+
+ let Ok(()) = tree.bitset_mark_as_absent(self.bitset, size_class, buddy_offset)
+ else {
+ panic!("the bit for tree {tree_index}, offset {buddy_offset} was not set in {self:#?}")
+ };
+
+ // Continue merging with the lower of the two pointers (so it will still have a
+ // power-of-two-aligned offset).
+ ptr = ptr.min(buddy_ptr);
+ offset = offset.min(buddy_offset);
+ size_class += 1;
+ }
+ SubregionStatus::NotInFreeList => {
+ // If the buddy was absent, we won't be able to do any more merging.
+ break;
+ }
+ }
}
// Mark the pointer as being in the tree.
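
For intuition about the merge loop, the buddy of a subregion is found by XOR-ing its offset with the subregion's size in bytes; a minimal standalone sketch (again assuming PAGE_SIZE = 4096, independent of the crate's types):

    const PAGE_SIZE: usize = 4096; // assumed page size for the example

    /// Offset of the buddy of the subregion at `offset` in `size_class`.
    fn buddy_offset(offset: usize, size_class: usize) -> usize {
        offset ^ (PAGE_SIZE << size_class)
    }

    fn main() {
        // The size-class-0 subregion at offset 4096 has its buddy at offset 0;
        // merging keeps the lower offset, so the result stays aligned to the
        // new (doubled) size.
        assert_eq!(buddy_offset(4096, 0), 0);
        // The merged size-class-1 subregion at offset 0 would next merge with
        // its buddy at offset 8192.
        assert_eq!(buddy_offset(0, 1), 8192);
    }
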
@@ -363,10 +391,11 @@ impl<
unsafe { FreeList::from_sentinel(sentinel) }
}
- /// Returns the index of the tree this pointer was allocated in.
+ /// Returns the index of the tree this pointer was allocated in, as well as the offset of the
+ /// pointer in the tree and a copy of the pointer with the provenance of the tree.
#[ensures(self.trees[ret.0].contains(ptr.as_ptr() as usize))]
- #[ensures(ret.1 < (1 << self.trees[ret.0].size_class))]
- fn tree_index_and_offset(&self, ptr: NonNull<u8>) -> (usize, usize) {
+ #[ensures(ret.1 < (PAGE_SIZE << self.trees[ret.0].size_class))]
+ fn tree_index_offset_and_ptr(&self, ptr: NonNull<u8>) -> (usize, usize, NonNull<u8>) {
let addr = ptr.as_ptr() as usize;
let tree_index = self
.trees
@@ -378,8 +407,10 @@ impl<
i - 1
}
};
- let offset = addr - self.trees[tree_index].base_addr();
- (tree_index, offset)
+ let tree = &self.trees[tree_index];
+ let offset = addr - tree.base_addr();
+ let ptr = unsafe { tree.base_ptr.unwrap().cast().add(offset) };
+ (tree_index, offset, ptr)
}
}
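
The provenance fix above re-derives the returned pointer from the tree's own base_ptr instead of handing back the caller's pointer, so later accesses through it stay within the tree's provenance. A hedged sketch of that general pattern (the names below are illustrative, not the crate's):

    use core::ptr::NonNull;

    /// Re-derive the address `addr` as a pointer carrying the provenance of
    /// `base`, assuming `addr` lies within the allocation `base` points into.
    unsafe fn with_base_provenance(base: NonNull<u8>, addr: usize) -> NonNull<u8> {
        let offset = addr - base.as_ptr() as usize;
        // SAFETY: the caller guarantees `addr` is inside `base`'s allocation.
        unsafe { NonNull::new_unchecked(base.as_ptr().add(offset)) }
    }
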
@@ -405,13 +436,28 @@ impl<
}
}
+ struct Bitsets<'allocator, const PAGE_SIZE: usize, const PAGE_SIZE_BITS: usize>(
+ &'allocator [Tree<PAGE_SIZE, PAGE_SIZE_BITS>],
+ &'allocator Bitset,
+ );
+
+ impl<'allocator, const PAGE_SIZE: usize, const PAGE_SIZE_BITS: usize> fmt::Debug
+ for Bitsets<'allocator, PAGE_SIZE, PAGE_SIZE_BITS>
+ {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_list()
+ .entries(self.0.iter().map(|tree| tree.debug_bitset(self.1)))
+ .finish()
+ }
+ }
+
fmt.debug_struct("BuddyAllocator")
.field(
"free_lists",
&FreeLists::<SIZE_CLASS_COUNT>(self.free_list_sentinels),
)
.field("trees", &self.trees)
- .field("bitset", &self.bitset)
+ .field("bitsets", &Bitsets(self.trees, self.bitset))
.finish()
}
}
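
The Bitsets helper in the last hunk follows the usual newtype-plus-debug_list pattern for formatting borrowed data; a freestanding sketch of the same idea (the types here are made up for illustration):

    use std::fmt;

    struct Words<'a>(&'a [u32]);

    impl fmt::Debug for Words<'_> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Render each word in binary, roughly how a per-tree bitset view
            // might be displayed.
            fmt.debug_list()
                .entries(self.0.iter().map(|word| format!("{word:032b}")))
                .finish()
        }
    }

    fn main() {
        println!("{:?}", Words(&[0b1010, 0]));
    }
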