author    Nathan Ringo <nathan@remexre.com>  2024-09-01 09:53:58 -0500
committer Nathan Ringo <nathan@remexre.com>  2024-09-01 09:53:58 -0500
commit    ac876162d111ced97969f5e17accb5d4aec789f6 (patch)
tree      eb8d49a4bdf9a98f9064f684ed096a43c4c68eb3 /crates/buddy_allocator/src/lib.rs
parent    439b93dd3e22311caee6d69eb4aa1da5b81a0731 (diff)
More buddy work, but we've got UB again... This time relating to stacked borrows...
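The UB itself isn't pinned down in this commit. For reference, a minimal sketch (hypothetical, not the actual bug here) of the pattern Stacked Borrows rejects, which Miri reports as use of an invalidated tag:

    // Minimal Stacked Borrows violation: two live &mut reborrows of one pointer.
    fn stacked_borrows_ub() {
        let mut x = 0usize;
        let p: *mut usize = &mut x;
        let a = unsafe { &mut *p }; // reborrow #1, tagged Unique
        let b = unsafe { &mut *p }; // reborrow #2 pops #1 off the borrow stack
        *b += 1;
        *a += 1; // UB: `a`'s tag is gone from the stack; Miri flags this use
    }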
Diffstat (limited to 'crates/buddy_allocator/src/lib.rs')
-rw-r--r--  crates/buddy_allocator/src/lib.rs | 89
1 file changed, 74 insertions(+), 15 deletions(-)
diff --git a/crates/buddy_allocator/src/lib.rs b/crates/buddy_allocator/src/lib.rs
index 441c77f..4292444 100644
--- a/crates/buddy_allocator/src/lib.rs
+++ b/crates/buddy_allocator/src/lib.rs
@@ -50,9 +50,13 @@ mod tree;
// mod stripe;
-use crate::{bitvec::BitVec, free_list::FreeList, tree::Tree};
+use crate::{
+ bitvec::{Bitset, SubregionStatus},
+ free_list::FreeList,
+ tree::Tree,
+};
use allocator_api2::alloc::{AllocError, Layout, LayoutError};
-use contracts::requires;
+use contracts::{ensures, requires};
use core::{mem, pin::Pin, ptr::NonNull};
use vernos_physmem_free_list::FreeListAllocator;
use vernos_utils::pin::{pin_project_slice_mut, pin_project_slice_mut_iter};
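For readers without bitvec.rs at hand, the shape of the newly imported status enum, inferred from its use in dealloc below (an assumption, not the committed definition):

    // Assumed definition; only the NotInFreeList variant is visible in this diff.
    #[derive(Debug, PartialEq, Eq)]
    pub enum SubregionStatus {
        NotInFreeList,
        InFreeList,
    }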
@@ -72,7 +76,7 @@ pub struct BuddyAllocator<
trees: &'allocator mut [Tree<'allocator, PAGE_SIZE, PAGE_SIZE_BITS>],
/// The shared bitset.
- bitset: &'allocator mut BitVec,
+ bitset: &'allocator mut Bitset,
}
impl<
@@ -149,8 +153,10 @@ impl<
)
.as_mut()
};
- let bitset: &'allocator mut BitVec = unsafe {
- mem::transmute::<&mut [usize], &mut BitVec>(
+ // SAFETY: This type is valid when zero-initialized, and is a newtype for `[usize]` so has
+ // the same layout.
+ let bitset: &'allocator mut Bitset = unsafe {
+ mem::transmute::<&mut [usize], &mut Bitset>(
NonNull::slice_from_raw_parts(
metadata.byte_add(metadata_layout.bitset_offset).cast(),
metadata_layout.bitset_len,
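That SAFETY comment is load-bearing: the reference transmute is defensible only if Bitset really is a #[repr(transparent)] wrapper over [usize]. A sketch of the assumed shape, with the conversion written as a DST pointer cast rather than mem::transmute:

    // Assumed newtype shape; #[repr(transparent)] makes &mut [usize] and
    // &mut Bitset identical in layout, including the slice-length metadata.
    #[repr(transparent)]
    pub struct Bitset([usize]);

    impl Bitset {
        // Equivalent to the transmute above, but keeps the metadata handling
        // explicit: a slice-to-slice pointer cast preserves the length.
        pub fn from_words(words: &mut [usize]) -> &mut Bitset {
            // SAFETY: Bitset is a transparent newtype over [usize], and an
            // all-zeros word slice is a valid (empty) Bitset.
            unsafe { &mut *(words as *mut [usize] as *mut Bitset) }
        }
    }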
@@ -222,16 +228,14 @@ impl<
// UNWRAP: The pointer must have been non-null when it was in the free-list
// allocator.
let ptr = NonNull::new(tree.base_ptr as *mut u8).unwrap();
+ let free_list = pin_project_slice_mut(free_lists.as_mut(), tree.size_class);
// SAFETY: This region is not owned or borrowed by anything else (the only thing it
// could be would be the metadata, but that's not in this tree), is of the correct
// size, and is composed of untyped memory.
- unsafe {
- pin_project_slice_mut(free_lists.as_mut(), tree.size_class).push(ptr);
- }
+ unsafe { free_list.push(ptr) };
// Then, set a bit in the bitset to say that this region is present.
- assert!(!bitset.get(tree.bitset_offset));
- bitset.set(tree.bitset_offset, true);
+ tree.bitset_mark_as_present(bitset, tree.size_class, 0);
}
}
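A hedged reading of the new helper pair (the real implementations live in tree.rs): each tree owns a range of bits starting at tree.bitset_offset, one bit per (size class, offset) subregion, recording whether that subregion currently sits in a free list. The push and the mark must happen together, or the bitset would disagree with the free lists. Under that assumption, the state after this loop body would satisfy:

    // Hypothetical invariant, using names from this diff; InFreeList is the
    // assumed counterpart of the NotInFreeList variant used in dealloc.
    debug_assert_eq!(
        tree.bitset_get(bitset, tree.size_class, 0),
        SubregionStatus::InFreeList
    );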
@@ -244,9 +248,24 @@ impl<
}
/// Tries to allocate a subregion of a particular size class from the allocator.
- #[requires((0..SIZE_CLASS_COUNT).contains(&size_class))]
+ #[requires(size_class < SIZE_CLASS_COUNT)]
pub fn alloc(&mut self, size_class: usize) -> Result<NonNull<u8>, AllocError> {
- Err(AllocError)
+ if let Some(ptr) = self.free_list_mut(size_class).pop() {
+ // Fast-path: try allocating from the right size class's free list.
+
+ // Find the corresponding tree and the offset of the subregion in the tree.
+ let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let tree = &mut self.trees[tree_index];
+
+ // Mark the pointer as no longer being in the tree.
+ tree.bitset_mark_as_absent(&mut self.bitset, size_class, offset);
+
+ // Return the pointer.
+ Ok(ptr)
+ } else {
+ // Try finding a larger chunk of memory in a larger size class.
+ Err(AllocError)
+ }
}
/// Returns a subregion of a particular size class to the allocator.
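The else branch above is still a stub. The classic buddy slow path it gestures at pops a block from the first larger size class that has one, then splits it back down, pushing each upper half onto the intermediate free lists. The split arithmetic, as a self-contained sketch (not the committed code):

    // Splitting a class-`parent` block down to class `target` keeps the base
    // pointer and frees one upper half per level; each upper half starts
    // PAGE_SIZE << class bytes past the base.
    const PAGE_SIZE: usize = 4096;

    fn split_upper_halves(base: usize, parent: usize, target: usize) -> Vec<usize> {
        (target..parent)
            .rev()
            .map(|class| base + (PAGE_SIZE << class))
            .collect()
    }

    // split_upper_halves(0, 3, 0) == [0x4000, 0x2000, 0x1000]: an 8-page block
    // splits into the kept page at 0 plus freed blocks of 4, 2, and 1 page(s).

Each freed half would also need its bit set via bitset_mark_as_present, mirroring the bitset_mark_as_absent call on the fast path.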
@@ -255,9 +274,49 @@ impl<
///
/// - `ptr` must have been returned by a call to `alloc` with the same `size_class`.
/// - `ptr` must convey ownership over the entire region.
- #[requires((0..SIZE_CLASS_COUNT).contains(&size_class))]
- pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, size_class: usize) {
- todo!()
+ #[requires(size_class < SIZE_CLASS_COUNT)]
+ pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, mut size_class: usize) {
+ // Find the corresponding tree and the offset of the subregion in the tree.
+ let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let tree = &mut self.trees[tree_index];
+
+ // Ensure that the memory is marked as not being in the free list.
+ assert_eq!(
+ tree.bitset_get(&mut self.bitset, size_class, offset),
+ SubregionStatus::NotInFreeList
+ );
+
+ // Start combining size classes.
+ while size_class < tree.size_class {
+ let buddy_offset = offset ^ (PAGE_SIZE << size_class);
+ todo!("{buddy_offset:#x} {offset:#x}");
+ }
+
+ todo!("{tree:#?}")
+ }
+
+ /// Returns a (pinned) mutable reference to the free list with the given size class.
+ fn free_list_mut(&mut self, i: usize) -> Pin<&mut FreeList> {
+ pin_project_slice_mut(self.free_lists.as_mut(), i)
+ }
+
+ /// Returns the index of the tree this pointer was allocated in.
+ #[ensures(self.trees[ret.0].contains(ptr.as_ptr() as usize))]
+ #[ensures(ret.1 < (1 << self.trees[ret.0].size_class))]
+ fn tree_index_and_offset(&self, ptr: NonNull<u8>) -> (usize, usize) {
+ let addr = ptr.as_ptr() as usize;
+ let tree_index = self
+ .trees
+ .binary_search_by_key(&addr, |tree| tree.base_ptr as usize);
+ let tree_index = match tree_index {
+ Ok(i) => i,
+ Err(i) => {
+ assert_ne!(i, 0);
+ i - 1
+ }
+ };
+ let offset = addr - self.trees[tree_index].base_ptr as usize;
+ (tree_index, offset)
}
}
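The XOR in the new combining loop is the standard buddy identity: at size class k, a block and its buddy differ only in bit log2(PAGE_SIZE) + k of their offsets, so offset ^ (PAGE_SIZE << k) flips between the two halves of the enclosing class-(k + 1) block. A self-contained check:

    const PAGE_SIZE: usize = 4096;

    fn buddy_offset(offset: usize, size_class: usize) -> usize {
        offset ^ (PAGE_SIZE << size_class)
    }

    fn main() {
        // Class 0: one-page blocks pair up with their immediate neighbors.
        assert_eq!(buddy_offset(0x0000, 0), 0x1000);
        assert_eq!(buddy_offset(0x1000, 0), 0x0000);
        // Class 1: two-page blocks pair up two pages apart.
        assert_eq!(buddy_offset(0x0000, 1), 0x2000);
        assert_eq!(buddy_offset(0x6000, 1), 0x4000);
    }

Relatedly, tree_index_and_offset leans on the trees being sorted by base_ptr: an Err(i) from the binary search means addr falls strictly inside some tree's region, so the owning tree is the one just before the insertion point, hence the i - 1.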