diff options
-rw-r--r-- | crates/alloc_buddy/src/lib.rs | 113 |
1 file changed, 50 insertions, 63 deletions
diff --git a/crates/alloc_buddy/src/lib.rs b/crates/alloc_buddy/src/lib.rs index 02d45f8..a2a509b 100644 --- a/crates/alloc_buddy/src/lib.rs +++ b/crates/alloc_buddy/src/lib.rs @@ -1,49 +1,6 @@ //! A buddy allocator, used to allocate pages. -//! -//! The allocator can be split into three abstractions: stripes, trees, and the allocator. -//! -//! TODO: See if there's standard terminology for these. -//! -//! ## Stripes -//! -//! The buddy allocator works in terms of size classes, which are power-of-two sized, starting at a -//! single page and going up from there. Each stripe corresponds to a single size class and a -//! particular region of memory. -//! -//! A stripe contains a circular doubly-linked free-list for subregions of that size class, and a -//! bitset marking whether a particular region has been allocated or not. Being a circular -//! doubly-linked list makes it cheap to remove an element whose address we know, as well as cheap -//! to push and pop elements. -//! -//! It's efficient to go from the address of a subregion to the index of its corresponding bit, so -//! when we hand out a subregion from the free-list or put one back, it's cheap to read or write -//! its bit. -//! -//! ## Trees -//! -//! A tree is logically a collection of stripes, one per size class. To pack the structures more -//! efficiently, they are stored interleaved with each other, and the tree manages this. -//! -//! The important logic at the level of trees happens when we allocate a subregion from a size -//! class whose stripe's free-list is empty, and when we free subregions. -//! -//! When a stripe's free-list is empty, the tree instead allocates a subregion of a larger size -//! from the next stripe. It can then store the unused portion in the current size class. -//! -//! The really important bit is the ability to merge subregions when they're freed. When we free a -//! subregion of a certain size class, we can check whether its neighbor (its buddy) is unallocated -//! 
as well. If so, we can remove it from the free-list by its address. We can combine the two -//! subregions into one of the next larger size class, and then return that subregion to the next -//! stripe. -//! -//! ## The buddy allocator -//! -//! Finally, the overall buddy allocator needs to be able to handle multiple memory regions. To -//! facilitate this, the trees are stored in an array, which forms the overall allocator. #![no_std] -extern crate std; // TODO: Remove me - mod bitset; mod free_list; mod tree; @@ -190,7 +147,7 @@ impl< // Slice the trees slice to fit how many trees actually remained, then sort them by // address. let trees = &mut trees[..i]; - trees.sort_by_key(|tree| tree.base_addr()); + trees.sort_unstable_by_key(|tree| tree.base_addr()); // Compute the number of bits that belong to each tree and assign them. let mut bitset_offset = 0; @@ -201,28 +158,60 @@ impl< assert!(bitset_offset <= metadata_layout.bitset_len_bits); // Add regions the trees should own to the free lists, excluding the subregions that - // metadata is allocated in. + // metadata is allocated in. We construct the allocator now, so that we can use its + // helpers. + let mut buddy_allocator = BuddyAllocator { + free_list_sentinels, + trees, + bitset, + }; // Since we know that the metadata was allocated at the end of the page range it was - // allocated in, we can just test the end addresses. - // + // allocated in, we can just test the end addresses to figure out which tree we're going to + // have to be careful about. + let metadata_addr_lo = metadata.cast::<u8>().as_ptr() as usize; // SAFETY: This is the one-past-the-end address. let metadata_addr_hi = unsafe { metadata.cast::<u8>().add(metadata.len()).as_ptr() } as usize; - for tree in &mut *trees { + for (tree_index, tree) in trees.iter().enumerate() { // SAFETY: This is the one-past-the-end address. 
let tree_addr_hi = tree.base_addr() + (PAGE_SIZE << tree.size_class); if metadata_addr_hi == tree_addr_hi { - // If the metadata was present inside the tree... TODO. - std::eprintln!("TODO"); + // If the metadata was present inside the tree, we effectively do a binary search + // for the start of the metadata, handling the half that does not contain the start + // address of the metadata each time. + let mut ptr = tree.base_ptr.unwrap().cast::<u8>(); + let mut offset = 0; + let mut size_class = tree.size_class; + while ptr.as_ptr() as usize != metadata_addr_lo { + assert!(size_class != 0); + size_class -= 1; + + // SAFETY: This must still be in-bounds. + let higher_half = unsafe { ptr.add(PAGE_SIZE << size_class) }; + let higher_half_addr = higher_half.as_ptr() as usize; + if metadata_addr_lo < higher_half_addr { + // If the metadata boundary is below the higher half boundary, we can + // just "zoom into" the boundary. We don't actually have to do anything, + // since we already decremented the size class. + } else { + // If the metadata boundary is at or above the higher half boundary, we can + // add the lower half to the appropriate free list. + let mut free_list = buddy_allocator.free_list(size_class); + unsafe { free_list.push(ptr) }; + let Ok(()) = + tree.bitset_mark_as_present(buddy_allocator.bitset, size_class, offset) + else { + panic!("the bit for tree {tree_index}, offset {offset} was not set in {buddy_allocator:#?}") + }; + ptr = higher_half; + offset += PAGE_SIZE << size_class; + } + } } else { // Otherwise, we can take the whole range of the tree and add it to the free list. - // SAFETY: There are as many sentinels as size classes, so this will be in-bounds. - let sentinel = unsafe { free_list_sentinels.add(tree.size_class) }; - - // SAFETY: The free list is kept valid. 
- let mut free_list = unsafe { FreeList::from_sentinel(sentinel) }; + let mut free_list = buddy_allocator.free_list(tree.size_class); // SAFETY: This region is not owned or borrowed by anything else (the only thing it // could be would be the metadata, but that's not in this tree), is of the correct @@ -230,18 +219,16 @@ impl< unsafe { free_list.push(tree.base_ptr.unwrap().cast()) }; // Then, set a bit in the bitset to say that this region is present. - let Ok(()) = tree.bitset_mark_as_present(bitset, tree.size_class, 0) else { - panic!("the first bit was already set in {tree:#?}\nbitset = {bitset:?}") + let Ok(()) = + tree.bitset_mark_as_present(buddy_allocator.bitset, tree.size_class, 0) + else { + panic!("the bit for tree {tree_index}, offset 0 was not set in {buddy_allocator:#?}") }; } } - // Make the buddy allocator's struct and return it. - Ok(BuddyAllocator { - free_list_sentinels, - trees, - bitset, - }) + // Return the allocator. + Ok(buddy_allocator) } /// Tries to allocate a subregion of a particular size class from the allocator. |