From 22231ec036268ca2adb1f0b0feed0a91ea68728f Mon Sep 17 00:00:00 2001
From: Nathan Ringo
Date: Sat, 14 Sep 2024 23:04:03 -0500
Subject: Start of virtual memory allocator; got an ICE, though!

---
 crates/alloc_vma_tree/src/lib.rs | 317 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 317 insertions(+)
 create mode 100644 crates/alloc_vma_tree/src/lib.rs

diff --git a/crates/alloc_vma_tree/src/lib.rs b/crates/alloc_vma_tree/src/lib.rs
new file mode 100644
index 0000000..38dd312
--- /dev/null
+++ b/crates/alloc_vma_tree/src/lib.rs
@@ -0,0 +1,317 @@
+//! A data structure to manage virtual address ranges.
+#![no_std]
+
+use allocator_api2::alloc::{AllocError, Allocator, Layout};
+use contracts::{ensures, requires};
+use core::{marker::PhantomData, mem::MaybeUninit, ops::Range, ptr::NonNull};
+use vernos_utils::cold;
+
+/// A data structure to manage virtual address ranges.
+///
+/// This is a pair of sorted trees of virtual memory areas. The two trees contain the same nodes,
+/// but one is sorted by size (so we can find a best-fit allocation) and one is sorted by the start
+/// address (so we can find a neighbor to merge free VMAs with).
+#[derive(Default)]
+pub struct VMATree<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> {
+    roots: Option<(
+        VMARef<PAGE_SIZE_BITS, AddrTree>,
+        VMARef<PAGE_SIZE_BITS, SizeTree>,
+    )>,
+    allocator: ALLOCATOR,
+}
+
+impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> VMATree<PAGE_SIZE_BITS, ALLOCATOR> {
+    /// Creates a new VMATree that will use the given allocator.
+    pub const fn new_in(allocator: ALLOCATOR) -> VMATree<PAGE_SIZE_BITS, ALLOCATOR> {
+        VMATree {
+            roots: None,
+            allocator,
+        }
+    }
+
+    /// Adds a new VMA to the tree.
+    ///
+    /// The VMA will be marked as free.
+    #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    pub fn add(&mut self, addrs: Range<usize>) -> Result<(), AllocError> {
+        if let Some((addr_root, size_root)) = &mut self.roots {
+            todo!()
+        } else {
+            cold();
+            let node = VMANode::new_in(addrs, &self.allocator)?;
+            self.roots = Some((VMARef(node, PhantomData), VMARef(node, PhantomData)));
+            Ok(())
+        }
+    }
+}
+
+impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> Drop
+    for VMATree<PAGE_SIZE_BITS, ALLOCATOR>
+{
+    fn drop(&mut self) {
+        todo!()
+    }
+}
+
+unsafe impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> Send
+    for VMATree<PAGE_SIZE_BITS, ALLOCATOR>
+where
+    ALLOCATOR: Send,
+{
+}
+
+/// A non-null pointer to a VMA node in one of the two trees.
+struct VMARef<const PAGE_SIZE_BITS: usize, KIND: TreeKind>(
+    NonNull<VMANode<PAGE_SIZE_BITS>>,
+    PhantomData<KIND>,
+);
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> VMARef<PAGE_SIZE_BITS, KIND> {
+    /// Returns the parent, if there was one.
+    ///
+    /// # Safety
+    ///
+    /// - The pointer must be valid.
+    pub unsafe fn parent(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+        let ptr = KIND::parent(self.0);
+        if self == ptr {
+            None
+        } else {
+            Some(ptr)
+        }
+    }
+
+    /// Returns the left child, if there was one.
+    ///
+    /// # Safety
+    ///
+    /// - The pointer must be valid.
+    pub unsafe fn left(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+        let ptr = KIND::left(self.0);
+        if self == ptr {
+            None
+        } else {
+            Some(ptr)
+        }
+    }
+
+    /// Returns the right child, if there was one.
+    ///
+    /// # Safety
+    ///
+    /// - The pointer must be valid.
+    pub unsafe fn right(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+        let ptr = KIND::right(self.0);
+        if self == ptr {
+            None
+        } else {
+            Some(ptr)
+        }
+    }
+}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Clone for VMARef<PAGE_SIZE_BITS, KIND> {
+    fn clone(&self) -> VMARef<PAGE_SIZE_BITS, KIND> {
+        *self
+    }
+}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Copy for VMARef<PAGE_SIZE_BITS, KIND> {}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Eq for VMARef<PAGE_SIZE_BITS, KIND> {}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> PartialEq for VMARef<PAGE_SIZE_BITS, KIND> {
+    fn eq(&self, other: &VMARef<PAGE_SIZE_BITS, KIND>) -> bool {
+        self.0 == other.0
+    }
+}
+
+/// A "physical" VMA node.
+struct VMANode<const PAGE_SIZE_BITS: usize> {
+    addr_and_state: usize,
+    size_and_balance: usize,
+
+    /// A self-pointer if we're the root.
+    addr_parent: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+    /// A self-pointer if we have no left child.
+    addr_left_child: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+    /// A self-pointer if we have no right child.
+    addr_right_child: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+    /// A self-pointer if we're the root.
+    size_parent: VMARef<PAGE_SIZE_BITS, SizeTree>,
+
+    /// A self-pointer if we have no left child.
+    size_left_child: VMARef<PAGE_SIZE_BITS, SizeTree>,
+
+    /// A self-pointer if we have no right child.
+    size_right_child: VMARef<PAGE_SIZE_BITS, SizeTree>,
+}
+
+impl<const PAGE_SIZE_BITS: usize> VMANode<PAGE_SIZE_BITS> {
+    /// Allocates a new node in the given allocator.
+    ///
+    /// The node has the given range of addresses and is in the `Free` state. All of its pointers
+    /// are self-pointers.
+    #[requires(PAGE_SIZE_BITS > 0)]
+    #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    pub fn new_in<ALLOCATOR: Allocator>(
+        addrs: Range<usize>,
+        allocator: &ALLOCATOR,
+    ) -> Result<NonNull<VMANode<PAGE_SIZE_BITS>>, AllocError> {
+        let layout = Layout::new::<MaybeUninit<VMANode<PAGE_SIZE_BITS>>>();
+        let mut ptr: NonNull<MaybeUninit<VMANode<PAGE_SIZE_BITS>>> =
+            allocator.allocate(layout)?.cast();
+
+        // SAFETY: This needs to be OK for the allocator to meet the conditions of its trait.
+        VMANode::init_in(unsafe { ptr.as_mut() }, addrs);
+
+        Ok(ptr.cast())
+    }
+
+    /// Initializes a node.
+    ///
+    /// The node has the given range of addresses and is in the `Free` state. All of its pointers
+    /// are self-pointers.
+    #[requires(PAGE_SIZE_BITS > 0)]
+    #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    pub fn init_in(maybe_uninit: &mut MaybeUninit<VMANode<PAGE_SIZE_BITS>>, addrs: Range<usize>) {
+        let ptr = NonNull::from(&*maybe_uninit).cast();
+        maybe_uninit.write(VMANode {
+            addr_and_state: addrs.start,
+            size_and_balance: (addrs.end - addrs.start)
+                | ((Balance::Balanced as usize) << 2)
+                | (Balance::Balanced as usize),
+            addr_parent: VMARef(ptr, PhantomData),
+            addr_left_child: VMARef(ptr, PhantomData),
+            addr_right_child: VMARef(ptr, PhantomData),
+            size_parent: VMARef(ptr, PhantomData),
+            size_left_child: VMARef(ptr, PhantomData),
+            size_right_child: VMARef(ptr, PhantomData),
+        });
+    }
+
+    /// Returns the range of addresses represented by this node.
+    #[ensures(ret.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    #[ensures(ret.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+    pub fn addrs(&self) -> Range<usize> {
+        let addr = self.addr_and_state & !((1 << PAGE_SIZE_BITS) - 1);
+        let size = self.size_and_balance & !((1 << PAGE_SIZE_BITS) - 1);
+        addr..addr + size
+    }
+
+    /// Returns the state of the addresses represented by this node.
+    #[requires(PAGE_SIZE_BITS >= 1)]
+    pub fn state(&self) -> State {
+        match self.addr_and_state & 1 {
+            0 => State::Free,
+            _ => State::Used,
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum Balance {
+    LeftHeavy = 0,
+    Balanced = 1,
+    RightHeavy = 2,
+}
+
+impl Balance {
+    pub fn from_bits(bits: usize) -> Balance {
+        match bits {
+            0 => Balance::LeftHeavy,
+            1 => Balance::Balanced,
+            2 => Balance::RightHeavy,
+            _ => panic!("invalid Balance: {bits}"),
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum State {
+    Free,
+    Used,
+}
+
+trait TreeKind: Sized {
+    unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> Balance;
+
+    unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, Self>;
+
+    unsafe fn left<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, Self>;
+
+    unsafe fn right<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, Self>;
+}
+
+enum AddrTree {}
+
+impl TreeKind for AddrTree {
+    #[requires(PAGE_SIZE_BITS >= 4)]
+    unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> Balance {
+        let size_and_balance = (*node.as_ptr()).size_and_balance;
+        Balance::from_bits(size_and_balance & 0b11)
+    }
+
+    unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, AddrTree> {
+        (*node.as_ptr()).addr_parent
+    }
+
+    unsafe fn left<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, AddrTree> {
+        (*node.as_ptr()).addr_left_child
+    }
+
+    unsafe fn right<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, AddrTree> {
+        (*node.as_ptr()).addr_right_child
+    }
+}
+
+enum SizeTree {}
+
+impl TreeKind for SizeTree {
+    #[requires(PAGE_SIZE_BITS >= 4)]
+    unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> Balance {
+        let size_and_balance = (*node.as_ptr()).size_and_balance;
+        Balance::from_bits((size_and_balance >> 2) & 0b11)
+    }
+
+    unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, SizeTree> {
+        (*node.as_ptr()).size_parent
+    }
+
+    unsafe fn left<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, SizeTree> {
+        (*node.as_ptr()).size_left_child
+    }
+
+    unsafe fn right<const PAGE_SIZE_BITS: usize>(
+        node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+    ) -> VMARef<PAGE_SIZE_BITS, SizeTree> {
+        (*node.as_ptr()).size_right_child
+    }
+}
--
cgit v1.2.3
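
A note on the packed fields: addr_and_state keeps the page-aligned start address in its high
bits with the Free/Used state in bit 0, and size_and_balance keeps the page-aligned size with
the two AVL balance factors in its low bits (address tree in bits 1:0, size tree in bits 3:2),
which is why balance() requires PAGE_SIZE_BITS >= 4. Page alignment is what frees those low
bits for tags in the first place. A standalone sketch of the packing, assuming
PAGE_SIZE_BITS = 12 (4 KiB pages); this only mirrors the patch, it is not code from it:

// Standalone sketch of the VMANode bit-packing; PAGE_SIZE_BITS = 12 is an
// assumption, and these names only mirror the patch.
const PAGE_SIZE_BITS: usize = 12;

// Balance encodings, matching the patch's Balance enum discriminants.
const LEFT_HEAVY: usize = 0;
const BALANCED: usize = 1;

fn main() {
    let start = 0x8000_0000_usize; // page-aligned start address
    let size = 0x4000_usize; // page-aligned size (four 4 KiB pages)

    // Bit 0 of addr_and_state is the state tag: 0 = Free, 1 = Used.
    let addr_and_state = start | 1;

    // Bits 1:0 hold the address tree's balance factor, bits 3:2 the size
    // tree's; both fit because the low PAGE_SIZE_BITS bits of a size are zero.
    let size_and_balance = size | (BALANCED << 2) | LEFT_HEAVY;

    // Unpacking masks off the low PAGE_SIZE_BITS bits, as VMANode::addrs does.
    let addr = addr_and_state & !((1 << PAGE_SIZE_BITS) - 1);
    let len = size_and_balance & !((1 << PAGE_SIZE_BITS) - 1);
    assert_eq!(addr..addr + len, 0x8000_0000..0x8000_4000);
    assert_eq!(addr_and_state & 1, 1); // Used
    assert_eq!(size_and_balance & 0b11, LEFT_HEAVY); // addr-tree balance
    assert_eq!((size_and_balance >> 2) & 0b11, BALANCED); // size-tree balance
}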
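
The link fields likewise avoid Option: an absent parent or child is encoded as a pointer back
to the node itself, so every link stays a plain NonNull, and VMARef::parent/left/right recover
the Option by comparing the result against self. A minimal self-contained sketch of that
sentinel convention, using a hypothetical single-link Node rather than the patch's types:

use core::ptr::NonNull;

// Hypothetical one-link node; a self-pointer plays the role of "no neighbor",
// mirroring how VMANode::init_in points all six links at the node itself.
struct Node {
    next: NonNull<Node>,
}

// Recovers Option-ness by comparing against the node itself, like
// VMARef::parent/left/right do.
fn next_of(node: NonNull<Node>) -> Option<NonNull<Node>> {
    // SAFETY: callers must pass a pointer to a live Node.
    let next = unsafe { node.as_ref().next };
    if next == node {
        None // self-pointer: the sentinel for "no next node"
    } else {
        Some(next)
    }
}

fn main() {
    let mut a = Node {
        next: NonNull::dangling(),
    };
    let a_ptr = NonNull::from(&mut a);
    // Tie the self-pointer through the pointer itself, as init_in does.
    unsafe { (*a_ptr.as_ptr()).next = a_ptr };
    assert!(next_of(a_ptr).is_none());

    let mut b = Node { next: a_ptr };
    let b_ptr = NonNull::from(&mut b);
    assert_eq!(next_of(b_ptr), Some(a_ptr));
}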
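
Finally, the public surface so far is just new_in and add. A usage sketch, under the
assumptions that the crate is exported as vernos_alloc_vma_tree (following the vernos_utils
naming), that allocator_api2's Global serves as the backing allocator, and that
PAGE_SIZE_BITS is 12; the commit itself reports an ICE, and with the todo!() branches still
in place only the first add on an empty tree can succeed, so this is illustrative rather than
tested:

use allocator_api2::alloc::{AllocError, Global};
use vernos_alloc_vma_tree::VMATree; // assumed crate name

fn main() -> Result<(), AllocError> {
    // 4 KiB pages are an assumption; PAGE_SIZE_BITS is the const generic.
    let mut tree: VMATree<12, Global> = VMATree::new_in(Global);

    // Both endpoints must be page-aligned, per the #[requires] contracts.
    tree.add(0x8000_0000..0x8010_0000)?;

    // A second add would currently reach the todo!() merge path.
    Ok(())
}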