Diffstat (limited to 'crates/alloc_vma_tree/src/lib.rs')
-rw-r--r--  crates/alloc_vma_tree/src/lib.rs  317
1 file changed, 317 insertions(+), 0 deletions(-)
diff --git a/crates/alloc_vma_tree/src/lib.rs b/crates/alloc_vma_tree/src/lib.rs
new file mode 100644
index 0000000..38dd312
--- /dev/null
+++ b/crates/alloc_vma_tree/src/lib.rs
@@ -0,0 +1,317 @@
+//! A data structure to manage virtual address ranges.
+#![no_std]
+
+use allocator_api2::alloc::{AllocError, Allocator, Layout};
+use contracts::{ensures, requires};
+use core::{marker::PhantomData, mem::MaybeUninit, ops::Range, ptr::NonNull};
+use vernos_utils::cold;
+
+/// A data structure to manage virtual address ranges.
+///
+/// This is a pair of sorted trees of virtual memory areas. The two trees contain the same nodes,
+/// but one is sorted by size (so we can find a best-fit allocation) and one is sorted by the start
+/// address (so we can find a neighbor to merge free VMAs with).
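+///
+/// # Example
+///
+/// A hypothetical usage sketch, not from this commit; it assumes
+/// `allocator_api2`'s `Global` allocator and is `ignore`d because `add` (for a
+/// non-empty tree) and `Drop` are still unimplemented:
+///
+/// ```ignore
+/// use alloc_vma_tree::VMATree;
+/// use allocator_api2::alloc::Global;
+///
+/// // Manage 4 KiB pages, so PAGE_SIZE_BITS = 12.
+/// let mut tree: VMATree<12, Global> = VMATree::new_in(Global);
+/// tree.add(0x1000..0x8000).expect("out of memory");
+/// ```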
+#[derive(Default)]
+pub struct VMATree<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> {
+ roots: Option<(
+ VMARef<PAGE_SIZE_BITS, AddrTree>,
+ VMARef<PAGE_SIZE_BITS, SizeTree>,
+ )>,
+ allocator: ALLOCATOR,
+}
+
+impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> VMATree<PAGE_SIZE_BITS, ALLOCATOR> {
+ /// Creates a new VMATree that will use the given allocator.
+ pub const fn new_in(allocator: ALLOCATOR) -> VMATree<PAGE_SIZE_BITS, ALLOCATOR> {
+ VMATree {
+ roots: None,
+ allocator,
+ }
+ }
+
+ /// Adds a new VMA to the tree.
+ ///
+ /// The VMA will be marked as free.
+ #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ pub fn add(&mut self, addrs: Range<usize>) -> Result<(), AllocError> {
+        if let Some((_addr_root, _size_root)) = &mut self.roots {
+            // TODO: insert into both trees, merging and rebalancing as needed.
+            todo!()
+ } else {
+ cold();
+ let node = VMANode::new_in(addrs, &self.allocator)?;
+ self.roots = Some((VMARef(node, PhantomData), VMARef(node, PhantomData)));
+ Ok(())
+ }
+ }
+}
+
+impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> Drop
+ for VMATree<PAGE_SIZE_BITS, ALLOCATOR>
+{
+    fn drop(&mut self) {
+        // TODO: walk both trees and deallocate every node.
+        todo!()
+ }
+}
+
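+// SAFETY: The tree exclusively owns the nodes its raw pointers refer to, so it
+// may be sent to another thread whenever the allocator itself is `Send`.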
+unsafe impl<const PAGE_SIZE_BITS: usize, ALLOCATOR: Allocator> Send
+ for VMATree<PAGE_SIZE_BITS, ALLOCATOR>
+where
+ ALLOCATOR: Send,
+{
+}
+
+/// A non-null pointer to a VMA node in one of the two trees.
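+///
+/// The `KIND` parameter is a zero-sized tag (`AddrTree` or `SizeTree`) that
+/// records, at the type level, which tree's links this reference follows.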
+struct VMARef<const PAGE_SIZE_BITS: usize, KIND: TreeKind>(
+ NonNull<VMANode<PAGE_SIZE_BITS>>,
+ PhantomData<KIND>,
+);
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> VMARef<PAGE_SIZE_BITS, KIND> {
+    /// Returns the parent, if there is one.
+ ///
+ /// # Safety
+ ///
+ /// - The pointer must be valid.
+ pub unsafe fn parent(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+ let ptr = KIND::parent(self.0);
+ if self == ptr {
+ None
+ } else {
+ Some(ptr)
+ }
+ }
+
+    /// Returns the left child, if there is one.
+ ///
+ /// # Safety
+ ///
+ /// - The pointer must be valid.
+ pub unsafe fn left(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+ let ptr = KIND::left(self.0);
+ if self == ptr {
+ None
+ } else {
+ Some(ptr)
+ }
+ }
+
+    /// Returns the right child, if there is one.
+ ///
+ /// # Safety
+ ///
+ /// - The pointer must be valid.
+ pub unsafe fn right(self) -> Option<VMARef<PAGE_SIZE_BITS, KIND>> {
+ let ptr = KIND::right(self.0);
+ if self == ptr {
+ None
+ } else {
+ Some(ptr)
+ }
+ }
+}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Clone for VMARef<PAGE_SIZE_BITS, KIND> {
+ fn clone(&self) -> VMARef<PAGE_SIZE_BITS, KIND> {
+ *self
+ }
+}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Copy for VMARef<PAGE_SIZE_BITS, KIND> {}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> Eq for VMARef<PAGE_SIZE_BITS, KIND> {}
+
+impl<const PAGE_SIZE_BITS: usize, KIND: TreeKind> PartialEq for VMARef<PAGE_SIZE_BITS, KIND> {
+ fn eq(&self, other: &VMARef<PAGE_SIZE_BITS, KIND>) -> bool {
+ self.0 == other.0
+ }
+}
+
+/// A "physical" VMA node.
+struct VMANode<const PAGE_SIZE_BITS: usize> {
+    /// The page-aligned start address, with the `State` packed into bit 0.
+    addr_and_state: usize,
+
+    /// The page-aligned size, with the address-tree `Balance` packed into bits
+    /// 0-1 and the size-tree `Balance` packed into bits 2-3.
+    size_and_balance: usize,
+
+ /// A self-pointer if we're the root.
+ addr_parent: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+ /// A self-pointer if we have no left child.
+ addr_left_child: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+ /// A self-pointer if we have no right child.
+ addr_right_child: VMARef<PAGE_SIZE_BITS, AddrTree>,
+
+ /// A self-pointer if we're the root.
+ size_parent: VMARef<PAGE_SIZE_BITS, SizeTree>,
+
+ /// A self-pointer if we have no left child.
+ size_left_child: VMARef<PAGE_SIZE_BITS, SizeTree>,
+
+ /// A self-pointer if we have no right child.
+ size_right_child: VMARef<PAGE_SIZE_BITS, SizeTree>,
+}
+
+impl<const PAGE_SIZE_BITS: usize> VMANode<PAGE_SIZE_BITS> {
+ /// Allocates a new node in the given allocator.
+ ///
+ /// The node has the given range of addresses and is in the `Free` state. All of its pointers
+ /// are self-pointers.
+    #[requires(PAGE_SIZE_BITS >= 4)] // the low 4 bits hold the two balance fields
+ #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ pub fn new_in<ALLOCATOR: Allocator>(
+ addrs: Range<usize>,
+ allocator: &ALLOCATOR,
+ ) -> Result<NonNull<VMANode<PAGE_SIZE_BITS>>, AllocError> {
+ let layout = Layout::new::<MaybeUninit<Self>>();
+ let mut ptr: NonNull<MaybeUninit<Self>> = allocator.allocate(layout)?.cast();
+
+        // SAFETY: A successful `allocate` returns a pointer valid for writes of
+        // `layout`, so we may form a unique reference to the uninitialized value.
+ VMANode::init_in(unsafe { ptr.as_mut() }, addrs);
+
+ Ok(ptr.cast())
+ }
+
+ /// Initializes a node.
+ ///
+ /// The node has the given range of addresses and is in the `Free` state. All of its pointers
+ /// are self-pointers.
+    #[requires(PAGE_SIZE_BITS >= 4)] // the low 4 bits hold the two balance fields
+ #[requires(addrs.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ #[requires(addrs.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ pub fn init_in(maybe_uninit: &mut MaybeUninit<VMANode<PAGE_SIZE_BITS>>, addrs: Range<usize>) {
+ let ptr = NonNull::from(&*maybe_uninit).cast();
+ maybe_uninit.write(VMANode {
+ addr_and_state: addrs.start,
+ size_and_balance: (addrs.end - addrs.start)
+ | ((Balance::Balanced as usize) << 2)
+ | (Balance::Balanced as usize),
+ addr_parent: VMARef(ptr, PhantomData),
+ addr_left_child: VMARef(ptr, PhantomData),
+ addr_right_child: VMARef(ptr, PhantomData),
+ size_parent: VMARef(ptr, PhantomData),
+ size_left_child: VMARef(ptr, PhantomData),
+ size_right_child: VMARef(ptr, PhantomData),
+ });
+ }
+
+ /// Returns the range of addresses represented by this node.
+ #[ensures(ret.start & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ #[ensures(ret.end & ((1 << PAGE_SIZE_BITS) - 1) == 0)]
+ pub fn addrs(&self) -> Range<usize> {
+ let addr = self.addr_and_state & !((1 << PAGE_SIZE_BITS) - 1);
+ let size = self.size_and_balance & !((1 << PAGE_SIZE_BITS) - 1);
+ addr..addr + size
+ }
+
+ /// Returns the state of the addresses represented by this node.
+ #[requires(PAGE_SIZE_BITS >= 1)]
+ pub fn state(&self) -> State {
+ match self.addr_and_state & 1 {
+ 0 => State::Free,
+ _ => State::Used,
+ }
+ }
+}
+
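+/// The balance factor of a node in one of the trees, stored as a two-bit field
+/// of `VMANode::size_and_balance`.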
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum Balance {
+ LeftHeavy = 0,
+ Balanced = 1,
+ RightHeavy = 2,
+}
+
+impl Balance {
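+    /// Decodes a balance factor stored via `as usize`, panicking on the unused
+    /// pattern `0b11` (callers mask down to two bits first). Illustrative
+    /// round-trip, not a doc-test, since the type is private:
+    ///
+    /// ```ignore
+    /// assert_eq!(Balance::from_bits(Balance::RightHeavy as usize), Balance::RightHeavy);
+    /// ```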
+ pub fn from_bits(bits: usize) -> Balance {
+ match bits {
+ 0 => Balance::LeftHeavy,
+ 1 => Balance::Balanced,
+ 2 => Balance::RightHeavy,
+ _ => panic!("invalid Balance: {bits}"),
+ }
+ }
+}
+
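+/// Whether the address range covered by a node is free or in use, packed into
+/// bit 0 of `VMANode::addr_and_state`.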
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+enum State {
+ Free,
+ Used,
+}
+
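+/// Selects one of the two sets of links inside a `VMANode`.
+///
+/// Implemented by the uninhabited tag types `AddrTree` and `SizeTree` so that
+/// traversal code can be written once, generically over which tree it walks.
+/// All methods are unsafe: callers must pass a pointer to a valid, fully
+/// initialized `VMANode`.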
+trait TreeKind: Sized {
+ unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> Balance;
+
+ unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self>;
+
+ unsafe fn left<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self>;
+
+ unsafe fn right<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self>;
+}
+
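+/// Tag type for the tree sorted by start address.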
+enum AddrTree {}
+
+impl TreeKind for AddrTree {
+ #[requires(PAGE_SIZE_BITS >= 4)]
+ unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> Balance {
+ let size_and_balance = (*node.as_ptr()).size_and_balance;
+ Balance::from_bits(size_and_balance & 0b11)
+ }
+
+ unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).addr_parent
+ }
+
+ unsafe fn left<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).addr_left_child
+ }
+
+ unsafe fn right<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).addr_right_child
+ }
+}
+
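+/// Tag type for the tree sorted by size.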
+enum SizeTree {}
+
+impl TreeKind for SizeTree {
+ #[requires(PAGE_SIZE_BITS >= 4)]
+ unsafe fn balance<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> Balance {
+ let size_and_balance = (*node.as_ptr()).size_and_balance;
+ Balance::from_bits((size_and_balance >> 2) & 0b11)
+ }
+
+ unsafe fn parent<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).size_parent
+ }
+
+ unsafe fn left<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).size_left_child
+ }
+
+ unsafe fn right<const PAGE_SIZE_BITS: usize>(
+ node: NonNull<VMANode<PAGE_SIZE_BITS>>,
+ ) -> VMARef<PAGE_SIZE_BITS, Self> {
+ (*node.as_ptr()).size_right_child
+ }
+}
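+
+// A minimal sanity-check sketch, not part of the original commit: it exercises
+// the bit-packing round-trip of `init_in`, `addrs`, and `state` on a
+// stack-allocated node, and runs under `cargo test` on a hosted target.
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn init_in_packs_addr_size_and_state() {
+        // 4 KiB pages, so PAGE_SIZE_BITS = 12; addresses must be page-aligned.
+        let mut slot = MaybeUninit::<VMANode<12>>::uninit();
+        VMANode::init_in(&mut slot, 0x1000..0x8000);
+        // SAFETY: `init_in` fully initialized the node.
+        let node = unsafe { slot.assume_init_ref() };
+        assert_eq!(node.addrs(), 0x1000..0x8000);
+        assert_eq!(node.state(), State::Free);
+    }
+}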