Diffstat (limited to 'crates/buddy_allocator')
-rw-r--r--  crates/buddy_allocator/src/bitvec.rs         |  54
-rw-r--r--  crates/buddy_allocator/src/free_list.rs      |  80
-rw-r--r--  crates/buddy_allocator/src/lib.rs            |  89
-rw-r--r--  crates/buddy_allocator/src/tree.rs           | 166
-rw-r--r--  crates/buddy_allocator/tests/hosted_test.rs  |  57
5 files changed, 283 insertions(+), 163 deletions(-)
diff --git a/crates/buddy_allocator/src/bitvec.rs b/crates/buddy_allocator/src/bitvec.rs
index 856d579..5cabc5e 100644
--- a/crates/buddy_allocator/src/bitvec.rs
+++ b/crates/buddy_allocator/src/bitvec.rs
@@ -1,59 +1,77 @@
use contracts::requires;
use core::{fmt, mem::transmute};
-/// A fixed-length vector of bits.
-pub struct BitVec([usize]);
+/// A fixed-length vector of bits used to track whether a subregion is in the free lists.
+#[repr(transparent)]
+pub struct Bitset([usize]);
-impl BitVec {
- fn from_mut(words: &mut [usize]) -> &mut BitVec {
+impl Bitset {
+ fn from_mut(words: &mut [usize]) -> &mut Bitset {
// SAFETY: The types have a newtype relationship.
unsafe { transmute(words) }
}
- fn from_ref(words: &[usize]) -> &BitVec {
+ fn from_ref(words: &[usize]) -> &Bitset {
// SAFETY: The types have a newtype relationship.
unsafe { transmute(words) }
}
- /// Retrieves the value of a bit from the BitVec.
+ /// Retrieves the value of a bit from the Bitset.
#[requires(i < self.len())]
- pub fn get(&self, i: usize) -> bool {
+ pub fn get(&self, i: usize) -> SubregionStatus {
let word_index = i / usize::BITS as usize;
let subword_index = i % usize::BITS as usize;
let one_hot = 1 << subword_index;
- (self.0[word_index] & one_hot) != 0
+ if (self.0[word_index] & one_hot) == 0 {
+ SubregionStatus::NotInFreeList
+ } else {
+ SubregionStatus::InFreeList
+ }
}
- /// Returns whether the BitVec is empty.
+ /// Returns whether the Bitset is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
- /// Returns the number of bits in the BitVec.
+ /// Returns the number of bits in the Bitset.
pub fn len(&self) -> usize {
self.0.len() * usize::BITS as usize
}
- /// Sets the value of a bit in the BitVec.
+ /// Sets the value of a bit in the Bitset.
#[requires(i < self.len())]
- pub fn set(&mut self, i: usize, value: bool) {
+ pub fn set(&mut self, i: usize, status: SubregionStatus) {
let word_index = i / usize::BITS as usize;
let subword_index = i % usize::BITS as usize;
let one_hot = 1 << subword_index;
- if value {
- self.0[word_index] |= one_hot;
- } else {
- self.0[word_index] &= !one_hot;
+ match status {
+ SubregionStatus::NotInFreeList => {
+ self.0[word_index] &= !one_hot;
+ }
+ SubregionStatus::InFreeList => {
+ self.0[word_index] |= one_hot;
+ }
}
}
}
-impl fmt::Debug for BitVec {
+impl fmt::Debug for Bitset {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- write!(fmt, "BitVec(")?;
+ write!(fmt, "Bitset(")?;
for word in &self.0 {
write!(fmt, "{word:064b}")?;
}
write!(fmt, ")")
}
}
+
+/// The status of a subregion, as tracked by `Bitset`.
+#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
+pub enum SubregionStatus {
+    /// The subregion is not in the free list.
+    NotInFreeList = 0,
+
+    /// The subregion is in the free list.
+    InFreeList = 1,
+}
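
A standalone sketch of the indexing math above, for illustration only (not part of the patch): bit i of the Bitset lives in word i / usize::BITS, at bit position i % usize::BITS, and get/set apply a one-hot mask at that position.

    // Standalone sketch of Bitset's word/bit indexing (illustrative only).
    fn split_index(i: usize) -> (usize, usize) {
        (i / usize::BITS as usize, i % usize::BITS as usize)
    }

    fn main() {
        let mut words = [0usize; 2]; // a 128-bit Bitset on a 64-bit target
        let (word, bit) = split_index(70);
        words[word] |= 1 << bit; // set(70, SubregionStatus::InFreeList)
        assert_ne!(words[word] & (1 << bit), 0); // get(70) == InFreeList
        words[word] &= !(1 << bit); // set(70, SubregionStatus::NotInFreeList)
        assert_eq!(words[word] & (1 << bit), 0); // get(70) == NotInFreeList
    }
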
diff --git a/crates/buddy_allocator/src/free_list.rs b/crates/buddy_allocator/src/free_list.rs
index 2e3afe2..b794b8c 100644
--- a/crates/buddy_allocator/src/free_list.rs
+++ b/crates/buddy_allocator/src/free_list.rs
@@ -1,4 +1,4 @@
-//! A doubly-linked list of power-of-two-sized subregions.
+//! A circular doubly-linked list of power-of-two-sized subregions.
use core::{
fmt,
@@ -7,25 +7,52 @@ use core::{
ptr::{self, NonNull},
};
+use contracts::{ensures, requires};
+
/// The magic number a non-sentinel node should have.
const FREE_NODE_MAGIC: u64 = u64::from_ne_bytes(*b"FREENODE");
/// The magic number a sentinel node should have.
const SENTINEL_MAGIC: u64 = u64::from_ne_bytes(*b"SENTINEL");
-/// A doubly-linked list of power-of-two-sized subregions.
+/// The magic number written over a node's magic number when it is unlinked.
+const USED_NODE_MAGIC: u64 = u64::from_ne_bytes(*b"ALLOC'ED");
+
+/// A circular doubly-linked list of power-of-two-sized subregions.
///
/// This type is valid when zero-initialized.
///
/// # Safety
///
-/// - This type is valid only if it is zero-initialized or if it contains a valid doubly-linked
-/// list of subregions of the correct size, which it owns.
+/// - This type is valid only if it is zero-initialized or if it contains a valid circular
+/// doubly-linked list of subregions of the correct size, which it owns.
pub struct FreeList {
sentinel: FreeListNode,
}
impl FreeList {
+ /// Pops a subregion from the free-list, returning ownership to the caller.
+ pub fn pop(mut self: Pin<&mut FreeList>) -> Option<NonNull<u8>> {
+ assert_ne!(self.sentinel.next, ptr::null_mut());
+ assert_ne!(self.sentinel.prev, ptr::null_mut());
+
+ // Check if the sentinel's next pointer points to the sentinel itself.
+ let sentinel = &self.sentinel as *const FreeListNode as *mut FreeListNode;
+ let next = *self.as_mut().sentinel().next_mut();
+ if sentinel == next {
+ // If so, the list is empty.
+ None
+ } else {
+ // Otherwise, unlink that node.
+ //
+ // UNWRAP: The pointer was non-null, by the invariants of FreeList.
+ let mut node = NonNull::new(next).unwrap();
+ // SAFETY: The list was valid, by the invariants of FreeList.
+ unsafe { node.as_mut().unlink() };
+ Some(node.cast())
+ }
+ }
+
/// Pushes a subregion into the free-list.
///
/// # Safety
@@ -36,19 +63,23 @@ impl FreeList {
assert_ne!(self.sentinel.prev, ptr::null_mut());
// Write the header the node should have to the node.
- let sentinel = &self.sentinel as *const FreeListNode as *mut FreeListNode;
- let next = *self.as_mut().sentinel().next_mut();
- let node = subregion_ptr.cast();
+ //
+ // SAFETY: This &mut is not used to move out of the node.
+ let sentinel = unsafe { self.as_mut().sentinel().get_unchecked_mut() };
+ let mut node = subregion_ptr.cast();
node.write(FreeListNode {
- next,
+ next: sentinel.next,
prev: sentinel,
magic: FREE_NODE_MAGIC,
_phantom: PhantomPinned,
});
// Link the node into the list.
- *self.sentinel().next_mut() = node.as_ptr();
- (*next).prev = node.as_ptr();
+ //
+ // SAFETY: There are no other references to the node live.
+ let node = unsafe { node.as_mut() };
+ (*node.prev).next = node;
+ (*node.next).prev = node;
}
/// Performs self-linking. This must be done before the free list is used in any other way.
@@ -126,4 +157,33 @@ impl FreeListNode {
// SAFETY: magic is structurally pinned.
unsafe { &mut self.get_unchecked_mut().magic }
}
+
+ /// Links the node into a circular doubly-linked list.
+ ///
+ /// # Safety
+ ///
+ /// This must be a node that's not part of any linked list.
+ unsafe fn link(&mut self) {
+ todo!()
+ }
+
+ /// Unlinks a node from a circular doubly-linked list.
+ ///
+ /// # Safety
+ ///
+ /// This must be a non-sentinel node in a valid circular doubly-linked list.
+ #[requires(self.magic == FREE_NODE_MAGIC)]
+ #[ensures(self.magic == USED_NODE_MAGIC)]
+ unsafe fn unlink(&mut self) {
+ let prev = self.prev;
+ let next = self.next;
+
+        // `ptr::addr_of_mut!` avoids creating `&mut` references to the neighboring nodes.
+        *ptr::addr_of_mut!((*prev).next) = next;
+        *ptr::addr_of_mut!((*next).prev) = prev;
+
+ self.next = ptr::null_mut();
+ self.prev = ptr::null_mut();
+ self.magic = USED_NODE_MAGIC;
+ }
}
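
For illustration (not part of the patch), the circular-list invariants above can be modeled with indices standing in for raw pointers: an empty list is a self-linked sentinel, push splices a node in directly after the sentinel, and unlink is the usual prev.next = next / next.prev = prev splice.

    // Index-based model of FreeList's circular-list invariants (illustrative
    // only; the real code uses raw pointers and a pinned sentinel).
    struct Node {
        prev: usize,
        next: usize,
    }

    fn unlink(nodes: &mut [Node], i: usize) {
        let (prev, next) = (nodes[i].prev, nodes[i].next);
        nodes[prev].next = next;
        nodes[next].prev = prev;
    }

    fn main() {
        // Node 0 is the sentinel; self-linked means the list is empty.
        let mut nodes = vec![Node { prev: 0, next: 0 }];

        // Push node 1 just after the sentinel, as FreeList::push does.
        let head = nodes[0].next;
        nodes.push(Node { prev: 0, next: head });
        nodes[0].next = 1;
        nodes[head].prev = 1;
        assert_eq!(nodes[0].next, 1);

        // Pop unlinks sentinel.next, restoring the self-linked empty state.
        unlink(&mut nodes, 1);
        assert_eq!((nodes[0].prev, nodes[0].next), (0, 0));
    }
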
diff --git a/crates/buddy_allocator/src/lib.rs b/crates/buddy_allocator/src/lib.rs
index 441c77f..4292444 100644
--- a/crates/buddy_allocator/src/lib.rs
+++ b/crates/buddy_allocator/src/lib.rs
@@ -50,9 +50,13 @@ mod tree;
// mod stripe;
-use crate::{bitvec::BitVec, free_list::FreeList, tree::Tree};
+use crate::{
+ bitvec::{Bitset, SubregionStatus},
+ free_list::FreeList,
+ tree::Tree,
+};
use allocator_api2::alloc::{AllocError, Layout, LayoutError};
-use contracts::requires;
+use contracts::{ensures, requires};
use core::{mem, pin::Pin, ptr::NonNull};
use vernos_physmem_free_list::FreeListAllocator;
use vernos_utils::pin::{pin_project_slice_mut, pin_project_slice_mut_iter};
@@ -72,7 +76,7 @@ pub struct BuddyAllocator<
trees: &'allocator mut [Tree<'allocator, PAGE_SIZE, PAGE_SIZE_BITS>],
/// The shared bitset.
- bitset: &'allocator mut BitVec,
+ bitset: &'allocator mut Bitset,
}
impl<
@@ -149,8 +153,10 @@ impl<
)
.as_mut()
};
- let bitset: &'allocator mut BitVec = unsafe {
- mem::transmute::<&mut [usize], &mut BitVec>(
+        // SAFETY: This type is valid when zero-initialized, and is a `#[repr(transparent)]`
+        // newtype for `[usize]`, so it has the same layout.
+ let bitset: &'allocator mut Bitset = unsafe {
+ mem::transmute::<&mut [usize], &mut Bitset>(
NonNull::slice_from_raw_parts(
metadata.byte_add(metadata_layout.bitset_offset).cast(),
metadata_layout.bitset_len,
@@ -222,16 +228,14 @@ impl<
// UNWRAP: The pointer must have been non-null when it was in the free-list
// allocator.
let ptr = NonNull::new(tree.base_ptr as *mut u8).unwrap();
+ let free_list = pin_project_slice_mut(free_lists.as_mut(), tree.size_class);
// SAFETY: This region is not owned or borrowed by anything else (the only thing it
// could be would be the metadata, but that's not in this tree), is of the correct
// size, and is composed of untyped memory.
- unsafe {
- pin_project_slice_mut(free_lists.as_mut(), tree.size_class).push(ptr);
- }
+ unsafe { free_list.push(ptr) };
// Then, set a bit in the bitset to say that this region is present.
- assert!(!bitset.get(tree.bitset_offset));
- bitset.set(tree.bitset_offset, true);
+ tree.bitset_mark_as_present(bitset, tree.size_class, 0);
}
}
@@ -244,9 +248,24 @@ impl<
}
/// Tries to allocate a subregion of a particular size class from the allocator.
- #[requires((0..SIZE_CLASS_COUNT).contains(&size_class))]
+ #[requires(size_class < SIZE_CLASS_COUNT)]
pub fn alloc(&mut self, size_class: usize) -> Result<NonNull<u8>, AllocError> {
- Err(AllocError)
+        // Fast path: try to pop a subregion from the free list of the requested
+        // size class.
+        if let Some(ptr) = self.free_list_mut(size_class).pop() {
+ // Find the corresponding tree and the offset of the subregion in the tree.
+ let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let tree = &mut self.trees[tree_index];
+
+ // Mark the pointer as no longer being in the tree.
+ tree.bitset_mark_as_absent(&mut self.bitset, size_class, offset);
+
+ // Return the pointer.
+ Ok(ptr)
+        } else {
+            // Slow path (splitting a subregion from a larger size class) is not yet implemented.
+            Err(AllocError)
+ }
}
/// Returns a subregion of a particular size class to the allocator.
@@ -255,9 +274,49 @@ impl<
///
/// - `ptr` must have been returned by a call to `alloc` with the same `size_class`.
/// - `ptr` must convey ownership over the entire region.
- #[requires((0..SIZE_CLASS_COUNT).contains(&size_class))]
- pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, size_class: usize) {
- todo!()
+ #[requires(size_class < SIZE_CLASS_COUNT)]
+ pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, mut size_class: usize) {
+ // Find the corresponding tree and the offset of the subregion in the tree.
+ let (tree_index, offset) = self.tree_index_and_offset(ptr);
+ let tree = &mut self.trees[tree_index];
+
+ // Ensure that the memory is marked as not being in the free list.
+ assert_eq!(
+ tree.bitset_get(&mut self.bitset, size_class, offset),
+ SubregionStatus::NotInFreeList
+ );
+
+ // Start combining size classes.
+ while size_class < tree.size_class {
+ let buddy_offset = offset ^ (PAGE_SIZE << size_class);
+ todo!("{buddy_offset:#x} {offset:#x}");
+ }
+
+ todo!("{tree:#?}")
+ }
+
+ /// Returns a (pinned) mutable reference to the free list with the given size class.
+ fn free_list_mut(&mut self, i: usize) -> Pin<&mut FreeList> {
+ pin_project_slice_mut(self.free_lists.as_mut(), i)
+ }
+
+ /// Returns the index of the tree this pointer was allocated in.
+ #[ensures(self.trees[ret.0].contains(ptr.as_ptr() as usize))]
+ #[ensures(ret.1 < (1 << self.trees[ret.0].size_class))]
+ fn tree_index_and_offset(&self, ptr: NonNull<u8>) -> (usize, usize) {
+ let addr = ptr.as_ptr() as usize;
+ let tree_index = self
+ .trees
+ .binary_search_by_key(&addr, |tree| tree.base_ptr as usize);
+ let tree_index = match tree_index {
+ Ok(i) => i,
+ Err(i) => {
+ assert_ne!(i, 0);
+ i - 1
+ }
+ };
+ let offset = addr - self.trees[tree_index].base_ptr as usize;
+ (tree_index, offset)
}
}
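
The buddy computation dealloc begins with follows from the power-of-two layout: a size-class-k subregion spans PAGE_SIZE << k bytes, so XOR-ing the offset with exactly that bit yields the other half of the parent block. A sketch of the arithmetic, assuming PAGE_SIZE = 4096 purely for illustration:

    // Sketch of the buddy-offset arithmetic used in dealloc (PAGE_SIZE = 4096
    // is assumed here purely for illustration).
    const PAGE_SIZE: usize = 4096;

    fn buddy_offset(offset: usize, size_class: usize) -> usize {
        offset ^ (PAGE_SIZE << size_class)
    }

    fn main() {
        // Two size-class-0 siblings inside one size-class-1 block:
        assert_eq!(buddy_offset(0, 0), 4096);
        assert_eq!(buddy_offset(4096, 0), 0);
        // At size class 1, buddies sit two pages apart:
        assert_eq!(buddy_offset(0, 1), 8192);
        assert_eq!(buddy_offset(8192, 1), 0);
    }
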
diff --git a/crates/buddy_allocator/src/tree.rs b/crates/buddy_allocator/src/tree.rs
index 1673e26..7c67f3a 100644
--- a/crates/buddy_allocator/src/tree.rs
+++ b/crates/buddy_allocator/src/tree.rs
@@ -1,3 +1,6 @@
+use crate::bitvec::{Bitset, SubregionStatus};
+use contracts::requires;
+
/// A single region of the allocator. See the comment on the `crate::allocators::buddy` module for
/// more information.
///
@@ -13,118 +16,77 @@ pub struct Tree<'buddy, const PAGE_SIZE: usize, const PAGE_SIZE_BITS: usize> {
/// The offset in the bitset to the bits responsible for this tree's pages.
pub bitset_offset: usize,
- // /// The bitset used to track whether subregion are allocated or not in this tree.
- // pub bitset: &'buddy mut BitVec,
phantom: core::marker::PhantomData<&'buddy mut [u8; PAGE_SIZE]>,
}
-/*
-use crate::{
- bitvec::BitVec,
- stripe::{FreeListNode, Stripe},
- MAX_ORDER, PAGE_SIZE_BITS,
-};
-use contracts::requires;
-
-/// A single region of the allocator. See the comment on the `crate::allocators::buddy` module for
-/// more information.
-pub struct Tree<'buddy> {
- /// The base address of the tree.
- pub base: *const (),
-
- /// The log2 of the number of pages in the region represented by the tree.
- pub log2_page_count: usize,
-
- /// The array of sentinel nodes of the free-list for this node. As an invariant of the type,
- /// there are no live references to any node in any list in this array, and there are
- /// MAX_ORDER + 1 nodes.
- pub free_lists: NonNull<FreeListNode>,
-
- /// The bitset used to track whether subregion are allocated or not in this tree.
- pub bitset: &'buddy mut BitVec,
-}
-
-impl<'buddy> Tree<'buddy> {
- /// Tries to allocate a subregion with the given order, possibly splitting a larger subregion
- /// in order to so so.
- #[requires(order <= MAX_ORDER)]
- pub fn alloc(&mut self, order: usize) -> Option<NonNull<FreeListNode>> {
- if let Some(ptr) = self.stripe(order).pop() {
- Some(ptr)
- } else if order == MAX_ORDER {
- None
- } else {
- // Get a larger region.
- let ptr = self.alloc(order + 1)?;
-
- // Get a pointer to the higher half.
- //
- // SAFETY: This has to be in-bounds, it's part of the same allocation!
- let higher_half = unsafe { ptr.byte_add(1 << (PAGE_SIZE_BITS + order)) };
-
- // Put the higher half in the current buddy's stripe.
- //
- // SAFETY: The higher half is from this region, not in the higher stripe, and of the
- // right size.
- unsafe {
- self.stripe(order).push(higher_half);
- }
-
- // Return the pointer.
- Some(ptr)
- }
+impl<'buddy, const PAGE_SIZE: usize, const PAGE_SIZE_BITS: usize>
+ Tree<'buddy, PAGE_SIZE, PAGE_SIZE_BITS>
+{
+ /// Reads a bit from the bitset.
+ #[requires(size_class <= self.size_class)]
+    #[requires(offset_bytes < (PAGE_SIZE << self.size_class))]
+ #[requires(offset_bytes.trailing_zeros() as usize >= PAGE_SIZE_BITS + size_class)]
+ pub fn bitset_get(
+ &self,
+        bitset: &Bitset,
+ size_class: usize,
+ offset_bytes: usize,
+ ) -> SubregionStatus {
+ bitset.get(self.bitset_index(size_class, offset_bytes))
}
- /// Returns the stripe with the given order.
- #[requires(order <= MAX_ORDER)]
- fn stripe<'stripe>(&'stripe mut self, order: usize) -> Stripe<'stripe> {
- // TODO: There should be some smart bitwise-math version of this...
- let mut bitset_offset = 0;
- for i in 0..order {
- bitset_offset += (1 << self.log2_page_count) >> i;
- }
-
- Stripe {
- base: self.base,
- order,
- // SAFETY: order is constrained to be in-bounds.
- free_list: unsafe { self.free_lists.add(order) },
- bitset: self.bitset,
- bitset_offset,
- }
+ /// Returns the index of a bit in the bitset that corresponds to the given size class and
+ /// offset.
+ #[requires(size_class <= self.size_class)]
+    #[requires(offset_bytes < (PAGE_SIZE << self.size_class))]
+ #[requires(offset_bytes.trailing_zeros() as usize >= PAGE_SIZE_BITS + size_class)]
+ fn bitset_index(&self, size_class: usize, offset_bytes: usize) -> usize {
+        // We store the largest size classes first in the bitset: the tree's own class has one
+        // bit, the next smaller two, and so on, so skipping k size classes skips 2^k - 1 bits.
+        let skipped_size_classes = self.size_class - size_class;
+        let bits_skipped_for_size_class = (1 << skipped_size_classes) - 1;
+
+ // Next, find our index in the size class.
+ let bits_skipped_for_index = offset_bytes >> (PAGE_SIZE_BITS + size_class);
+
+ // The sum of those two is simply our index.
+ bits_skipped_for_size_class + bits_skipped_for_index
}
-}
-/// Evil bitwise version of the reasonable loop to compute the `bitset_offset` of a stripe.
-#[requires(log2_page_count < usize::BITS as usize)]
-#[requires(order < usize::BITS as usize)]
-fn compute_bitset_offset(log2_page_count: usize, order: usize) -> usize {
- let ones = |i: usize| !(usize::MAX << i);
-
- if order > log2_page_count + 1 {
- ones(log2_page_count + 1)
- } else {
- ones(order).wrapping_shl((log2_page_count + 1 - order) as u32)
+ /// Changes a bit in the bitset from `InFreeList` to `NotInFreeList`.
+ #[requires(size_class <= self.size_class)]
+    #[requires(offset_bytes < (PAGE_SIZE << self.size_class))]
+ #[requires(offset_bytes.trailing_zeros() as usize >= PAGE_SIZE_BITS + size_class)]
+ pub fn bitset_mark_as_absent(
+ &self,
+ bitset: &mut Bitset,
+ size_class: usize,
+ offset_bytes: usize,
+ ) {
+ let index = self.bitset_index(size_class, offset_bytes);
+ assert_eq!(bitset.get(index), SubregionStatus::InFreeList);
+ bitset.set(index, SubregionStatus::NotInFreeList)
}
-}
-#[test]
-fn compute_bitset_offset_test() {
- fn compute_bitset_offset_loop(log2_page_count: usize, order: usize) -> usize {
- let mut bitset_offset = 0;
- for i in 0..order {
- bitset_offset += (1 << log2_page_count) >> i;
- }
- bitset_offset
+ /// Changes a bit in the bitset from `NotInFreeList` to `InFreeList`.
+ #[requires(size_class <= self.size_class)]
+    #[requires(offset_bytes < (PAGE_SIZE << self.size_class))]
+ #[requires(offset_bytes.trailing_zeros() as usize >= PAGE_SIZE_BITS + size_class)]
+ pub fn bitset_mark_as_present(
+ &self,
+ bitset: &mut Bitset,
+ size_class: usize,
+ offset_bytes: usize,
+ ) {
+ let index = self.bitset_index(size_class, offset_bytes);
+ assert_eq!(bitset.get(index), SubregionStatus::NotInFreeList);
+ bitset.set(index, SubregionStatus::InFreeList)
}
- for log2_page_count in 0..64 {
- for order in 0..64 {
- assert_eq!(
- compute_bitset_offset(log2_page_count, order),
- compute_bitset_offset_loop(log2_page_count, order),
- );
- }
+ /// Returns whether the tree contains an address.
+ pub fn contains(&self, addr: usize) -> bool {
+ let tree_addr_lo = self.base_ptr as usize;
+ let tree_addr_hi = tree_addr_lo + (PAGE_SIZE << self.size_class);
+ (tree_addr_lo..tree_addr_hi).contains(&addr)
}
}
-*/
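
The layout behind bitset_index can be cross-checked against a naive sum, in the spirit of the loop-versus-bitwise test deleted above: the tree's own size class occupies one bit, the next smaller two, and so on, so dropping k size classes below the largest skips 2^k - 1 bits. A sketch of that check, for illustration only:

    // Sketch checking bitset_index's "largest size classes first" layout
    // against a naive sum (illustrative only).
    fn bits_skipped(tree_size_class: usize, size_class: usize) -> usize {
        (1usize << (tree_size_class - size_class)) - 1
    }

    fn main() {
        let tree_size_class = 5;
        for size_class in 0..=tree_size_class {
            // Size class c holds 2^(tree_size_class - c) bits; sum the classes
            // stored ahead of `size_class`.
            let naive: usize = (size_class + 1..=tree_size_class)
                .map(|c| 1usize << (tree_size_class - c))
                .sum();
            assert_eq!(bits_skipped(tree_size_class, size_class), naive);
        }
    }
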
diff --git a/crates/buddy_allocator/tests/hosted_test.rs b/crates/buddy_allocator/tests/hosted_test.rs
index a841802..d6d3389 100644
--- a/crates/buddy_allocator/tests/hosted_test.rs
+++ b/crates/buddy_allocator/tests/hosted_test.rs
@@ -33,7 +33,7 @@ fn test_simple_scenario() {
scenario.run(false)
}
-const PAGE_SIZE: usize = (size_of::<*const ()>() * 3).next_power_of_two();
+const PAGE_SIZE: usize = 64;
const PAGE_SIZE_BITS: usize = PAGE_SIZE.trailing_zeros() as usize;
const SIZE_CLASS_COUNT: usize = 4;
@@ -182,22 +182,17 @@ impl Scenario {
let backing_mem = self
.range_sizes
.iter()
- .map(|&size| unsafe {
- mmap_anonymous(
- None,
- NonZero::new(size * PAGE_SIZE).unwrap(),
- ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
- MapFlags::MAP_PRIVATE,
- )
- .expect("failed to allocate memory")
+ .map(|&size| {
+ Mmap::new(NonZero::new(size * PAGE_SIZE).unwrap())
+ .expect("failed to allocate memory")
})
.collect::<Vec<_>>();
// Create the free list allocator and move the pages there.
let mut free_list: FreeListAllocator<PAGE_SIZE> = FreeListAllocator::new();
- for (&size, &ptr) in self.range_sizes.iter().zip(backing_mem.iter()) {
+ for (&size, mmap) in self.range_sizes.iter().zip(backing_mem.iter()) {
unsafe {
- free_list.add(ptr.cast(), size);
+ free_list.add(mmap.ptr(), size);
}
}
@@ -222,13 +217,6 @@ impl Scenario {
}
}
}
-
- // Free each of the page ranges.
- for (&size, ptr) in self.range_sizes.iter().zip(backing_mem) {
- unsafe {
- munmap(ptr, size * PAGE_SIZE).expect("failed to free memory");
- }
- }
}
}
@@ -256,3 +244,36 @@ impl<'alloc> fmt::Debug for Alloc<'alloc> {
.finish()
}
}
+
+/// A mmap-allocated region of memory.
+struct Mmap {
+ ptr: NonNull<u8>,
+ len: NonZero<usize>,
+}
+
+impl Mmap {
+ pub fn new(len: NonZero<usize>) -> Result<Mmap, nix::Error> {
+ let ptr = unsafe {
+ mmap_anonymous(
+ None,
+ len,
+ ProtFlags::PROT_READ | ProtFlags::PROT_WRITE,
+ MapFlags::MAP_PRIVATE,
+ )?
+ .cast()
+ };
+ Ok(Mmap { ptr, len })
+ }
+
+ pub fn ptr<T>(&self) -> NonNull<T> {
+ self.ptr.cast()
+ }
+}
+
+impl Drop for Mmap {
+ fn drop(&mut self) {
+ unsafe {
+ munmap(self.ptr(), self.len.get()).expect("failed to free memory");
+ }
+ }
+}
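
The Mmap wrapper makes the backing memory's lifetime RAII-managed, so Drop replaces the hand-written munmap loop deleted above. A sketch of the same shape using only std, with a heap buffer standing in for the real mapping (names are illustrative):

    // RAII sketch: the buffer is freed in Drop, so no explicit cleanup loop
    // is needed (illustrative only; the real code wraps mmap/munmap).
    use std::ptr::NonNull;

    struct Backing {
        ptr: NonNull<u8>,
        len: usize,
    }

    impl Backing {
        fn new(len: usize) -> Backing {
            // Leak a boxed slice; Drop reconstitutes and frees it.
            let buf = vec![0u8; len].into_boxed_slice();
            let ptr = NonNull::new(Box::into_raw(buf) as *mut u8).unwrap();
            Backing { ptr, len }
        }
    }

    impl Drop for Backing {
        fn drop(&mut self) {
            // SAFETY: ptr/len came from the boxed slice leaked in new().
            unsafe {
                drop(Box::from_raw(std::ptr::slice_from_raw_parts_mut(
                    self.ptr.as_ptr(),
                    self.len,
                )));
            }
        }
    }

    fn main() {
        let b = Backing::new(4096);
        assert_eq!(b.len, 4096);
    } // b is dropped here; the memory is reclaimed without an explicit free.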