Diffstat (limited to 'crates/kernel/src/arch/riscv64/paging.rs')
-rw-r--r--  crates/kernel/src/arch/riscv64/paging.rs | 197
1 file changed, 169 insertions(+), 28 deletions(-)
diff --git a/crates/kernel/src/arch/riscv64/paging.rs b/crates/kernel/src/arch/riscv64/paging.rs
index 5ebdb5f..6b81881 100644
--- a/crates/kernel/src/arch/riscv64/paging.rs
+++ b/crates/kernel/src/arch/riscv64/paging.rs
@@ -1,6 +1,8 @@
-use crate::arch::PAGE_SIZE;
+use crate::arch::{PAGE_SIZE, PAGE_SIZE_BITS};
use contracts::requires;
-use core::{arch::asm, fmt, str};
+use core::{arch::asm, fmt, iter, ops::RangeInclusive, str};
+use either::Either;
+use vernos_utils::debug;
/// The number of bits looked up in each page table entry.
pub const PAGE_TABLE_BITS: usize = 9;
@@ -18,7 +20,6 @@ impl ASID {
}
/// A single page table.
-#[derive(Debug)]
#[repr(align(4096))]
pub struct PageTable([PageTableEntry; 512]);
@@ -50,6 +51,57 @@ impl PageTable {
}
}
+impl fmt::Debug for PageTable {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ // Get an iterator over the valid leaf page table entries.
+ let mut mappings = iter_level_2_mappings(&self.0, 0).peekable();
+
+ // Make an iterator that merges adjacent entries that have the same flags.
+ let mappings = iter::from_fn(|| {
+ let (entry, mut vaddrs) = mappings.next()?;
+ let paddrs_start = entry.addr() as usize;
+ let mut len = (vaddrs.end() - vaddrs.start()) + 1;
+
+ while let Some((next_entry, next_vaddrs)) = mappings.peek() {
+ let next_paddrs_start = next_entry.addr() as usize;
+
+ if entry.flag_bits() != next_entry.flag_bits()
+ || vaddrs.end().wrapping_add(1) != *next_vaddrs.start()
+ || paddrs_start.wrapping_add(len) != next_paddrs_start
+ {
+ break;
+ }
+ // UNWRAP: .peek() already showed us that there's a next entry.
+ let (_, next_vaddrs) = mappings.next().unwrap();
+ vaddrs = *vaddrs.start()..=*next_vaddrs.end();
+ len = (next_vaddrs.end() - vaddrs.start()) + 1;
+ }
+ let paddrs = paddrs_start..=paddrs_start + (len - 1);
+ Some((entry, vaddrs, paddrs))
+ });
+
+ // Turn the iterator into an iterator over Debugs.
+ let debug_mappings = mappings.map(|(entry, vaddrs, paddrs)| {
+ debug(move |fmt| {
+ let flags = entry.flags_str();
+ // UNWRAP: The flags must be ASCII by the postcondition of flags_str().
+ let flags = str::from_utf8(&flags).unwrap();
+ write!(
+ fmt,
+ "[V|{:16x}-{:16x}][P|{:16x}-{:16x}][F|{}]",
+ *vaddrs.start(),
+ *vaddrs.end(),
+ *paddrs.start(),
+ *paddrs.end(),
+ flags
+ )
+ })
+ });
+
+ fmt.debug_list().entries(debug_mappings).finish()
+ }
+}
+
/// An entry in a page table.
#[derive(Clone, Copy, Default, Eq, PartialEq)]
pub struct PageTableEntry(u64);
@@ -62,11 +114,52 @@ impl PageTableEntry {
(self.0 >> 10) & 0x0000_0fff_ffff_ffff
}
+ /// Returns the bits of the entry that correspond to flags.
+ ///
+ /// This isn't `pub` because this isn't portable, though maybe it makes sense to instead export
+ /// a predicate for "do these two entries have the _same_ flags bits," since that should be
+ /// more portable.
+ #[requires(self.valid())]
+ #[ensures((ret & !0xffc0_0000_0000_03ff) == 0)]
+ fn flag_bits(&self) -> u64 {
+ self.0 & 0xffc0_0000_0000_03ff
+ }
+
+ /// Returns bytes that correspond to an ASCII string with the flags.
+ #[requires(self.valid())]
+ #[ensures(ret.iter().all(|ch| ch.is_ascii()))]
+ fn flags_str(&self) -> [u8; 7] {
+ let mut flags = *b"rwxugad";
+ let char_disabled = b'-';
+ if !self.readable() {
+ flags[0] = char_disabled;
+ }
+ if !self.writable() {
+ flags[1] = char_disabled;
+ }
+ if !self.executable() {
+ flags[2] = char_disabled;
+ }
+ if !self.user() {
+ flags[3] = char_disabled;
+ }
+ if !self.global() {
+ flags[4] = char_disabled;
+ }
+ if !self.accessed() {
+ flags[5] = char_disabled;
+ }
+ if !self.dirty() {
+ flags[6] = char_disabled;
+ }
+ flags
+ }
+
/// Returns the physical address of the backing page or next level page table.
#[requires(self.valid())]
#[ensures((ret & !0x003f_ffff_ffff_fc00) == 0)]
pub fn addr(&self) -> u64 {
- self.ppn() << PAGE_TABLE_BITS
+ self.ppn() << PAGE_SIZE_BITS
}
/// Returns a pointer to the backing page.
@@ -228,31 +321,9 @@ impl PageTableEntry {
impl fmt::Debug for PageTableEntry {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
if self.valid() {
- let mut flags = *b"DAGUXWRV";
- if !self.dirty() {
- flags[0] = b' ';
- }
- if !self.accessed() {
- flags[1] = b' ';
- }
- if !self.global() {
- flags[2] = b' ';
- }
- if !self.user() {
- flags[3] = b' ';
- }
- if !self.executable() {
- flags[4] = b' ';
- }
- if !self.writable() {
- flags[5] = b' ';
- }
- if !self.readable() {
- flags[6] = b' ';
- }
-
- // UNWRAP: The flags must be ASCII.
let addr = self.addr() as *const ();
+ let flags = self.flags_str();
+ // UNWRAP: The flags must be ASCII by the postcondition of flags_str().
let flags = str::from_utf8(&flags).unwrap();
write!(fmt, "PageTableEntry({addr:018p}, {flags})")
} else {
@@ -260,3 +331,73 @@ impl fmt::Debug for PageTableEntry {
}
}
}
+
+/// See `PageTable::iter_mappings`. This needs to be its own function because of `impl Trait`; we
+/// can't allocate here, and we want a fixed-size iterator.
+fn iter_level_2_mappings(
+ table: &[PageTableEntry; 512],
+ base_addr: usize,
+) -> impl '_ + Iterator<Item = (PageTableEntry, RangeInclusive<usize>)> {
+ const ENTRY_SIZE: usize = 1 << (12 + 9 + 9);
+ table
+ .iter()
+ .enumerate()
+ .filter(|(_, entry)| entry.valid())
+ .flat_map(move |(i, &entry)| {
+ let mut start_addr = base_addr + i * ENTRY_SIZE;
+ if i >= 256 {
+ start_addr += 0xffff_ff80_0000_0000;
+ }
+ if entry.leaf_pte() {
+ Either::Left(iter::once((
+ entry,
+ start_addr..=start_addr + (ENTRY_SIZE - 1),
+ )))
+ } else {
+ let next_table = unsafe { &(*entry.page_table()).0 };
+ Either::Right(iter_level_1_mappings(next_table, start_addr))
+ }
+ })
+}
+
+/// See `PageTable::iter_mappings`. This needs to be its own function because of `impl Trait`; we
+/// can't allocate here, and we want a fixed-size iterator.
+fn iter_level_1_mappings(
+ table: &[PageTableEntry; 512],
+ base_addr: usize,
+) -> impl '_ + Iterator<Item = (PageTableEntry, RangeInclusive<usize>)> {
+ const ENTRY_SIZE: usize = 1 << (12 + 9);
+ table
+ .iter()
+ .enumerate()
+ .filter(|(_, entry)| entry.valid())
+ .flat_map(move |(i, &entry)| {
+ let start_addr = base_addr + i * ENTRY_SIZE;
+ if entry.leaf_pte() {
+ Either::Left(iter::once((
+ entry,
+ start_addr..=start_addr + (ENTRY_SIZE - 1),
+ )))
+ } else {
+ let next_table = unsafe { &(*entry.page_table()).0 };
+ Either::Right(iter_level_0_mappings(next_table, start_addr))
+ }
+ })
+}
+
+/// See `PageTable::iter_mappings`. This needs to be its own function because of `impl Trait`; we
+/// can't allocate here, and we want a fixed-size iterator.
+fn iter_level_0_mappings(
+ table: &[PageTableEntry; 512],
+ base_addr: usize,
+) -> impl '_ + Iterator<Item = (PageTableEntry, RangeInclusive<usize>)> {
+ const ENTRY_SIZE: usize = 1 << 12;
+ table
+ .iter()
+ .enumerate()
+ .filter(|(_, entry)| entry.valid())
+ .map(move |(i, &entry)| {
+ let start_addr = base_addr + i * ENTRY_SIZE;
+ (entry, start_addr..=start_addr + (ENTRY_SIZE - 1))
+ })
+}
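
The Debug impl added above coalesces adjacent leaf mappings that share flags and are contiguous in both the virtual and the physical address space, using a peekable iterator driven by iter::from_fn. The sketch below illustrates that merging pattern in isolation; it is not part of the patch, the u8 "flags" and plain usize ranges are stand-ins for the real PageTableEntry data, and the physical-address contiguity check is dropped for brevity.

use core::iter;
use core::ops::RangeInclusive;

/// Merges adjacent (flags, range) pairs whose flags match and whose ranges abut.
fn merge_adjacent(
    mappings: Vec<(u8, RangeInclusive<usize>)>,
) -> Vec<(u8, RangeInclusive<usize>)> {
    let mut mappings = mappings.into_iter().peekable();
    iter::from_fn(|| {
        // Start a run from the next mapping.
        let (flags, mut vaddrs) = mappings.next()?;
        // Extend the run while the next mapping has the same flags and starts
        // exactly one past the current end.
        while let Some((next_flags, next_vaddrs)) = mappings.peek() {
            if *next_flags != flags || vaddrs.end().wrapping_add(1) != *next_vaddrs.start() {
                break;
            }
            // UNWRAP: .peek() already showed us that there's a next entry.
            let (_, next_vaddrs) = mappings.next().unwrap();
            vaddrs = *vaddrs.start()..=*next_vaddrs.end();
        }
        Some((flags, vaddrs))
    })
    .collect()
}

fn main() {
    let merged = merge_adjacent(vec![(1, 0..=4095), (1, 4096..=8191), (2, 8192..=12287)]);
    assert_eq!(merged, vec![(1, 0..=8191), (2, 8192..=12287)]);
}

The iter_level_* helpers lean on the either crate for the same reason the patch avoids allocation elsewhere: the two branches of the if produce different iterator types, and wrapping them in Either::Left and Either::Right gives both arms one concrete type that still implements Iterator. A minimal sketch of that dispatch, assuming the either crate is available and using placeholder values rather than page table entries:

use either::Either;

/// Returns one of two different iterator types behind a single `impl Trait`.
fn leaf_or_children(is_leaf: bool) -> impl Iterator<Item = u32> {
    if is_leaf {
        Either::Left(core::iter::once(0))
    } else {
        Either::Right([1, 2, 3].into_iter())
    }
}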