// x86_64/structures/paging/page.rs

use crate::sealed::Sealed;
use crate::structures::paging::page_table::PageTableLevel;
use crate::structures::paging::PageTableIndex;
use crate::VirtAddr;
use core::fmt;
#[cfg(feature = "step_trait")]
use core::iter::Step;
use core::marker::PhantomData;
use core::ops::{Add, AddAssign, Sub, SubAssign};
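/// Trait for abstracting over the possible page sizes on x86_64: 4KiB, 2MiB, 1GiB.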
pub trait PageSize: Copy + Eq + PartialOrd + Ord + Sealed {
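/// The page size in bytes.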
const SIZE: u64;
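/// A string representation of the page size for debug output.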
const DEBUG_STR: &'static str;
}
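/// Marker trait implemented for 4KiB and 2MiB pages, but not for 1GiB pages.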
pub trait NotGiantPageSize: PageSize {}
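/// A standard 4KiB page.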
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Size4KiB {}
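/// A "huge" 2MiB page.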
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Size2MiB {}
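/// A "giant" 1GiB page.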
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Size1GiB {}
impl PageSize for Size4KiB {
const SIZE: u64 = 4096;
const DEBUG_STR: &'static str = "4KiB";
}
impl NotGiantPageSize for Size4KiB {}
impl Sealed for super::Size4KiB {}
impl PageSize for Size2MiB {
const SIZE: u64 = Size4KiB::SIZE * 512;
const DEBUG_STR: &'static str = "2MiB";
}
impl NotGiantPageSize for Size2MiB {}
impl Sealed for super::Size2MiB {}
impl PageSize for Size1GiB {
const SIZE: u64 = Size2MiB::SIZE * 512;
const DEBUG_STR: &'static str = "1GiB";
}
impl Sealed for super::Size1GiB {}
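/// A virtual memory page.
///
/// The page size is given by the type parameter `S`, which defaults to [`Size4KiB`].
///
/// A small usage sketch (assuming the crate's usual `x86_64::structures::paging`
/// re-exports):
///
/// ```
/// use x86_64::structures::paging::{Page, Size4KiB};
/// use x86_64::VirtAddr;
///
/// // Any address inside a page maps to the page's aligned start address.
/// let page: Page<Size4KiB> = Page::containing_address(VirtAddr::new(0xdead_bfff));
/// assert_eq!(page.start_address(), VirtAddr::new(0xdead_b000));
/// ```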
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct Page<S: PageSize = Size4KiB> {
start_address: VirtAddr,
size: PhantomData<S>,
}
impl<S: PageSize> Page<S> {
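/// The page size in bytes.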
pub const SIZE: u64 = S::SIZE;
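/// Returns the page that starts at the given virtual address.
///
/// Returns an error if the address is not correctly aligned (i.e. is not a valid page start).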
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn from_start_address(address: VirtAddr) -> Result<Self, AddressNotAligned> {
if !address.is_aligned_u64(S::SIZE) {
return Err(AddressNotAligned);
}
Ok(Page::containing_address(address))
}
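/// Returns the page that starts at the given virtual address.
///
/// ## Safety
///
/// The address must be correctly aligned to `S::SIZE`.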
#[inline]
#[rustversion::attr(since(1.61), const)]
pub unsafe fn from_start_address_unchecked(start_address: VirtAddr) -> Self {
Page {
start_address,
size: PhantomData,
}
}
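/// Returns the page that contains the given virtual address.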
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn containing_address(address: VirtAddr) -> Self {
Page {
start_address: address.align_down_u64(S::SIZE),
size: PhantomData,
}
}
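/// Returns the start address of the page.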
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn start_address(self) -> VirtAddr {
self.start_address
}
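/// Returns the size of the page (4KiB, 2MiB or 1GiB).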
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn size(self) -> u64 {
S::SIZE
}
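/// Returns the level 4 page table index of this page.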
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn p4_index(self) -> PageTableIndex {
self.start_address().p4_index()
}
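/// Returns the level 3 page table index of this page.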
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn p3_index(self) -> PageTableIndex {
self.start_address().p3_index()
}
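/// Returns the table index of this page at the specified page table level.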
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn page_table_index(self, level: PageTableLevel) -> PageTableIndex {
self.start_address().page_table_index(level)
}
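/// Returns a range of pages, exclusive `end`.
///
/// A brief sketch of iterating such a range (same path assumptions as the
/// [`Page`] example):
///
/// ```
/// use x86_64::structures::paging::{Page, Size4KiB};
/// use x86_64::VirtAddr;
///
/// let start: Page<Size4KiB> = Page::containing_address(VirtAddr::new(0x1000));
/// let end = start + 3; // three pages past `start`
/// assert_eq!(Page::range(start, end).count(), 3);
/// ```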
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn range(start: Self, end: Self) -> PageRange<S> {
PageRange { start, end }
}
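/// Returns a range of pages, inclusive `end`.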
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn range_inclusive(start: Self, end: Self) -> PageRangeInclusive<S> {
PageRangeInclusive { start, end }
}
pub(crate) fn steps_between_impl(start: &Self, end: &Self) -> Option<usize> {
VirtAddr::steps_between_impl(&start.start_address, &end.start_address)
.map(|steps| steps / S::SIZE as usize)
}
pub(crate) fn forward_checked_impl(start: Self, count: usize) -> Option<Self> {
let count = count.checked_mul(S::SIZE as usize)?;
let start_address = VirtAddr::forward_checked_impl(start.start_address, count)?;
Some(Self {
start_address,
size: PhantomData,
})
}
}
impl<S: NotGiantPageSize> Page<S> {
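/// Returns the level 2 page table index of this page.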
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn p2_index(self) -> PageTableIndex {
self.start_address().p2_index()
}
}
impl Page<Size1GiB> {
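/// Returns the 1GiB memory page with the specified page table indices.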
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn from_page_table_indices_1gib(
p4_index: PageTableIndex,
p3_index: PageTableIndex,
) -> Self {
let mut addr = 0;
addr |= p4_index.into_u64() << 39;
addr |= p3_index.into_u64() << 30;
Page::containing_address(VirtAddr::new_truncate(addr))
}
}
impl Page<Size2MiB> {
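/// Returns the 2MiB memory page with the specified page table indices.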
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn from_page_table_indices_2mib(
p4_index: PageTableIndex,
p3_index: PageTableIndex,
p2_index: PageTableIndex,
) -> Self {
let mut addr = 0;
addr |= p4_index.into_u64() << 39;
addr |= p3_index.into_u64() << 30;
addr |= p2_index.into_u64() << 21;
Page::containing_address(VirtAddr::new_truncate(addr))
}
}
impl Page<Size4KiB> {
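/// Returns the 4KiB memory page with the specified page table indices.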
#[inline]
#[rustversion::attr(since(1.61), const)]
pub fn from_page_table_indices(
p4_index: PageTableIndex,
p3_index: PageTableIndex,
p2_index: PageTableIndex,
p1_index: PageTableIndex,
) -> Self {
let mut addr = 0;
addr |= p4_index.into_u64() << 39;
addr |= p3_index.into_u64() << 30;
addr |= p2_index.into_u64() << 21;
addr |= p1_index.into_u64() << 12;
Page::containing_address(VirtAddr::new_truncate(addr))
}
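/// Returns the level 1 page table index of this page.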
#[inline]
pub const fn p1_index(self) -> PageTableIndex {
self.start_address.p1_index()
}
}
impl<S: PageSize> fmt::Debug for Page<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_fmt(format_args!(
"Page[{}]({:#x})",
S::DEBUG_STR,
self.start_address().as_u64()
))
}
}
impl<S: PageSize> Add<u64> for Page<S> {
type Output = Self;
#[inline]
fn add(self, rhs: u64) -> Self::Output {
Page::containing_address(self.start_address() + rhs * S::SIZE)
}
}
impl<S: PageSize> AddAssign<u64> for Page<S> {
#[inline]
fn add_assign(&mut self, rhs: u64) {
*self = *self + rhs;
}
}
impl<S: PageSize> Sub<u64> for Page<S> {
type Output = Self;
#[inline]
fn sub(self, rhs: u64) -> Self::Output {
Page::containing_address(self.start_address() - rhs * S::SIZE)
}
}
impl<S: PageSize> SubAssign<u64> for Page<S> {
#[inline]
fn sub_assign(&mut self, rhs: u64) {
*self = *self - rhs;
}
}
impl<S: PageSize> Sub<Self> for Page<S> {
type Output = u64;
#[inline]
fn sub(self, rhs: Self) -> Self::Output {
(self.start_address - rhs.start_address) / S::SIZE
}
}
#[cfg(feature = "step_trait")]
impl<S: PageSize> Step for Page<S> {
fn steps_between(start: &Self, end: &Self) -> Option<usize> {
Self::steps_between_impl(start, end)
}
fn forward_checked(start: Self, count: usize) -> Option<Self> {
Self::forward_checked_impl(start, count)
}
fn backward_checked(start: Self, count: usize) -> Option<Self> {
let count = count.checked_mul(S::SIZE as usize)?;
let start_address = Step::backward_checked(start.start_address, count)?;
Some(Self {
start_address,
size: PhantomData,
})
}
}
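/// A range of pages with exclusive upper bound.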
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[repr(C)]
pub struct PageRange<S: PageSize = Size4KiB> {
pub start: Page<S>,
pub end: Page<S>,
}
impl<S: PageSize> PageRange<S> {
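/// Returns whether this range contains no pages.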
#[inline]
pub fn is_empty(&self) -> bool {
self.start >= self.end
}
}
impl<S: PageSize> Iterator for PageRange<S> {
type Item = Page<S>;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.start < self.end {
let page = self.start;
self.start += 1;
Some(page)
} else {
None
}
}
}
impl PageRange<Size2MiB> {
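/// Converts this range of 2MiB pages into a range of 4KiB pages covering the same virtual addresses.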
#[inline]
pub fn as_4kib_page_range(self) -> PageRange<Size4KiB> {
PageRange {
start: Page::containing_address(self.start.start_address()),
end: Page::containing_address(self.end.start_address()),
}
}
}
impl<S: PageSize> fmt::Debug for PageRange<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("PageRange")
.field("start", &self.start)
.field("end", &self.end)
.finish()
}
}
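/// A range of pages with inclusive upper bound.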
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[repr(C)]
pub struct PageRangeInclusive<S: PageSize = Size4KiB> {
pub start: Page<S>,
pub end: Page<S>,
}
impl<S: PageSize> PageRangeInclusive<S> {
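/// Returns whether this range contains no pages.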
#[inline]
pub fn is_empty(&self) -> bool {
self.start > self.end
}
}
impl<S: PageSize> Iterator for PageRangeInclusive<S> {
type Item = Page<S>;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.start <= self.end {
let page = self.start;
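// If `start` is already the highest page of size S, incrementing it would
// overflow the address space. In that case shrink the range from the `end`
// side instead; either way the range eventually becomes empty.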
let max_page_addr = VirtAddr::new(u64::MAX) - (S::SIZE - 1);
if self.start.start_address() < max_page_addr {
self.start += 1;
} else {
self.end -= 1;
}
Some(page)
} else {
None
}
}
}
impl<S: PageSize> fmt::Debug for PageRangeInclusive<S> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("PageRangeInclusive")
.field("start", &self.start)
.field("end", &self.end)
.finish()
}
}
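/// The given address was not sufficiently aligned.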
#[derive(Debug)]
pub struct AddressNotAligned;
impl fmt::Display for AddressNotAligned {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "the given address was not sufficiently aligned")
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
pub fn test_page_ranges() {
let page_size = Size4KiB::SIZE;
let number = 1000;
let start_addr = VirtAddr::new(0xdead_beaf);
let start: Page = Page::containing_address(start_addr);
let end = start + number;
let mut range = Page::range(start, end);
for i in 0..number {
assert_eq!(
range.next(),
Some(Page::containing_address(start_addr + page_size * i))
);
}
assert_eq!(range.next(), None);
let mut range_inclusive = Page::range_inclusive(start, end);
for i in 0..=number {
assert_eq!(
range_inclusive.next(),
Some(Page::containing_address(start_addr + page_size * i))
);
}
assert_eq!(range_inclusive.next(), None);
}
#[test]
pub fn test_page_range_inclusive_overflow() {
let page_size = Size4KiB::SIZE;
let number = 1000;
let start_addr = VirtAddr::new(u64::MAX).align_down(page_size) - number * page_size;
let start: Page = Page::containing_address(start_addr);
let end = start + number;
let mut range_inclusive = Page::range_inclusive(start, end);
for i in 0..=number {
assert_eq!(
range_inclusive.next(),
Some(Page::containing_address(start_addr + page_size * i))
);
}
assert_eq!(range_inclusive.next(), None);
}
}