// x86_64/structures/paging/mapper/offset_page_table.rs
#![cfg(target_pointer_width = "64")]
use crate::structures::paging::{
frame::PhysFrame, mapper::*, page::PageRangeInclusive, page_table::PageTable, FrameDeallocator,
Page, PageTableFlags,
};
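/// A `Mapper` implementation that requires the complete physical memory to be mapped at some
/// offset in the virtual address space.
///
/// This type is a thin wrapper around a [`MappedPageTable`] whose [`PageTableFrameMapping`]
/// adds a fixed offset to every physical frame address.
///
/// ## Example
///
/// A minimal construction sketch. It assumes that the bootloader has mapped the complete
/// physical memory at `phys_mem_offset`; the offset value and the stack-allocated table below
/// are placeholders for illustration only.
///
/// ```no_run
/// use x86_64::{
///     structures::paging::{OffsetPageTable, PageTable},
///     VirtAddr,
/// };
///
/// // Assumed: all of physical memory is mapped starting at this virtual address.
/// let phys_mem_offset = VirtAddr::new(0xFFFF_8000_0000_0000);
/// // In a real kernel this would be a reference to the active level 4 table,
/// // not a freshly created one.
/// let mut level_4_table = PageTable::new();
/// let mut mapper = unsafe { OffsetPageTable::new(&mut level_4_table, phys_mem_offset) };
/// ```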
#[derive(Debug)]
pub struct OffsetPageTable<'a> {
inner: MappedPageTable<'a, PhysOffset>,
}
impl<'a> OffsetPageTable<'a> {
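    /// Creates a new `OffsetPageTable` that uses the given offset for converting virtual
    /// to physical addresses.
    ///
    /// The complete physical memory must be mapped in the virtual address space starting at
    /// address `phys_offset`. This means that, for example, physical address `0x5000` can be
    /// accessed through virtual address `phys_offset + 0x5000`. This mapping is required
    /// because the mapper needs to access arbitrary page table frames, which are not otherwise
    /// reachable from the virtual address space.
    ///
    /// ## Safety
    ///
    /// The caller must guarantee that the passed `phys_offset` is correct and that
    /// `level_4_table` points to the level 4 table of a valid page table hierarchy.
    /// Otherwise this function might break memory safety, e.g. by writing to an illegal
    /// memory location.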
#[inline]
pub unsafe fn new(level_4_table: &'a mut PageTable, phys_offset: VirtAddr) -> Self {
let phys_offset = PhysOffset {
offset: phys_offset,
};
Self {
inner: unsafe { MappedPageTable::new(level_4_table, phys_offset) },
}
}
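    /// Returns an immutable reference to the wrapped level 4 `PageTable` instance.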
pub fn level_4_table(&self) -> &PageTable {
self.inner.level_4_table()
}
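    /// Returns a mutable reference to the wrapped level 4 `PageTable` instance.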
pub fn level_4_table_mut(&mut self) -> &mut PageTable {
self.inner.level_4_table_mut()
}
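    /// Returns the offset used for converting virtual to physical addresses.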
pub fn phys_offset(&self) -> VirtAddr {
self.inner.page_table_frame_mapping().offset
}
}
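/// The `PageTableFrameMapping` used internally by `OffsetPageTable`: page table frames are
/// accessed through the virtual mapping of physical memory at `offset`.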
#[derive(Debug)]
struct PhysOffset {
offset: VirtAddr,
}
unsafe impl PageTableFrameMapping for PhysOffset {
fn frame_to_pointer(&self, frame: PhysFrame) -> *mut PageTable {
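        // Physical memory is mapped linearly at `offset`, so the table's virtual address
        // is the offset plus the frame's physical start address.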
let virt = self.offset + frame.start_address().as_u64();
virt.as_mut_ptr()
}
}
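// The three `Mapper` impls below (1 GiB, 2 MiB and 4 KiB pages) all delegate directly to the
// inner `MappedPageTable`.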
impl<'a> Mapper<Size1GiB> for OffsetPageTable<'a> {
#[inline]
unsafe fn map_to_with_table_flags<A>(
&mut self,
page: Page<Size1GiB>,
frame: PhysFrame<Size1GiB>,
flags: PageTableFlags,
parent_table_flags: PageTableFlags,
allocator: &mut A,
) -> Result<MapperFlush<Size1GiB>, MapToError<Size1GiB>>
where
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}
#[inline]
fn unmap(
&mut self,
page: Page<Size1GiB>,
) -> Result<(PhysFrame<Size1GiB>, MapperFlush<Size1GiB>), UnmapError> {
self.inner.unmap(page)
}
#[inline]
unsafe fn update_flags(
&mut self,
page: Page<Size1GiB>,
flags: PageTableFlags,
) -> Result<MapperFlush<Size1GiB>, FlagUpdateError> {
unsafe { self.inner.update_flags(page, flags) }
}
#[inline]
unsafe fn set_flags_p4_entry(
&mut self,
page: Page<Size1GiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p4_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p3_entry(
&mut self,
page: Page<Size1GiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p3_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p2_entry(
&mut self,
page: Page<Size1GiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p2_entry(page, flags) }
}
#[inline]
fn translate_page(&self, page: Page<Size1GiB>) -> Result<PhysFrame<Size1GiB>, TranslateError> {
self.inner.translate_page(page)
}
}
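// Delegating `Mapper` impl for 2 MiB pages.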
impl<'a> Mapper<Size2MiB> for OffsetPageTable<'a> {
#[inline]
unsafe fn map_to_with_table_flags<A>(
&mut self,
page: Page<Size2MiB>,
frame: PhysFrame<Size2MiB>,
flags: PageTableFlags,
parent_table_flags: PageTableFlags,
allocator: &mut A,
) -> Result<MapperFlush<Size2MiB>, MapToError<Size2MiB>>
where
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}
#[inline]
fn unmap(
&mut self,
page: Page<Size2MiB>,
) -> Result<(PhysFrame<Size2MiB>, MapperFlush<Size2MiB>), UnmapError> {
self.inner.unmap(page)
}
#[inline]
unsafe fn update_flags(
&mut self,
page: Page<Size2MiB>,
flags: PageTableFlags,
) -> Result<MapperFlush<Size2MiB>, FlagUpdateError> {
unsafe { self.inner.update_flags(page, flags) }
}
#[inline]
unsafe fn set_flags_p4_entry(
&mut self,
page: Page<Size2MiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p4_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p3_entry(
&mut self,
page: Page<Size2MiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p3_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p2_entry(
&mut self,
page: Page<Size2MiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p2_entry(page, flags) }
}
#[inline]
fn translate_page(&self, page: Page<Size2MiB>) -> Result<PhysFrame<Size2MiB>, TranslateError> {
self.inner.translate_page(page)
}
}
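// Delegating `Mapper` impl for 4 KiB pages.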
impl<'a> Mapper<Size4KiB> for OffsetPageTable<'a> {
#[inline]
unsafe fn map_to_with_table_flags<A>(
&mut self,
page: Page<Size4KiB>,
frame: PhysFrame<Size4KiB>,
flags: PageTableFlags,
parent_table_flags: PageTableFlags,
allocator: &mut A,
) -> Result<MapperFlush<Size4KiB>, MapToError<Size4KiB>>
where
A: FrameAllocator<Size4KiB> + ?Sized,
{
unsafe {
self.inner
.map_to_with_table_flags(page, frame, flags, parent_table_flags, allocator)
}
}
#[inline]
fn unmap(
&mut self,
page: Page<Size4KiB>,
) -> Result<(PhysFrame<Size4KiB>, MapperFlush<Size4KiB>), UnmapError> {
self.inner.unmap(page)
}
#[inline]
unsafe fn update_flags(
&mut self,
page: Page<Size4KiB>,
flags: PageTableFlags,
) -> Result<MapperFlush<Size4KiB>, FlagUpdateError> {
unsafe { self.inner.update_flags(page, flags) }
}
#[inline]
unsafe fn set_flags_p4_entry(
&mut self,
page: Page<Size4KiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p4_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p3_entry(
&mut self,
page: Page<Size4KiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p3_entry(page, flags) }
}
#[inline]
unsafe fn set_flags_p2_entry(
&mut self,
page: Page<Size4KiB>,
flags: PageTableFlags,
) -> Result<MapperFlushAll, FlagUpdateError> {
unsafe { self.inner.set_flags_p2_entry(page, flags) }
}
#[inline]
fn translate_page(&self, page: Page<Size4KiB>) -> Result<PhysFrame<Size4KiB>, TranslateError> {
self.inner.translate_page(page)
}
}
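// Address translation is likewise forwarded to the inner `MappedPageTable`.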
impl<'a> Translate for OffsetPageTable<'a> {
#[inline]
fn translate(&self, addr: VirtAddr) -> TranslateResult {
self.inner.translate(addr)
}
}
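// Cleanup of empty intermediate page tables is forwarded to the inner `MappedPageTable` as well.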
impl<'a> CleanUp for OffsetPageTable<'a> {
#[inline]
unsafe fn clean_up<D>(&mut self, frame_deallocator: &mut D)
where
D: FrameDeallocator<Size4KiB>,
{
unsafe { self.inner.clean_up(frame_deallocator) }
}
#[inline]
unsafe fn clean_up_addr_range<D>(
&mut self,
range: PageRangeInclusive,
frame_deallocator: &mut D,
) where
D: FrameDeallocator<Size4KiB>,
{
unsafe { self.inner.clean_up_addr_range(range, frame_deallocator) }
}
}