use core::convert::TryFrom;
use core::fmt;
#[cfg(feature = "step_trait")]
use core::iter::Step;
use core::ops::{Add, AddAssign, Sub, SubAssign};
#[cfg(feature = "memory_encryption")]
use core::sync::atomic::Ordering;

#[cfg(feature = "memory_encryption")]
use crate::structures::mem_encrypt::ENC_BIT_MASK;
use crate::structures::paging::page_table::PageTableLevel;
use crate::structures::paging::{PageOffset, PageTableIndex};

use bit_field::BitField;
use dep_const_fn::const_fn;

/// The size of the 48-bit virtual address space (2^48 bytes).
const ADDRESS_SPACE_SIZE: u64 = 0x1_0000_0000_0000;

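/// A canonical 64-bit virtual memory address.
///
/// This is a wrapper type around an `u64`, so it is always 8 bytes, even when
/// compiled on non-64-bit systems. On `x86_64`, bits 48 to 64 must be copies
/// of bit 47 (i.e. the address must be canonical), which the checked
/// constructors of this type enforce.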
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct VirtAddr(u64);

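/// A 64-bit physical memory address.
///
/// This is a wrapper type around an `u64`, so it is always 8 bytes, even when
/// compiled on non-64-bit systems. On `x86_64`, only the lower 52 bits may be
/// used; the constructors of this type either check or truncate the upper bits.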
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct PhysAddr(u64);

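/// A passed `u64` was not a valid virtual address.
///
/// This means that bits 48 to 64 were not a valid sign extension of bit 47.
/// The contained `u64` is the rejected address.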
pub struct VirtAddrNotValid(pub u64);

impl core::fmt::Debug for VirtAddrNotValid {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VirtAddrNotValid")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

impl VirtAddr {
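    /// Creates a new canonical virtual address.
    ///
    /// The provided address should already be canonical. If you want to check
    /// whether an address is canonical, use [`try_new`](Self::try_new).
    ///
    /// ## Panics
    ///
    /// This function panics if the bits in the range 48 to 64 are not a valid
    /// sign extension of bit 47.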
    #[inline]
    pub const fn new(addr: u64) -> VirtAddr {
        match Self::try_new(addr) {
            Ok(v) => v,
            Err(_) => panic!("virtual address must be sign extended in bits 48 to 64"),
        }
    }

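    /// Tries to create a new canonical virtual address.
    ///
    /// Returns an error wrapping the given address if bits 48 to 64 are not a
    /// valid sign extension of bit 47.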
    #[inline]
    pub const fn try_new(addr: u64) -> Result<VirtAddr, VirtAddrNotValid> {
        let v = Self::new_truncate(addr);
        if v.0 == addr {
            Ok(v)
        } else {
            Err(VirtAddrNotValid(addr))
        }
    }

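    /// Creates a new canonical virtual address, throwing out bits 48..64.
    ///
    /// This function performs sign extension of bit 47 to make the address
    /// canonical, overwriting bits 48 to 64. If you want to check whether an
    /// address is canonical, use [`new`](Self::new) or [`try_new`](Self::try_new).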
    #[inline]
    pub const fn new_truncate(addr: u64) -> VirtAddr {
        // Shift the address left so that bit 47 becomes the sign bit of an
        // `i64`, then shift back arithmetically: bits 48..64 are overwritten
        // with copies of bit 47 (sign extension) without branching.
        VirtAddr(((addr << 16) as i64 >> 16) as u64)
    }

    /// Creates a new virtual address, without any checks.
    ///
    /// ## Safety
    ///
    /// You must make sure bits 48..64 are equal to bit 47. This is not checked.
    #[inline]
    pub const unsafe fn new_unsafe(addr: u64) -> VirtAddr {
        VirtAddr(addr)
    }

    /// Creates a virtual address that points to `0`.
    #[inline]
    pub const fn zero() -> VirtAddr {
        VirtAddr(0)
    }

    /// Converts the address to an `u64`.
    #[inline]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Creates a virtual address from the given pointer.
    #[cfg(target_pointer_width = "64")]
    #[inline]
    pub fn from_ptr<T: ?Sized>(ptr: *const T) -> Self {
        Self::new(ptr as *const () as u64)
    }

    /// Converts the address to a raw pointer.
    #[cfg(target_pointer_width = "64")]
    #[inline]
    pub const fn as_ptr<T>(self) -> *const T {
        self.as_u64() as *const T
    }

    /// Converts the address to a mutable raw pointer.
    #[cfg(target_pointer_width = "64")]
    #[inline]
    pub const fn as_mut_ptr<T>(self) -> *mut T {
        self.as_ptr::<T>() as *mut T
    }

    /// Convenience method for checking if a virtual address is null.
    #[inline]
    pub const fn is_null(self) -> bool {
        self.0 == 0
    }

    /// Aligns the virtual address upwards to the given alignment.
    ///
    /// See the `align_up` free function for more information.
    ///
    /// Panics if the alignment is not a power of two or if an overflow occurs.
    #[inline]
    pub fn align_up<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        VirtAddr::new_truncate(align_up(self.0, align.into()))
    }

    /// Aligns the virtual address downwards to the given alignment.
    ///
    /// See the `align_down` free function for more information.
    ///
    /// Panics if the alignment is not a power of two.
    #[inline]
    pub fn align_down<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        self.align_down_u64(align.into())
    }

    #[inline]
    pub(crate) const fn align_down_u64(self, align: u64) -> Self {
        VirtAddr::new_truncate(align_down(self.0, align))
    }

    /// Checks whether the virtual address has the given alignment.
    #[inline]
    pub fn is_aligned<U>(self, align: U) -> bool
    where
        U: Into<u64>,
    {
        self.is_aligned_u64(align.into())
    }

    #[inline]
    pub(crate) const fn is_aligned_u64(self, align: u64) -> bool {
        self.align_down_u64(align).as_u64() == self.as_u64()
    }

    /// Returns the 12-bit page offset of this virtual address.
    #[inline]
    pub const fn page_offset(self) -> PageOffset {
        PageOffset::new_truncate(self.0 as u16)
    }

    /// Returns the 9-bit level 1 page table index.
    #[inline]
    pub const fn p1_index(self) -> PageTableIndex {
        PageTableIndex::new_truncate((self.0 >> 12) as u16)
    }

    /// Returns the 9-bit level 2 page table index.
    #[inline]
    pub const fn p2_index(self) -> PageTableIndex {
        PageTableIndex::new_truncate((self.0 >> 12 >> 9) as u16)
    }

    /// Returns the 9-bit level 3 page table index.
    #[inline]
    pub const fn p3_index(self) -> PageTableIndex {
        PageTableIndex::new_truncate((self.0 >> 12 >> 9 >> 9) as u16)
    }

    /// Returns the 9-bit level 4 page table index.
    #[inline]
    pub const fn p4_index(self) -> PageTableIndex {
        PageTableIndex::new_truncate((self.0 >> 12 >> 9 >> 9 >> 9) as u16)
    }

    /// Returns the 9-bit page table index for the given page table level.
    #[inline]
    pub const fn page_table_index(self, level: PageTableLevel) -> PageTableIndex {
        PageTableIndex::new_truncate((self.0 >> 12 >> ((level as u8 - 1) * 9)) as u16)
    }

    #[cfg(feature = "step_trait")]
    pub(crate) fn steps_between_impl(start: &Self, end: &Self) -> (usize, Option<usize>) {
        if let Some(steps) = Self::steps_between_u64(start, end) {
            let steps = usize::try_from(steps).ok();
            (steps.unwrap_or(usize::MAX), steps)
        } else {
            (0, None)
        }
    }

    /// An implementation of steps_between that returns u64. Returns `None` if
    /// `end` is lower than `start`.
    #[cfg(any(feature = "instructions", feature = "step_trait"))]
    pub(crate) fn steps_between_u64(start: &Self, end: &Self) -> Option<u64> {
        let mut steps = end.0.checked_sub(start.0)?;

        // Mask away the extra bits that appear when the distance spans the
        // non-canonical gap between the lower and the higher half.
        steps &= 0xffff_ffff_ffff;

        Some(steps)
    }

    #[inline]
    pub(crate) fn forward_checked_impl(start: Self, count: usize) -> Option<Self> {
        Self::forward_checked_u64(start, u64::try_from(count).ok()?)
    }

    /// An implementation of forward_checked that takes u64 instead of usize.
    #[inline]
    pub(crate) fn forward_checked_u64(start: Self, count: u64) -> Option<Self> {
        if count > ADDRESS_SPACE_SIZE {
            return None;
        }

        let mut addr = start.0.checked_add(count)?;

        match addr.get_bits(47..) {
            0x1 => {
                // Jump the non-canonical gap by sign extending the 47th bit.
                addr.set_bits(47.., 0x1ffff);
            }
            0x2 => {
                // Address overflowed past the end of the address space.
                return None;
            }
            _ => {}
        }

        Some(unsafe { Self::new_unsafe(addr) })
    }

    /// An implementation of backward_checked that takes u64 instead of usize.
    #[cfg(feature = "step_trait")]
    #[inline]
    pub(crate) fn backward_checked_u64(start: Self, count: u64) -> Option<Self> {
        if count > ADDRESS_SPACE_SIZE {
            return None;
        }

        let mut addr = start.0.checked_sub(count)?;

        match addr.get_bits(47..) {
            0x1fffe => {
                // Jump the non-canonical gap by clearing the 47th bit and above.
                addr.set_bits(47.., 0);
            }
            0x1fffd => {
                // Address underflowed past the start of the address space.
                return None;
            }
            _ => {}
        }

        Some(unsafe { Self::new_unsafe(addr) })
    }
}

impl fmt::Debug for VirtAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("VirtAddr")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

impl fmt::Binary for VirtAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Binary::fmt(&self.0, f)
    }
}

impl fmt::LowerHex for VirtAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl fmt::Octal for VirtAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Octal::fmt(&self.0, f)
    }
}

impl fmt::UpperHex for VirtAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}

impl fmt::Pointer for VirtAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&(self.0 as *const ()), f)
    }
}

impl Add<u64> for VirtAddr {
    type Output = Self;

    #[cfg_attr(not(feature = "step_trait"), allow(rustdoc::broken_intra_doc_links))]
    #[inline]
    fn add(self, rhs: u64) -> Self::Output {
        VirtAddr::try_new(
            self.0
                .checked_add(rhs)
                .expect("attempt to add with overflow"),
        )
        .expect("attempt to add resulted in non-canonical virtual address")
    }
}

impl AddAssign<u64> for VirtAddr {
    #[cfg_attr(not(feature = "step_trait"), allow(rustdoc::broken_intra_doc_links))]
    #[inline]
    fn add_assign(&mut self, rhs: u64) {
        *self = *self + rhs;
    }
}

impl Sub<u64> for VirtAddr {
    type Output = Self;

    #[cfg_attr(not(feature = "step_trait"), allow(rustdoc::broken_intra_doc_links))]
    #[inline]
    fn sub(self, rhs: u64) -> Self::Output {
        VirtAddr::try_new(
            self.0
                .checked_sub(rhs)
                .expect("attempt to subtract with overflow"),
        )
        .expect("attempt to subtract resulted in non-canonical virtual address")
    }
}

impl SubAssign<u64> for VirtAddr {
    #[cfg_attr(not(feature = "step_trait"), allow(rustdoc::broken_intra_doc_links))]
    #[inline]
    fn sub_assign(&mut self, rhs: u64) {
        *self = *self - rhs;
    }
}

impl Sub<VirtAddr> for VirtAddr {
    type Output = u64;

    #[inline]
    fn sub(self, rhs: VirtAddr) -> Self::Output {
        self.as_u64()
            .checked_sub(rhs.as_u64())
            .expect("attempt to subtract with overflow")
    }
}

#[cfg(feature = "step_trait")]
impl Step for VirtAddr {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        Self::steps_between_impl(start, end)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        Self::forward_checked_impl(start, count)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        Self::backward_checked_u64(start, u64::try_from(count).ok()?)
    }
}

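/// A passed `u64` was not a valid physical address.
///
/// This usually means that bits 52 to 64 were not all zero. The contained
/// `u64` is the rejected address.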
pub struct PhysAddrNotValid(pub u64);

impl core::fmt::Debug for PhysAddrNotValid {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PhysAddrNotValid")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

impl PhysAddr {
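    /// Creates a new physical address.
    ///
    /// ## Panics
    ///
    /// This function panics if a bit in the range 52 to 64 is set.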
    #[inline]
    #[const_fn(cfg(not(feature = "memory_encryption")))]
    pub const fn new(addr: u64) -> Self {
        match Self::try_new(addr) {
            Ok(p) => p,
            Err(_) => panic!("physical addresses must not have any bits in the range 52 to 64 set"),
        }
    }

    /// Creates a new physical address, throwing bits 52..64 away.
    #[cfg(not(feature = "memory_encryption"))]
    #[inline]
    pub const fn new_truncate(addr: u64) -> PhysAddr {
        PhysAddr(addr % (1 << 52))
    }

    /// Creates a new physical address, throwing bits 52..64 and the encryption bit away.
    #[cfg(feature = "memory_encryption")]
    #[inline]
    pub fn new_truncate(addr: u64) -> PhysAddr {
        PhysAddr((addr % (1 << 52)) & !ENC_BIT_MASK.load(Ordering::Relaxed))
    }

    /// Creates a new physical address, without any checks.
    ///
    /// ## Safety
    ///
    /// You must make sure bits 52..64 are zero. This is not checked.
    #[inline]
    pub const unsafe fn new_unsafe(addr: u64) -> PhysAddr {
        PhysAddr(addr)
    }

    /// Tries to create a new physical address.
    ///
    /// Fails if any bits in the range 52 to 64 are set.
    #[inline]
    #[const_fn(cfg(not(feature = "memory_encryption")))]
    pub const fn try_new(addr: u64) -> Result<Self, PhysAddrNotValid> {
        let p = Self::new_truncate(addr);
        if p.0 == addr {
            Ok(p)
        } else {
            Err(PhysAddrNotValid(addr))
        }
    }

    /// Creates a physical address that points to `0`.
    #[inline]
    pub const fn zero() -> PhysAddr {
        PhysAddr(0)
    }

    /// Converts the address to an `u64`.
    #[inline]
    pub const fn as_u64(self) -> u64 {
        self.0
    }

    /// Convenience method for checking if a physical address is null.
    #[inline]
    pub const fn is_null(self) -> bool {
        self.0 == 0
    }

    /// Aligns the physical address upwards to the given alignment.
    ///
    /// See the `align_up` free function for more information.
    ///
    /// Panics if the alignment is not a power of two or if the aligned address
    /// is no longer a valid physical address.
    #[inline]
    pub fn align_up<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        PhysAddr::new(align_up(self.0, align.into()))
    }

    /// Aligns the physical address downwards to the given alignment.
    ///
    /// See the `align_down` free function for more information.
    ///
    /// Panics if the alignment is not a power of two.
    #[inline]
    pub fn align_down<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        self.align_down_u64(align.into())
    }

    #[inline]
    pub(crate) const fn align_down_u64(self, align: u64) -> Self {
        PhysAddr(align_down(self.0, align))
    }

    /// Checks whether the physical address has the given alignment.
    #[inline]
    pub fn is_aligned<U>(self, align: U) -> bool
    where
        U: Into<u64>,
    {
        self.is_aligned_u64(align.into())
    }

    #[inline]
    pub(crate) const fn is_aligned_u64(self, align: u64) -> bool {
        self.align_down_u64(align).as_u64() == self.as_u64()
    }
}

impl fmt::Debug for PhysAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("PhysAddr")
            .field(&format_args!("{:#x}", self.0))
            .finish()
    }
}

impl fmt::Binary for PhysAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Binary::fmt(&self.0, f)
    }
}

impl fmt::LowerHex for PhysAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl fmt::Octal for PhysAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Octal::fmt(&self.0, f)
    }
}

impl fmt::UpperHex for PhysAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}

impl fmt::Pointer for PhysAddr {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&(self.0 as *const ()), f)
    }
}

impl Add<u64> for PhysAddr {
    type Output = Self;
    #[inline]
    fn add(self, rhs: u64) -> Self::Output {
        PhysAddr::new(self.0.checked_add(rhs).unwrap())
    }
}

impl AddAssign<u64> for PhysAddr {
    #[inline]
    fn add_assign(&mut self, rhs: u64) {
        *self = *self + rhs;
    }
}

impl Sub<u64> for PhysAddr {
    type Output = Self;
    #[inline]
    fn sub(self, rhs: u64) -> Self::Output {
        PhysAddr::new(self.0.checked_sub(rhs).unwrap())
    }
}

impl SubAssign<u64> for PhysAddr {
    #[inline]
    fn sub_assign(&mut self, rhs: u64) {
        *self = *self - rhs;
    }
}

impl Sub<PhysAddr> for PhysAddr {
    type Output = u64;
    #[inline]
    fn sub(self, rhs: PhysAddr) -> Self::Output {
        self.as_u64().checked_sub(rhs.as_u64()).unwrap()
    }
}

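/// Align address downwards.
///
/// Returns the greatest `x` with alignment `align` so that `x <= addr`.
///
/// Panics if the alignment is not a power of two.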
#[inline]
pub const fn align_down(addr: u64, align: u64) -> u64 {
    assert!(align.is_power_of_two(), "`align` must be a power of two");
    addr & !(align - 1)
}

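/// Align address upwards.
///
/// Returns the smallest `x` with alignment `align` so that `x >= addr`.
///
/// Panics if the alignment is not a power of two or if an overflow occurs.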
#[inline]
pub const fn align_up(addr: u64, align: u64) -> u64 {
    assert!(align.is_power_of_two(), "`align` must be a power of two");
    let align_mask = align - 1;
    if addr & align_mask == 0 {
        addr
    } else {
        if let Some(aligned) = (addr | align_mask).checked_add(1) {
            aligned
        } else {
            panic!("attempt to add with overflow")
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    #[should_panic]
    pub fn add_overflow_virtaddr() {
        let _ = VirtAddr::new(0xffff_ffff_ffff_ffff) + 1;
    }

    #[test]
    #[should_panic]
    pub fn add_overflow_physaddr() {
        let _ = PhysAddr::new(0x000f_ffff_ffff_ffff) + 0xffff_0000_0000_0000;
    }

    #[test]
    #[should_panic]
    pub fn sub_underflow_virtaddr() {
        let _ = VirtAddr::new(0) - 1;
    }

    #[test]
    #[should_panic]
    pub fn sub_overflow_physaddr() {
        let _ = PhysAddr::new(0) - 1;
    }

    #[test]
    pub fn virtaddr_new_truncate() {
        assert_eq!(VirtAddr::new_truncate(0), VirtAddr(0));
        assert_eq!(VirtAddr::new_truncate(1 << 47), VirtAddr(0xfffff << 47));
        assert_eq!(VirtAddr::new_truncate(123), VirtAddr(123));
        assert_eq!(VirtAddr::new_truncate(123 << 47), VirtAddr(0xfffff << 47));
    }
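
    // Added illustrative test, not part of the original suite: `try_new` is
    // expected to accept canonical addresses unchanged and to reject addresses
    // whose bits 48..64 are not a sign extension of bit 47.
    #[test]
    pub fn virtaddr_try_new() {
        assert!(VirtAddr::try_new(0).is_ok());
        assert!(VirtAddr::try_new(0x7fff_ffff_ffff).is_ok());
        assert!(VirtAddr::try_new(0xffff_8000_0000_0000).is_ok());
        assert!(VirtAddr::try_new(1 << 47).is_err());
        assert!(VirtAddr::try_new(1 << 48).is_err());
    }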

    #[test]
    #[cfg(feature = "step_trait")]
    fn virtaddr_step_forward() {
        assert_eq!(Step::forward(VirtAddr(0), 0), VirtAddr(0));
        assert_eq!(Step::forward(VirtAddr(0), 1), VirtAddr(1));
        assert_eq!(
            Step::forward(VirtAddr(0x7fff_ffff_ffff), 1),
            VirtAddr(0xffff_8000_0000_0000)
        );
        assert_eq!(
            Step::forward(VirtAddr(0xffff_8000_0000_0000), 1),
            VirtAddr(0xffff_8000_0000_0001)
        );
        assert_eq!(
            Step::forward_checked(VirtAddr(0xffff_ffff_ffff_ffff), 1),
            None
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::forward(VirtAddr(0x7fff_ffff_ffff), 0x1234_5678_9abd),
            VirtAddr(0xffff_9234_5678_9abc)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::forward(VirtAddr(0x7fff_ffff_ffff), 0x8000_0000_0000),
            VirtAddr(0xffff_ffff_ffff_ffff)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::forward(VirtAddr(0x7fff_ffff_ff00), 0x8000_0000_00ff),
            VirtAddr(0xffff_ffff_ffff_ffff)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::forward_checked(VirtAddr(0x7fff_ffff_ff00), 0x8000_0000_0100),
            None
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::forward_checked(VirtAddr(0x7fff_ffff_ffff), 0x8000_0000_0001),
            None
        );
    }

    #[test]
    #[cfg(feature = "step_trait")]
    fn virtaddr_step_backward() {
        assert_eq!(Step::backward(VirtAddr(0), 0), VirtAddr(0));
        assert_eq!(Step::backward_checked(VirtAddr(0), 1), None);
        assert_eq!(Step::backward(VirtAddr(1), 1), VirtAddr(0));
        assert_eq!(
            Step::backward(VirtAddr(0xffff_8000_0000_0000), 1),
            VirtAddr(0x7fff_ffff_ffff)
        );
        assert_eq!(
            Step::backward(VirtAddr(0xffff_8000_0000_0001), 1),
            VirtAddr(0xffff_8000_0000_0000)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::backward(VirtAddr(0xffff_9234_5678_9abc), 0x1234_5678_9abd),
            VirtAddr(0x7fff_ffff_ffff)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::backward(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0000),
            VirtAddr(0)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::backward(VirtAddr(0xffff_8000_0000_0000), 0x7fff_ffff_ff01),
            VirtAddr(0xff)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::backward_checked(VirtAddr(0xffff_8000_0000_0000), 0x8000_0000_0001),
            None
        );
    }

    #[test]
    #[cfg(feature = "step_trait")]
    fn virtaddr_steps_between() {
        assert_eq!(
            Step::steps_between(&VirtAddr(0), &VirtAddr(0)),
            (0, Some(0))
        );
        assert_eq!(
            Step::steps_between(&VirtAddr(0), &VirtAddr(1)),
            (1, Some(1))
        );
        assert_eq!(Step::steps_between(&VirtAddr(1), &VirtAddr(0)), (0, None));
        assert_eq!(
            Step::steps_between(
                &VirtAddr(0x7fff_ffff_ffff),
                &VirtAddr(0xffff_8000_0000_0000)
            ),
            (1, Some(1))
        );
        assert_eq!(
            Step::steps_between(
                &VirtAddr(0xffff_8000_0000_0000),
                &VirtAddr(0x7fff_ffff_ffff)
            ),
            (0, None)
        );
        assert_eq!(
            Step::steps_between(
                &VirtAddr(0xffff_8000_0000_0000),
                &VirtAddr(0xffff_8000_0000_0000)
            ),
            (0, Some(0))
        );
        assert_eq!(
            Step::steps_between(
                &VirtAddr(0xffff_8000_0000_0000),
                &VirtAddr(0xffff_8000_0000_0001)
            ),
            (1, Some(1))
        );
        assert_eq!(
            Step::steps_between(
                &VirtAddr(0xffff_8000_0000_0001),
                &VirtAddr(0xffff_8000_0000_0000)
            ),
            (0, None)
        );
        #[cfg(target_pointer_width = "64")]
        assert_eq!(
            Step::steps_between(&VirtAddr(0), &VirtAddr(0x1_0000_0000)),
            (0x1_0000_0000, Some(0x1_0000_0000))
        );
        #[cfg(not(target_pointer_width = "64"))]
        assert_eq!(
            Step::steps_between(&VirtAddr(0), &VirtAddr(0x1_0000_0000)),
            (usize::MAX, None)
        );
    }

    #[test]
    pub fn test_align_up() {
        assert_eq!(align_up(0, 1), 0);
        assert_eq!(align_up(1234, 1), 1234);
        assert_eq!(align_up(0xffff_ffff_ffff_ffff, 1), 0xffff_ffff_ffff_ffff);
        assert_eq!(align_up(0, 2), 0);
        assert_eq!(align_up(1233, 2), 1234);
        assert_eq!(align_up(0xffff_ffff_ffff_fffe, 2), 0xffff_ffff_ffff_fffe);
        assert_eq!(align_up(0, 128), 0);
        assert_eq!(align_up(0, 1), 0);
        assert_eq!(align_up(0, 2), 0);
        assert_eq!(align_up(0, 0x8000_0000_0000_0000), 0);
    }
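
    // Added illustrative test, not part of the original suite: `align_down`
    // simply clears the low bits, so already-aligned values are unchanged.
    #[test]
    pub fn test_align_down() {
        assert_eq!(align_down(0, 1), 0);
        assert_eq!(align_down(1234, 1), 1234);
        assert_eq!(align_down(1234, 128), 1152);
        assert_eq!(align_down(0xffff_ffff_ffff_ffff, 2), 0xffff_ffff_ffff_fffe);
        assert_eq!(align_down(0, 0x8000_0000_0000_0000), 0);
    }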

    #[test]
    fn test_virt_addr_align_up() {
        assert_eq!(
            VirtAddr::new(0x7fff_ffff_ffff).align_up(2u64),
            VirtAddr::new(0xffff_8000_0000_0000)
        );
    }

    #[test]
    fn test_virt_addr_align_down() {
        assert_eq!(
            VirtAddr::new(0xffff_8000_0000_0000).align_down(1u64 << 48),
            VirtAddr::new(0)
        );
    }

    #[test]
    #[should_panic]
    fn test_virt_addr_align_up_overflow() {
        VirtAddr::new(0xffff_ffff_ffff_ffff).align_up(2u64);
    }

    #[test]
    #[should_panic]
    fn test_phys_addr_align_up_overflow() {
        PhysAddr::new(0x000f_ffff_ffff_ffff).align_up(2u64);
    }
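
    // Added illustrative test, not part of the original suite: without the
    // `memory_encryption` feature, `PhysAddr::new_truncate` is expected to
    // simply drop bits 52..64, so a value with only such bits set becomes 0.
    #[test]
    #[cfg(not(feature = "memory_encryption"))]
    fn physaddr_new_truncate() {
        assert_eq!(PhysAddr::new_truncate(0), PhysAddr(0));
        assert_eq!(PhysAddr::new_truncate(123), PhysAddr(123));
        assert_eq!(PhysAddr::new_truncate(1 << 52), PhysAddr(0));
        assert_eq!(PhysAddr::new_truncate((1 << 52) | 42), PhysAddr(42));
    }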

    #[test]
    #[cfg(target_pointer_width = "64")]
    fn test_from_ptr_array() {
        let slice = &[1, 2, 3, 4, 5];
        // `from_ptr` of the whole slice should be the address of its first element.
        assert_eq!(
            VirtAddr::from_ptr(slice.as_slice()),
            VirtAddr::from_ptr(&slice[0])
        );
    }
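
    // Added illustrative test, not part of the original suite: the generic
    // `page_table_index` accessor is expected to agree with the fixed
    // per-level accessors for any canonical address.
    #[test]
    fn virtaddr_page_table_indices() {
        let addr = VirtAddr::new_truncate(0x1234_5678_9abc);
        assert!(addr.page_table_index(PageTableLevel::One) == addr.p1_index());
        assert!(addr.page_table_index(PageTableLevel::Two) == addr.p2_index());
        assert!(addr.page_table_index(PageTableLevel::Three) == addr.p3_index());
        assert!(addr.page_table_index(PageTableLevel::Four) == addr.p4_index());
    }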
}

#[cfg(kani)]
mod proofs {
    use super::*;

    // Base case: stepping forward by 0 is a no-op, and stepping forward by 1
    // from any canonical address either yields the next canonical address
    // (jumping the non-canonical gap if necessary) or fails at the very top
    // of the address space.
    #[kani::proof]
    fn forward_base_case() {
        let start_raw: u64 = kani::any();
        let Ok(start) = VirtAddr::try_new(start_raw) else {
            return;
        };

        let same = Step::forward(start, 0);
        assert!(start == same);

        // The expected successor of `start_raw`, depending on where it lies.
        let expected = match start_raw {
            0x0000_0000_0000_0000..=0x0000_7fff_ffff_fffe => Some(start_raw + 1),
            0x0000_7fff_ffff_ffff => Some(0xffff_8000_0000_0000),
            0x0000_8000_0000_0000..=0xffff_7fff_ffff_ffff => unreachable!(),
            0xffff_8000_0000_0000..=0xffff_ffff_ffff_fffe => Some(start_raw + 1),
            0xffff_ffff_ffff_ffff => None,
        };
        if let Some(expected) = expected {
            assert!(VirtAddr::try_new(expected).is_ok());
        }
        let next = Step::forward_checked(start, 1);
        assert!(next.map(VirtAddr::as_u64) == expected);
    }

    // Induction step: forwarding by `count1` and then by `count2` is the same
    // as forwarding by `count1 + count2` in one go.
    #[kani::proof]
    fn forward_induction_step() {
        let start_raw: u64 = kani::any();
        let Ok(start) = VirtAddr::try_new(start_raw) else {
            return;
        };

        let count1: usize = kani::any();
        let count2: usize = kani::any();
        let Some(next1) = Step::forward_checked(start, count1) else {
            return;
        };
        let Some(next2) = Step::forward_checked(next1, count2) else {
            return;
        };

        let count_both = count1 + count2;
        let next_both = Step::forward(start, count_both);
        assert!(next2 == next_both);
    }

    // If forwarding `start` by `count` yields `end`, then backwarding `end`
    // by `count` must yield `start` again.
    #[kani::proof]
    fn forward_implies_backward() {
        let start_raw: u64 = kani::any();
        let Ok(start) = VirtAddr::try_new(start_raw) else {
            return;
        };
        let count: usize = kani::any();

        let Some(end) = Step::forward_checked(start, count) else {
            return;
        };

        let start2 = Step::backward(end, count);
        assert!(start == start2);
    }

    // If backwarding `end` by `count` yields `start`, then forwarding `start`
    // by `count` must yield `end` again.
    #[kani::proof]
    fn backward_implies_forward() {
        let end_raw: u64 = kani::any();
        let Ok(end) = VirtAddr::try_new(end_raw) else {
            return;
        };
        let count: usize = kani::any();

        let Some(start) = Step::backward_checked(end, count) else {
            return;
        };

        let end2 = Step::forward(start, count);
        assert!(end == end2);
    }

    // If forwarding `start` by `count` yields `end`, then `steps_between`
    // must report exactly `count` steps from `start` to `end`.
    #[kani::proof]
    fn forward_implies_steps_between() {
        let start: u64 = kani::any();
        let Ok(start) = VirtAddr::try_new(start) else {
            return;
        };
        let count: usize = kani::any();

        let Some(end) = Step::forward_checked(start, count) else {
            return;
        };

        assert!(Step::steps_between(&start, &end) == (count, Some(count)));
    }

    // If `steps_between` reports an exact count from `start` to `end`, then
    // forwarding `start` by that count must yield `end`.
    #[kani::proof]
    fn steps_between_implies_forward() {
        let start: u64 = kani::any();
        let Ok(start) = VirtAddr::try_new(start) else {
            return;
        };
        let end: u64 = kani::any();
        let Ok(end) = VirtAddr::try_new(end) else {
            return;
        };

        let Some(count) = Step::steps_between(&start, &end).1 else {
            return;
        };

        assert!(Step::forward(start, count) == end);
    }
}