use bitflags::*;

use core::convert::{From, Into};
use core::fmt;
use core::hash::{Hash, Hasher};
#[cfg(feature = "unstable")]
use core::iter::Step;
use core::ops;

/// Generates a `pub fn $fun(self) -> bool` accessor that reports whether
/// `$flag` is set in the entry's flag bits.
macro_rules! check_flag {
    ($doc:meta, $fun:ident, $flag:expr) => {
        #[$doc]
        pub fn $fun(self) -> bool {
            self.flags().contains($flag)
        }
    };
}

/// Round `addr` down to the nearest multiple of `align`, which must be a
/// power of two (exercised by the `raw_align_helpers` test at the bottom of
/// this file).
#[inline(always)]
fn align_down(addr: u64, align: u64) -> u64 {
    addr & !(align - 1)
}

/// Round `addr` up to the nearest multiple of `align`, which must be a
/// power of two.
#[inline(always)]
fn align_up(addr: u64, align: u64) -> u64 {
    let align_mask = align - 1;
    if addr & align_mask == 0 {
        addr
    } else {
        (addr | align_mask) + 1
    }
}

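/// A physical (machine) address.
///
/// A minimal usage sketch (illustrative values only, relying on the 4 KiB
/// `BASE_PAGE_SIZE` defined further down in this module):
///
/// ```ignore
/// let pa = PAddr::from(0x1234u64);
/// assert_eq!(pa.base_page_offset(), 0x234);
/// assert_eq!(pa.align_down_to_base_page(), PAddr(0x1000));
/// assert_eq!(pa.align_up_to_base_page(), PAddr(0x2000));
/// assert!(pa.align_up_to_base_page().is_base_page_aligned());
/// ```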
#[repr(transparent)]
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd)]
pub struct PAddr(pub u64);

impl PAddr {
    pub fn as_u64(self) -> u64 {
        self.0
    }

    pub fn as_usize(self) -> usize {
        self.0 as usize
    }

    pub const fn zero() -> Self {
        PAddr(0)
    }

    pub fn is_zero(self) -> bool {
        self == PAddr::zero()
    }

    pub fn split(&self) -> (u32, u32) {
        (self.0 as u32, (self.0 >> 32) as u32)
    }

    fn align_up<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        PAddr(align_up(self.0, align.into()))
    }

    fn align_down<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        PAddr(align_down(self.0, align.into()))
    }

    pub fn base_page_offset(self) -> u64 {
        self.0 & (BASE_PAGE_SIZE as u64 - 1)
    }

    pub fn large_page_offset(self) -> u64 {
        self.0 & (LARGE_PAGE_SIZE as u64 - 1)
    }

    pub fn huge_page_offset(self) -> u64 {
        self.0 & (HUGE_PAGE_SIZE as u64 - 1)
    }

    pub fn align_down_to_base_page(self) -> Self {
        self.align_down(BASE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_large_page(self) -> Self {
        self.align_down(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_huge_page(self) -> Self {
        self.align_down(HUGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_base_page(self) -> Self {
        self.align_up(BASE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_large_page(self) -> Self {
        self.align_up(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_huge_page(self) -> Self {
        self.align_up(HUGE_PAGE_SIZE as u64)
    }

    pub fn is_base_page_aligned(self) -> bool {
        self.align_down(BASE_PAGE_SIZE as u64) == self
    }

    pub fn is_large_page_aligned(self) -> bool {
        self.align_down(LARGE_PAGE_SIZE as u64) == self
    }

    pub fn is_huge_page_aligned(self) -> bool {
        self.align_down(HUGE_PAGE_SIZE as u64) == self
    }

    pub fn is_aligned<U>(self, align: U) -> bool
    where
        U: Into<u64> + Copy,
    {
        if !align.into().is_power_of_two() {
            return false;
        }

        self.align_down(align) == self
    }
}

#[cfg(feature = "unstable")]
impl Step for PAddr {
    fn steps_between(start: &Self, end: &Self) -> Option<usize> {
        <u64 as Step>::steps_between(&start.0, &end.0)
    }
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::forward_checked(start.0, count).map(PAddr)
    }
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::backward_checked(start.0, count).map(PAddr)
    }
}

impl From<u64> for PAddr {
    fn from(num: u64) -> Self {
        PAddr(num)
    }
}

impl From<usize> for PAddr {
    fn from(num: usize) -> Self {
        PAddr(num as u64)
    }
}

impl From<i32> for PAddr {
    fn from(num: i32) -> Self {
        PAddr(num as u64)
    }
}

#[allow(clippy::from_over_into)]
impl Into<u64> for PAddr {
    fn into(self) -> u64 {
        self.0
    }
}

#[allow(clippy::from_over_into)]
impl Into<usize> for PAddr {
    fn into(self) -> usize {
        self.0 as usize
    }
}

impl ops::Add for PAddr {
    type Output = PAddr;

    fn add(self, rhs: PAddr) -> Self::Output {
        PAddr(self.0 + rhs.0)
    }
}

impl ops::Add<u64> for PAddr {
    type Output = PAddr;

    fn add(self, rhs: u64) -> Self::Output {
        PAddr::from(self.0 + rhs)
    }
}

impl ops::Add<usize> for PAddr {
    type Output = PAddr;

    fn add(self, rhs: usize) -> Self::Output {
        PAddr::from(self.0 + rhs as u64)
    }
}

impl ops::AddAssign for PAddr {
    fn add_assign(&mut self, other: PAddr) {
        *self = PAddr::from(self.0 + other.0);
    }
}

impl ops::AddAssign<u64> for PAddr {
    fn add_assign(&mut self, offset: u64) {
        *self = PAddr::from(self.0 + offset);
    }
}

impl ops::Sub for PAddr {
    type Output = PAddr;

    fn sub(self, rhs: PAddr) -> Self::Output {
        PAddr::from(self.0 - rhs.0)
    }
}

impl ops::Sub<u64> for PAddr {
    type Output = PAddr;

    fn sub(self, rhs: u64) -> Self::Output {
        PAddr::from(self.0 - rhs)
    }
}

impl ops::Sub<usize> for PAddr {
    type Output = PAddr;

    fn sub(self, rhs: usize) -> Self::Output {
        PAddr::from(self.0 - rhs as u64)
    }
}

impl ops::Rem for PAddr {
    type Output = PAddr;

    fn rem(self, rhs: PAddr) -> Self::Output {
        PAddr(self.0 % rhs.0)
    }
}

impl ops::Rem<u64> for PAddr {
    type Output = u64;

    fn rem(self, rhs: u64) -> Self::Output {
        self.0 % rhs
    }
}

impl ops::Rem<usize> for PAddr {
    type Output = u64;

    fn rem(self, rhs: usize) -> Self::Output {
        self.0 % (rhs as u64)
    }
}

impl ops::BitAnd for PAddr {
    type Output = Self;

    fn bitand(self, rhs: Self) -> Self {
        PAddr(self.0 & rhs.0)
    }
}

impl ops::BitAnd<u64> for PAddr {
    type Output = u64;

    fn bitand(self, rhs: u64) -> Self::Output {
        Into::<u64>::into(self) & rhs
    }
}

impl ops::BitOr for PAddr {
    type Output = PAddr;

    fn bitor(self, rhs: PAddr) -> Self::Output {
        PAddr(self.0 | rhs.0)
    }
}

impl ops::BitOr<u64> for PAddr {
    type Output = u64;

    fn bitor(self, rhs: u64) -> Self::Output {
        self.0 | rhs
    }
}

impl ops::Shr<u64> for PAddr {
    type Output = u64;

    fn shr(self, rhs: u64) -> Self::Output {
        self.0 >> rhs
    }
}

impl fmt::Binary for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Display for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Debug for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

impl fmt::LowerHex for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Octal for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::UpperHex for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Pointer for PAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use core::fmt::LowerHex;
        self.0.fmt(f)
    }
}

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for PAddr {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

/// An I/O address (e.g. a DMA address as seen by a device).
#[repr(transparent)]
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd)]
pub struct IOAddr(pub u64);

impl IOAddr {
    pub fn as_u64(self) -> u64 {
        self.0
    }

    pub fn as_usize(self) -> usize {
        self.0 as usize
    }

    pub const fn zero() -> Self {
        IOAddr(0)
    }

    pub fn is_zero(self) -> bool {
        self == IOAddr::zero()
    }

    pub fn split(&self) -> (u32, u32) {
        (self.0 as u32, (self.0 >> 32) as u32)
    }

    fn align_up<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        IOAddr(align_up(self.0, align.into()))
    }

    fn align_down<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        IOAddr(align_down(self.0, align.into()))
    }

    pub fn base_page_offset(self) -> u64 {
        self.0 & (BASE_PAGE_SIZE as u64 - 1)
    }

    pub fn large_page_offset(self) -> u64 {
        self.0 & (LARGE_PAGE_SIZE as u64 - 1)
    }

    pub fn huge_page_offset(self) -> u64 {
        self.0 & (HUGE_PAGE_SIZE as u64 - 1)
    }

    pub fn align_down_to_base_page(self) -> Self {
        self.align_down(BASE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_large_page(self) -> Self {
        self.align_down(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_huge_page(self) -> Self {
        self.align_down(HUGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_base_page(self) -> Self {
        self.align_up(BASE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_large_page(self) -> Self {
        self.align_up(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_huge_page(self) -> Self {
        self.align_up(HUGE_PAGE_SIZE as u64)
    }

    pub fn is_base_page_aligned(self) -> bool {
        self.align_down(BASE_PAGE_SIZE as u64) == self
    }

    pub fn is_large_page_aligned(self) -> bool {
        self.align_down(LARGE_PAGE_SIZE as u64) == self
    }

    pub fn is_huge_page_aligned(self) -> bool {
        self.align_down(HUGE_PAGE_SIZE as u64) == self
    }

    pub fn is_aligned<U>(self, align: U) -> bool
    where
        U: Into<u64> + Copy,
    {
        if !align.into().is_power_of_two() {
            return false;
        }

        self.align_down(align) == self
    }
}

#[cfg(feature = "unstable")]
impl Step for IOAddr {
    fn steps_between(start: &Self, end: &Self) -> Option<usize> {
        <u64 as Step>::steps_between(&start.0, &end.0)
    }
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::forward_checked(start.0, count).map(IOAddr)
    }
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::backward_checked(start.0, count).map(IOAddr)
    }
}

impl From<u64> for IOAddr {
    fn from(num: u64) -> Self {
        IOAddr(num)
    }
}

impl From<usize> for IOAddr {
    fn from(num: usize) -> Self {
        IOAddr(num as u64)
    }
}

impl From<i32> for IOAddr {
    fn from(num: i32) -> Self {
        IOAddr(num as u64)
    }
}

#[allow(clippy::from_over_into)]
impl Into<u64> for IOAddr {
    fn into(self) -> u64 {
        self.0
    }
}

#[allow(clippy::from_over_into)]
impl Into<usize> for IOAddr {
    fn into(self) -> usize {
        self.0 as usize
    }
}

impl ops::Add for IOAddr {
    type Output = IOAddr;

    fn add(self, rhs: IOAddr) -> Self::Output {
        IOAddr(self.0 + rhs.0)
    }
}

impl ops::Add<u64> for IOAddr {
    type Output = IOAddr;

    fn add(self, rhs: u64) -> Self::Output {
        IOAddr::from(self.0 + rhs)
    }
}

impl ops::Add<usize> for IOAddr {
    type Output = IOAddr;

    fn add(self, rhs: usize) -> Self::Output {
        IOAddr::from(self.0 + rhs as u64)
    }
}

impl ops::AddAssign for IOAddr {
    fn add_assign(&mut self, other: IOAddr) {
        *self = IOAddr::from(self.0 + other.0);
    }
}

impl ops::AddAssign<u64> for IOAddr {
    fn add_assign(&mut self, offset: u64) {
        *self = IOAddr::from(self.0 + offset);
    }
}

impl ops::Sub for IOAddr {
    type Output = IOAddr;

    fn sub(self, rhs: IOAddr) -> Self::Output {
        IOAddr::from(self.0 - rhs.0)
    }
}

impl ops::Sub<u64> for IOAddr {
    type Output = IOAddr;

    fn sub(self, rhs: u64) -> Self::Output {
        IOAddr::from(self.0 - rhs)
    }
}

impl ops::Sub<usize> for IOAddr {
    type Output = IOAddr;

    fn sub(self, rhs: usize) -> Self::Output {
        IOAddr::from(self.0 - rhs as u64)
    }
}

impl ops::Rem for IOAddr {
    type Output = IOAddr;

    fn rem(self, rhs: IOAddr) -> Self::Output {
        IOAddr(self.0 % rhs.0)
    }
}

impl ops::Rem<u64> for IOAddr {
    type Output = u64;

    fn rem(self, rhs: u64) -> Self::Output {
        self.0 % rhs
    }
}

impl ops::Rem<usize> for IOAddr {
    type Output = u64;

    fn rem(self, rhs: usize) -> Self::Output {
        self.0 % (rhs as u64)
    }
}

impl ops::BitAnd for IOAddr {
    type Output = Self;

    fn bitand(self, rhs: Self) -> Self {
        IOAddr(self.0 & rhs.0)
    }
}

impl ops::BitAnd<u64> for IOAddr {
    type Output = u64;

    fn bitand(self, rhs: u64) -> Self::Output {
        Into::<u64>::into(self) & rhs
    }
}

impl ops::BitOr for IOAddr {
    type Output = IOAddr;

    fn bitor(self, rhs: IOAddr) -> Self::Output {
        IOAddr(self.0 | rhs.0)
    }
}

impl ops::BitOr<u64> for IOAddr {
    type Output = u64;

    fn bitor(self, rhs: u64) -> Self::Output {
        self.0 | rhs
    }
}

impl ops::Shr<u64> for IOAddr {
    type Output = u64;

    fn shr(self, rhs: u64) -> Self::Output {
        self.0 >> rhs
    }
}

impl fmt::Binary for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Display for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Debug for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

impl fmt::LowerHex for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Octal for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::UpperHex for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Pointer for IOAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use core::fmt::LowerHex;
        self.0.fmt(f)
    }
}

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for IOAddr {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

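/// A virtual (linear) memory address.
///
/// A minimal usage sketch (illustrative only; `0x2000` is an arbitrary,
/// page-aligned example value):
///
/// ```ignore
/// let v = VAddr::from_usize(0x2000);
/// assert!(v.is_base_page_aligned());
/// let p: *const u8 = v.as_ptr();
/// assert_eq!(p as usize, v.as_usize());
/// ```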
#[repr(transparent)]
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd)]
pub struct VAddr(pub u64);

impl VAddr {
    pub const fn from_u64(v: u64) -> Self {
        VAddr(v)
    }

    pub const fn from_usize(v: usize) -> Self {
        VAddr(v as u64)
    }

    pub const fn as_u64(self) -> u64 {
        self.0
    }

    pub const fn as_usize(self) -> usize {
        self.0 as usize
    }

    pub fn as_mut_ptr<T>(self) -> *mut T {
        self.0 as *mut T
    }

    pub fn as_ptr<T>(self) -> *const T {
        self.0 as *const T
    }

    pub const fn zero() -> Self {
        VAddr(0)
    }

    pub fn is_zero(self) -> bool {
        self == VAddr::zero()
    }

    fn align_up<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        VAddr(align_up(self.0, align.into()))
    }

    fn align_down<U>(self, align: U) -> Self
    where
        U: Into<u64>,
    {
        VAddr(align_down(self.0, align.into()))
    }

    pub fn base_page_offset(self) -> u64 {
        self.0 & (BASE_PAGE_SIZE as u64 - 1)
    }

    pub fn large_page_offset(self) -> u64 {
        self.0 & (LARGE_PAGE_SIZE as u64 - 1)
    }

    pub fn huge_page_offset(self) -> u64 {
        self.0 & (HUGE_PAGE_SIZE as u64 - 1)
    }

    pub fn align_down_to_base_page(self) -> Self {
        self.align_down(BASE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_large_page(self) -> Self {
        self.align_down(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_down_to_huge_page(self) -> Self {
        self.align_down(HUGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_base_page(self) -> Self {
        self.align_up(BASE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_large_page(self) -> Self {
        self.align_up(LARGE_PAGE_SIZE as u64)
    }

    pub fn align_up_to_huge_page(self) -> Self {
        self.align_up(HUGE_PAGE_SIZE as u64)
    }

    pub fn is_base_page_aligned(self) -> bool {
        self.align_down(BASE_PAGE_SIZE as u64) == self
    }

    pub fn is_large_page_aligned(self) -> bool {
        self.align_down(LARGE_PAGE_SIZE as u64) == self
    }

    pub fn is_huge_page_aligned(self) -> bool {
        self.align_down(HUGE_PAGE_SIZE as u64) == self
    }

    pub fn is_aligned<U>(self, align: U) -> bool
    where
        U: Into<u64> + Copy,
    {
        if !align.into().is_power_of_two() {
            return false;
        }

        self.align_down(align) == self
    }
}

#[cfg(feature = "unstable")]
impl Step for VAddr {
    fn steps_between(start: &Self, end: &Self) -> Option<usize> {
        <u64 as Step>::steps_between(&start.0, &end.0)
    }
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::forward_checked(start.0, count).map(VAddr)
    }
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        <u64 as Step>::backward_checked(start.0, count).map(VAddr)
    }
}

impl From<u64> for VAddr {
    fn from(num: u64) -> Self {
        VAddr(num)
    }
}

impl From<i32> for VAddr {
    fn from(num: i32) -> Self {
        VAddr(num as u64)
    }
}

#[allow(clippy::from_over_into)]
impl Into<u64> for VAddr {
    fn into(self) -> u64 {
        self.0
    }
}

impl From<usize> for VAddr {
    fn from(num: usize) -> Self {
        VAddr(num as u64)
    }
}

#[allow(clippy::from_over_into)]
impl Into<usize> for VAddr {
    fn into(self) -> usize {
        self.0 as usize
    }
}

impl ops::Add for VAddr {
    type Output = VAddr;

    fn add(self, rhs: VAddr) -> Self::Output {
        VAddr(self.0 + rhs.0)
    }
}

impl ops::Add<u64> for VAddr {
    type Output = VAddr;

    fn add(self, rhs: u64) -> Self::Output {
        VAddr(self.0 + rhs)
    }
}

impl ops::Add<usize> for VAddr {
    type Output = VAddr;

    fn add(self, rhs: usize) -> Self::Output {
        VAddr::from(self.0 + rhs as u64)
    }
}

impl ops::AddAssign for VAddr {
    fn add_assign(&mut self, other: VAddr) {
        *self = VAddr::from(self.0 + other.0);
    }
}

impl ops::AddAssign<u64> for VAddr {
    fn add_assign(&mut self, offset: u64) {
        *self = VAddr::from(self.0 + offset);
    }
}

impl ops::AddAssign<usize> for VAddr {
    fn add_assign(&mut self, offset: usize) {
        *self = VAddr::from(self.0 + offset as u64);
    }
}

impl ops::Sub for VAddr {
    type Output = VAddr;

    fn sub(self, rhs: VAddr) -> Self::Output {
        VAddr::from(self.0 - rhs.0)
    }
}

impl ops::Sub<u64> for VAddr {
    type Output = VAddr;

    fn sub(self, rhs: u64) -> Self::Output {
        VAddr::from(self.0 - rhs)
    }
}

impl ops::Sub<usize> for VAddr {
    type Output = VAddr;

    fn sub(self, rhs: usize) -> Self::Output {
        VAddr::from(self.0 - rhs as u64)
    }
}

impl ops::Rem for VAddr {
    type Output = VAddr;

    fn rem(self, rhs: VAddr) -> Self::Output {
        VAddr(self.0 % rhs.0)
    }
}

impl ops::Rem<u64> for VAddr {
    type Output = u64;

    fn rem(self, rhs: Self::Output) -> Self::Output {
        self.0 % rhs
    }
}

impl ops::Rem<usize> for VAddr {
    type Output = usize;

    fn rem(self, rhs: Self::Output) -> Self::Output {
        self.as_usize() % rhs
    }
}

impl ops::BitAnd for VAddr {
    type Output = Self;

    fn bitand(self, rhs: Self) -> Self::Output {
        VAddr(self.0 & rhs.0)
    }
}

impl ops::BitAnd<u64> for VAddr {
    type Output = VAddr;

    fn bitand(self, rhs: u64) -> Self::Output {
        VAddr(self.0 & rhs)
    }
}

impl ops::BitAnd<usize> for VAddr {
    type Output = VAddr;

    fn bitand(self, rhs: usize) -> Self::Output {
        VAddr(self.0 & rhs as u64)
    }
}

impl ops::BitAnd<i32> for VAddr {
    type Output = VAddr;

    fn bitand(self, rhs: i32) -> Self::Output {
        VAddr(self.0 & rhs as u64)
    }
}

impl ops::BitOr for VAddr {
    type Output = VAddr;

    fn bitor(self, rhs: VAddr) -> VAddr {
        VAddr(self.0 | rhs.0)
    }
}

impl ops::BitOr<u64> for VAddr {
    type Output = VAddr;

    fn bitor(self, rhs: u64) -> Self::Output {
        VAddr(self.0 | rhs)
    }
}

impl ops::BitOr<usize> for VAddr {
    type Output = VAddr;

    fn bitor(self, rhs: usize) -> Self::Output {
        VAddr(self.0 | rhs as u64)
    }
}

impl ops::Shr<u64> for VAddr {
    type Output = u64;

    fn shr(self, rhs: u64) -> Self::Output {
        self.0 >> rhs
    }
}

impl ops::Shr<usize> for VAddr {
    type Output = u64;

    fn shr(self, rhs: usize) -> Self::Output {
        self.0 >> rhs as u64
    }
}

impl ops::Shr<i32> for VAddr {
    type Output = u64;

    fn shr(self, rhs: i32) -> Self::Output {
        self.0 >> rhs as u64
    }
}

impl fmt::Binary for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Display for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

impl fmt::Debug for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

impl fmt::LowerHex for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Octal for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::UpperHex for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl fmt::Pointer for VAddr {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use core::fmt::LowerHex;
        self.0.fmt(f)
    }
}

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for VAddr {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

/// Log2 of the base page size (4 KiB pages).
pub const BASE_PAGE_SHIFT: usize = 12;

/// Size of a base page (4 KiB).
pub const BASE_PAGE_SIZE: usize = 4096;

/// Size of a large page (2 MiB).
pub const LARGE_PAGE_SIZE: usize = 1024 * 1024 * 2;

/// Size of a huge page (1 GiB).
pub const HUGE_PAGE_SIZE: usize = 1024 * 1024 * 1024;

/// Size of the region mapped by a single PML4 entry (512 GiB).
#[cfg(target_arch = "x86_64")]
pub const PML4_SLOT_SIZE: usize = HUGE_PAGE_SIZE * 512;

/// Size of a cache line.
pub const CACHE_LINE_SIZE: usize = 64;

/// A wrapper type for a 4 KiB page of memory.
pub struct Page([u8; BASE_PAGE_SIZE]);

/// A wrapper type for a 2 MiB page of memory.
pub struct LargePage([u8; LARGE_PAGE_SIZE]);

/// A wrapper type for a 1 GiB page of memory.
pub struct HugePage([u8; HUGE_PAGE_SIZE]);

/// Number of bits used for physical addresses (MAXPHYADDR).
pub const MAXPHYADDR: u64 = 52;

/// Mask that selects the physical page-frame bits of a page-table entry.
const ADDRESS_MASK: u64 = ((1 << MAXPHYADDR) - 1) & !0xfff;

/// Number of bits in a virtual address (with five-level paging).
pub const MAXVADDR_BITS: u64 = 57;

/// Upper bound (exclusive) of the virtual address space.
pub const MAXVADDR: u64 = 1 << MAXVADDR_BITS;

/// Number of entries in a page table (of any level).
pub const PAGE_SIZE_ENTRIES: usize = 512;

/// A page-map level-4 table.
pub type PML4 = [PML4Entry; PAGE_SIZE_ENTRIES];

/// A page-map level-5 table.
pub type PML5 = [PML5Entry; PAGE_SIZE_ENTRIES];

/// A page-directory-pointer table.
#[allow(clippy::upper_case_acronyms)]
pub type PDPT = [PDPTEntry; PAGE_SIZE_ENTRIES];

/// A page directory.
pub type PD = [PDEntry; PAGE_SIZE_ENTRIES];

/// A page table.
pub type PT = [PTEntry; PAGE_SIZE_ENTRIES];

/// Given a virtual address, return the index of the corresponding PML4 entry.
#[cfg(target_arch = "x86_64")]
#[inline]
pub fn pml4_index(addr: VAddr) -> usize {
    ((addr >> 39usize) & 0b111111111) as usize
}

/// Given a virtual address, return the index of the corresponding PML5 entry.
#[cfg(target_arch = "x86_64")]
#[inline]
pub fn pml5_index(addr: VAddr) -> usize {
    ((addr >> 48usize) & 0b111111111) as usize
}

/// Given a virtual address, return the index of the corresponding PDPT entry.
#[inline]
pub fn pdpt_index(addr: VAddr) -> usize {
    ((addr >> 30usize) & 0b111111111) as usize
}

/// Given a virtual address, return the index of the corresponding page-directory entry.
#[inline]
pub fn pd_index(addr: VAddr) -> usize {
    ((addr >> 21usize) & 0b111111111) as usize
}

/// Given a virtual address, return the index of the corresponding page-table entry.
#[inline]
pub fn pt_index(addr: VAddr) -> usize {
    ((addr >> 12usize) & 0b111111111) as usize
}

bitflags! {
    /// Flags of a PML4 entry (see `PML4Entry`).
    #[repr(transparent)]
    pub struct PML4Flags: u64 {
        const P = bit!(0);
        const RW = bit!(1);
        const US = bit!(2);
        const PWT = bit!(3);
        const PCD = bit!(4);
        const A = bit!(5);
        const USER_9 = bit!(9);
        const USER_10 = bit!(10);
        const USER_11 = bit!(11);
        const XD = bit!(63);
    }
}

bitflags! {
    /// Flags of a PML5 entry (see `PML5Entry`).
    #[repr(transparent)]
    pub struct PML5Flags: u64 {
        const P = bit!(0);
        const RW = bit!(1);
        const US = bit!(2);
        const PWT = bit!(3);
        const PCD = bit!(4);
        const A = bit!(5);
        const USER_9 = bit!(9);
        const USER_10 = bit!(10);
        const USER_11 = bit!(11);
        const XD = bit!(63);
    }
}

/// A PML4 entry; points to a page-directory-pointer table.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PML4Entry(pub u64);

impl fmt::Debug for PML4Entry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "PML4Entry {{ {:#x}, {:?} }}",
            self.address(),
            self.flags()
        )
    }
}

impl PML4Entry {
    /// Creates a new PML4 entry.
    ///
    /// `pml4` is the physical address of the page-directory-pointer table this
    /// entry points to; it must be base-page aligned and fit in `MAXPHYADDR` bits.
    pub fn new(pml4: PAddr, flags: PML4Flags) -> PML4Entry {
        let pml4_val = pml4 & ADDRESS_MASK;
        assert!(pml4_val == pml4.into());
        assert!(pml4 % BASE_PAGE_SIZE == 0);
        PML4Entry(pml4_val | flags.bits)
    }

    /// Physical address this entry points to.
    pub fn address(self) -> PAddr {
        PAddr::from(self.0 & ADDRESS_MASK)
    }

    /// Flag bits of this entry.
    pub fn flags(self) -> PML4Flags {
        PML4Flags::from_bits_truncate(self.0)
    }

    check_flag!(doc = "Is page present?", is_present, PML4Flags::P);
    check_flag!(doc = "Read/write; if 0, writes may not be allowed to the 512-GByte region controlled by this entry (see Section 4.6).",
        is_writeable, PML4Flags::RW);
    check_flag!(doc = "User/supervisor; if 0, user-mode accesses are not allowed to the 512-GByte region controlled by this entry.",
        is_user_mode_allowed, PML4Flags::US);
    check_flag!(doc = "Page-level write-through; indirectly determines the memory type used to access the page-directory-pointer table referenced by this entry.",
        is_page_write_through, PML4Flags::PWT);
    check_flag!(doc = "Page-level cache disable; indirectly determines the memory type used to access the page-directory-pointer table referenced by this entry.",
        is_page_level_cache_disabled, PML4Flags::PCD);
    check_flag!(
        doc =
            "Accessed; indicates whether this entry has been used for linear-address translation.",
        is_accessed,
        PML4Flags::A
    );
    check_flag!(doc = "If IA32_EFER.NXE = 1, execute-disable. If 1, instruction fetches are not allowed from the 512-GByte region.",
        is_instruction_fetching_disabled, PML4Flags::XD);
}

/// A PML5 entry; points to a PML4 table.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PML5Entry(pub u64);

impl fmt::Debug for PML5Entry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "PML5Entry {{ {:#x}, {:?} }}",
            self.address(),
            self.flags()
        )
    }
}

impl PML5Entry {
    /// Creates a new PML5 entry.
    ///
    /// `pml4` is the physical address of the PML4 table this entry points to;
    /// it must be base-page aligned and fit in `MAXPHYADDR` bits.
    pub fn new(pml4: PAddr, flags: PML5Flags) -> PML5Entry {
        let pml5_val = pml4 & ADDRESS_MASK;
        assert!(pml5_val == pml4.into());
        assert!(pml4 % BASE_PAGE_SIZE == 0);
        PML5Entry(pml5_val | flags.bits)
    }

    /// Physical address this entry points to.
    pub fn address(self) -> PAddr {
        PAddr::from(self.0 & ADDRESS_MASK)
    }

    /// Flag bits of this entry.
    pub fn flags(self) -> PML5Flags {
        PML5Flags::from_bits_truncate(self.0)
    }

    check_flag!(doc = "Is page present?", is_present, PML5Flags::P);
    check_flag!(doc = "Read/write; if 0, writes may not be allowed to the 256-TByte region controlled by this entry (see Section 4.6).",
        is_writeable, PML5Flags::RW);
    check_flag!(doc = "User/supervisor; if 0, user-mode accesses are not allowed to the 256-TByte region controlled by this entry.",
        is_user_mode_allowed, PML5Flags::US);
    check_flag!(doc = "Page-level write-through; indirectly determines the memory type used to access the PML4 table referenced by this entry.",
        is_page_write_through, PML5Flags::PWT);
    check_flag!(doc = "Page-level cache disable; indirectly determines the memory type used to access the PML4 table referenced by this entry.",
        is_page_level_cache_disabled, PML5Flags::PCD);
    check_flag!(
        doc =
            "Accessed; indicates whether this entry has been used for linear-address translation.",
        is_accessed,
        PML5Flags::A
    );
    check_flag!(doc = "If IA32_EFER.NXE = 1, execute-disable. If 1, instruction fetches are not allowed from the 256-TByte region.",
        is_instruction_fetching_disabled, PML5Flags::XD);
}

bitflags! {
    /// Flags of a page-directory-pointer-table entry (see `PDPTEntry`).
    #[repr(transparent)]
    pub struct PDPTFlags: u64 {
        const P = bit!(0);
        const RW = bit!(1);
        const US = bit!(2);
        const PWT = bit!(3);
        const PCD = bit!(4);
        const A = bit!(5);
        const D = bit!(6);
        const PS = bit!(7);
        const G = bit!(8);
        const USER_9 = bit!(9);
        const USER_10 = bit!(10);
        const USER_11 = bit!(11);
        const PAT = bit!(12);
        const XD = bit!(63);
    }
}

/// A page-directory-pointer-table entry; points to a page directory or maps a
/// 1-GByte page.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PDPTEntry(pub u64);

impl fmt::Debug for PDPTEntry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "PDPTEntry {{ {:#x}, {:?} }}",
            self.address(),
            self.flags()
        )
    }
}

impl PDPTEntry {
    /// Creates a new PDPT entry.
    ///
    /// `pd` is the physical address this entry refers to (a page directory, or
    /// a 1-GByte page if `PS` is set); it must be base-page aligned and fit in
    /// `MAXPHYADDR` bits.
    pub fn new(pd: PAddr, flags: PDPTFlags) -> PDPTEntry {
        let pd_val = pd & ADDRESS_MASK;
        assert!(pd_val == pd.into());
        assert!(pd % BASE_PAGE_SIZE == 0);
        PDPTEntry(pd_val | flags.bits)
    }

    /// Physical address this entry points to.
    pub fn address(self) -> PAddr {
        PAddr::from(self.0 & ADDRESS_MASK)
    }

    /// Flag bits of this entry.
    pub fn flags(self) -> PDPTFlags {
        PDPTFlags::from_bits_truncate(self.0)
    }

    check_flag!(doc = "Is page present?", is_present, PDPTFlags::P);
    check_flag!(doc = "Read/write; if 0, writes may not be allowed to the 1-GByte region controlled by this entry.",
        is_writeable, PDPTFlags::RW);
    check_flag!(doc = "User/supervisor; if 0, user-mode accesses are not allowed to the 1-GByte region controlled by this entry.",
        is_user_mode_allowed, PDPTFlags::US);
    check_flag!(
        doc = "Page-level write-through.",
        is_page_write_through,
        PDPTFlags::PWT
    );
    check_flag!(
        doc = "Page-level cache disable.",
        is_page_level_cache_disabled,
        PDPTFlags::PCD
    );
    check_flag!(
        doc =
            "Accessed; indicates whether this entry has been used for linear-address translation.",
        is_accessed,
        PDPTFlags::A
    );
    check_flag!(doc = "Indirectly determines the memory type used to access the 1-GByte page referenced by this entry. If PS is not set, this is ignored.",
        is_pat, PDPTFlags::PAT);
    check_flag!(doc = "If IA32_EFER.NXE = 1, execute-disable. If 1, instruction fetches are not allowed from the 1-GByte region.",
        is_instruction_fetching_disabled, PDPTFlags::XD);
    check_flag!(doc = "Page size; if set this entry maps a 1-GByte page; otherwise, this entry references a page directory.",
        is_page, PDPTFlags::PS);
}

bitflags! {
    /// Flags of a page-directory entry (see `PDEntry`).
    #[repr(transparent)]
    pub struct PDFlags: u64 {
        const P = bit!(0);
        const RW = bit!(1);
        const US = bit!(2);
        const PWT = bit!(3);
        const PCD = bit!(4);
        const A = bit!(5);
        const D = bit!(6);
        const PS = bit!(7);
        const G = bit!(8);
        const USER_9 = bit!(9);
        const USER_10 = bit!(10);
        const USER_11 = bit!(11);
        const PAT = bit!(12);
        const XD = bit!(63);
    }
}

/// A page-directory entry; points to a page table or maps a 2-MByte page.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PDEntry(pub u64);

impl fmt::Debug for PDEntry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "PDEntry {{ {:#x}, {:?} }}", self.address(), self.flags())
    }
}

impl PDEntry {
    /// Creates a new page-directory entry.
    ///
    /// `pt` is the physical address this entry refers to (a page table, or a
    /// 2-MByte page if `PS` is set); it must be base-page aligned and fit in
    /// `MAXPHYADDR` bits.
    pub fn new(pt: PAddr, flags: PDFlags) -> PDEntry {
        let pt_val = pt & ADDRESS_MASK;
        assert!(pt_val == pt.into());
        assert!(pt % BASE_PAGE_SIZE == 0);
        PDEntry(pt_val | flags.bits)
    }

    /// Physical address this entry points to.
    pub fn address(self) -> PAddr {
        PAddr::from(self.0 & ADDRESS_MASK)
    }

    /// Flag bits of this entry.
    pub fn flags(self) -> PDFlags {
        PDFlags::from_bits_truncate(self.0)
    }

    check_flag!(
        doc = "Present; must be 1 to map a 2-MByte page or reference a page table.",
        is_present,
        PDFlags::P
    );
    check_flag!(doc = "Read/write; if 0, writes may not be allowed to the 2-MByte region controlled by this entry.",
        is_writeable, PDFlags::RW);
    check_flag!(doc = "User/supervisor; if 0, user-mode accesses are not allowed to the 2-MByte region controlled by this entry.",
        is_user_mode_allowed, PDFlags::US);
    check_flag!(
        doc = "Page-level write-through.",
        is_page_write_through,
        PDFlags::PWT
    );
    check_flag!(
        doc = "Page-level cache disable.",
        is_page_level_cache_disabled,
        PDFlags::PCD
    );
    check_flag!(doc = "Accessed; if PS is set, indicates whether software has accessed the 2-MByte page; otherwise, indicates whether this entry has been used for linear-address translation.",
        is_accessed, PDFlags::A);
    check_flag!(doc = "Dirty; if PS is set, indicates whether software has written to the 2-MByte page referenced by this entry; otherwise ignored.",
        is_dirty, PDFlags::D);
    check_flag!(doc = "Page size; if set this entry maps a 2-MByte page; otherwise, this entry references a page table.",
        is_page, PDFlags::PS);
    check_flag!(doc = "Global; if PS is set and CR4.PGE = 1, determines whether the translation is global; otherwise ignored.",
        is_global, PDFlags::G);
    check_flag!(doc = "Indirectly determines the memory type used to access the 2-MByte page referenced by this entry. If PS is not set, this is ignored.",
        is_pat, PDFlags::PAT);
    check_flag!(doc = "If IA32_EFER.NXE = 1, execute-disable. If 1, instruction fetches are not allowed from the 2-MByte region.",
        is_instruction_fetching_disabled, PDFlags::XD);
}

bitflags! {
    /// Flags of a page-table entry (see `PTEntry`).
    #[repr(transparent)]
    pub struct PTFlags: u64 {
        const P = bit!(0);
        const RW = bit!(1);
        const US = bit!(2);
        const PWT = bit!(3);
        const PCD = bit!(4);
        const A = bit!(5);
        const D = bit!(6);
        const G = bit!(8);
        const USER_9 = bit!(9);
        const USER_10 = bit!(10);
        const USER_11 = bit!(11);
        const XD = bit!(63);
    }
}

/// A page-table entry; maps a 4-KByte page.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct PTEntry(pub u64);

impl fmt::Debug for PTEntry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "PTEntry {{ {:#x}, {:?} }}", self.address(), self.flags())
    }
}

impl PTEntry {
    /// Creates a new page-table entry.
    ///
    /// `page` is the physical address of the 4-KByte page this entry maps; it
    /// must be base-page aligned and fit in `MAXPHYADDR` bits.
    pub fn new(page: PAddr, flags: PTFlags) -> PTEntry {
        let page_val = page & ADDRESS_MASK;
        assert!(page_val == page.into());
        assert!(page % BASE_PAGE_SIZE == 0);
        PTEntry(page_val | flags.bits)
    }

    /// Physical address this entry points to.
    pub fn address(self) -> PAddr {
        PAddr::from(self.0 & ADDRESS_MASK)
    }

    /// Flag bits of this entry.
    pub fn flags(self) -> PTFlags {
        PTFlags::from_bits_truncate(self.0)
    }

    check_flag!(
        doc = "Present; must be 1 to map a 4-KByte page.",
        is_present,
        PTFlags::P
    );
    check_flag!(doc = "Read/write; if 0, writes may not be allowed to the 4-KByte region controlled by this entry.",
        is_writeable, PTFlags::RW);
    check_flag!(doc = "User/supervisor; if 0, user-mode accesses are not allowed to the 4-KByte region controlled by this entry.",
        is_user_mode_allowed, PTFlags::US);
    check_flag!(
        doc = "Page-level write-through.",
        is_page_write_through,
        PTFlags::PWT
    );
    check_flag!(
        doc = "Page-level cache disable.",
        is_page_level_cache_disabled,
        PTFlags::PCD
    );
    check_flag!(doc = "Accessed; indicates whether software has accessed the 4-KByte page referenced by this entry.",
        is_accessed, PTFlags::A);
    check_flag!(doc = "Dirty; indicates whether software has written to the 4-KByte page referenced by this entry.",
        is_dirty, PTFlags::D);
    check_flag!(doc = "Global; if CR4.PGE = 1, determines whether the translation is global; otherwise ignored.",
        is_global, PTFlags::G);
    check_flag!(doc = "If IA32_EFER.NXE = 1, execute-disable. If 1, instruction fetches are not allowed from the 4-KByte region.",
        is_instruction_fetching_disabled, PTFlags::XD);
}

#[cfg(all(test, feature = "utest"))]
mod test {
    use super::*;

    #[test]
    fn paddr_align() {
        let base = PAddr::from(0x1000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x1000);
        assert_eq!(base.huge_page_offset(), 0x1000);
        assert_eq!(base.align_down_to_base_page(), PAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), PAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), PAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), PAddr::from(0x1000));
        assert_eq!(base.align_up_to_large_page(), PAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), PAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = PAddr::from(0x1001);
        assert_eq!(base.base_page_offset(), 0x1);
        assert_eq!(base.large_page_offset(), 0x1001);
        assert_eq!(base.huge_page_offset(), 0x1001);
        assert_eq!(base.align_down_to_base_page(), PAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), PAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), PAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), PAddr::from(0x2000));
        assert_eq!(base.align_up_to_large_page(), PAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), PAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(!base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));

        let base = PAddr::from(0x200000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x0);
        assert_eq!(base.huge_page_offset(), 0x200000);
        assert_eq!(base.align_down_to_base_page(), PAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), PAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), PAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), PAddr::from(0x200000));
        assert_eq!(base.align_up_to_large_page(), PAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), PAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = PAddr::from(0x200002);
        assert_eq!(base.base_page_offset(), 0x2);
        assert_eq!(base.large_page_offset(), 0x2);
        assert_eq!(base.huge_page_offset(), 0x200002);
        assert_eq!(base.align_down_to_base_page(), PAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), PAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), PAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), PAddr::from(0x201000));
        assert_eq!(base.align_up_to_large_page(), PAddr::from(0x400000));
        assert_eq!(base.align_up_to_huge_page(), PAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));
    }
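
    // Added as an illustrative sketch: exercises the private `align_down`/`align_up`
    // helpers directly, using arbitrary example values.
    #[test]
    fn raw_align_helpers() {
        assert_eq!(align_down(0x1234, 0x1000), 0x1000);
        assert_eq!(align_down(0x1000, 0x1000), 0x1000);
        assert_eq!(align_up(0x1234, 0x1000), 0x2000);
        assert_eq!(align_up(0x1000, 0x1000), 0x1000);
        assert_eq!(align_up(0x0, 0x1000), 0x0);
    }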

    #[test]
    fn ioaddr_align() {
        let base = IOAddr::from(0x1000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x1000);
        assert_eq!(base.huge_page_offset(), 0x1000);
        assert_eq!(base.align_down_to_base_page(), IOAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), IOAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), IOAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), IOAddr::from(0x1000));
        assert_eq!(base.align_up_to_large_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), IOAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = IOAddr::from(0x1001);
        assert_eq!(base.base_page_offset(), 0x1);
        assert_eq!(base.large_page_offset(), 0x1001);
        assert_eq!(base.huge_page_offset(), 0x1001);
        assert_eq!(base.align_down_to_base_page(), IOAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), IOAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), IOAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), IOAddr::from(0x2000));
        assert_eq!(base.align_up_to_large_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), IOAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(!base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));

        let base = IOAddr::from(0x200000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x0);
        assert_eq!(base.huge_page_offset(), 0x200000);
        assert_eq!(base.align_down_to_base_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), IOAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_up_to_large_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), IOAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = IOAddr::from(0x200002);
        assert_eq!(base.base_page_offset(), 0x2);
        assert_eq!(base.large_page_offset(), 0x2);
        assert_eq!(base.huge_page_offset(), 0x200002);
        assert_eq!(base.align_down_to_base_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), IOAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), IOAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), IOAddr::from(0x201000));
        assert_eq!(base.align_up_to_large_page(), IOAddr::from(0x400000));
        assert_eq!(base.align_up_to_huge_page(), IOAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));
    }
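
    // Added as an illustrative sketch (arbitrary values): a virtual address built
    // from known 9-bit table indices decomposes back into those indices.
    #[test]
    fn vaddr_index_decomposition() {
        let v = VAddr::from((1u64 << 39) | (2 << 30) | (3 << 21) | (4 << 12) | 5);
        #[cfg(target_arch = "x86_64")]
        assert_eq!(pml4_index(v), 1);
        assert_eq!(pdpt_index(v), 2);
        assert_eq!(pd_index(v), 3);
        assert_eq!(pt_index(v), 4);
        assert_eq!(v.base_page_offset(), 5);
    }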

    #[test]
    fn vaddr_align() {
        let base = VAddr::from(0x1000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x1000);
        assert_eq!(base.huge_page_offset(), 0x1000);
        assert_eq!(base.align_down_to_base_page(), VAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), VAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), VAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), VAddr::from(0x1000));
        assert_eq!(base.align_up_to_large_page(), VAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), VAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = VAddr::from(0x1001);
        assert_eq!(base.base_page_offset(), 0x1);
        assert_eq!(base.large_page_offset(), 0x1001);
        assert_eq!(base.huge_page_offset(), 0x1001);
        assert_eq!(base.align_down_to_base_page(), VAddr::from(0x1000));
        assert_eq!(base.align_down_to_large_page(), VAddr::from(0x0));
        assert_eq!(base.align_down_to_huge_page(), VAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), VAddr::from(0x2000));
        assert_eq!(base.align_up_to_large_page(), VAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), VAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(!base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));

        let base = VAddr::from(0x200000);
        assert_eq!(base.base_page_offset(), 0x0);
        assert_eq!(base.large_page_offset(), 0x0);
        assert_eq!(base.huge_page_offset(), 0x200000);
        assert_eq!(base.align_down_to_base_page(), VAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), VAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), VAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), VAddr::from(0x200000));
        assert_eq!(base.align_up_to_large_page(), VAddr::from(0x200000));
        assert_eq!(base.align_up_to_huge_page(), VAddr::from(1073741824));
        assert!(base.is_base_page_aligned());
        assert!(base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(base.is_aligned(0x4u64));

        let base = VAddr::from(0x200002);
        assert_eq!(base.base_page_offset(), 0x2);
        assert_eq!(base.large_page_offset(), 0x2);
        assert_eq!(base.huge_page_offset(), 0x200002);
        assert_eq!(base.align_down_to_base_page(), VAddr::from(0x200000));
        assert_eq!(base.align_down_to_large_page(), VAddr::from(0x200000));
        assert_eq!(base.align_down_to_huge_page(), VAddr::from(0x0));
        assert_eq!(base.align_up_to_base_page(), VAddr::from(0x201000));
        assert_eq!(base.align_up_to_large_page(), VAddr::from(0x400000));
        assert_eq!(base.align_up_to_huge_page(), VAddr::from(1073741824));
        assert!(!base.is_base_page_aligned());
        assert!(!base.is_large_page_aligned());
        assert!(!base.is_huge_page_aligned());
        assert!(base.is_aligned(0x1u64));
        assert!(base.is_aligned(0x2u64));
        assert!(!base.is_aligned(0x3u64));
        assert!(!base.is_aligned(0x4u64));
    }
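
    // Added as an illustrative sketch: round-trip a PML4 entry through
    // `new`/`address`/`flags` (0xdead_b000 is an arbitrary, base-page-aligned frame).
    #[test]
    fn pml4_entry_roundtrip() {
        let e = PML4Entry::new(PAddr::from(0xdead_b000u64), PML4Flags::P | PML4Flags::RW);
        assert_eq!(e.address(), PAddr::from(0xdead_b000u64));
        assert!(e.is_present());
        assert!(e.is_writeable());
        assert!(!e.is_user_mode_allowed());
        assert!(!e.is_instruction_fetching_disabled());
    }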
}