#![no_std]

use core::fmt;
use core::fmt::Debug;
use core::ops::{
    Add, AddAssign, BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Not, Shl,
    ShlAssign, Shr, ShrAssign, Sub, SubAssign,
};

pub mod arch;
#[macro_use]
pub(crate) mod macros;
pub(crate) use macros::impl_address;

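// Re-export the address types matching the compilation target; when no
// architecture-specific feature applies, the generic fallback implementation is used.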
cfg_if::cfg_if! {
    if #[cfg(all(target_arch = "x86_64", feature = "x86_64"))] {
        pub use crate::arch::x86_64::{PhysAddr, VirtAddr};
    } else if #[cfg(all(target_arch = "aarch64", feature = "aarch64"))] {
        pub use crate::arch::aarch64::{PhysAddr, VirtAddr};
    } else if #[cfg(all(target_arch = "riscv64", feature = "riscv64"))] {
        pub use crate::arch::riscv64::{PhysAddr, VirtAddr};
    } else {
        pub use crate::arch::fallback::{PhysAddr, VirtAddr};
    }
}

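/// Interface shared by the address types exported by this crate, such as
/// [`PhysAddr`] and [`VirtAddr`]: comparison, arithmetic and bit operations
/// against the raw representation, and conversion back to that representation.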
pub trait MemoryAddress:
    PartialEq
    + Eq
    + PartialOrd
    + Ord
    + Copy
    + Clone
    + Sized
    + BitAnd<Self::RAW, Output = Self::RAW>
    + BitAndAssign<Self::RAW>
    + BitOr<Self::RAW, Output = Self::RAW>
    + BitOrAssign<Self::RAW>
    + BitXor<Self::RAW, Output = Self::RAW>
    + BitXorAssign<Self::RAW>
    + Add<Self::RAW>
    + AddAssign<Self::RAW>
    + Sub<Self, Output = Self::RAW>
    + Sub<Self::RAW, Output = Self>
    + SubAssign<Self::RAW>
    + Shr<usize, Output = Self>
    + ShrAssign<usize>
    + Shl<usize, Output = Self>
    + ShlAssign<usize>
    + fmt::Binary
    + fmt::LowerHex
    + fmt::UpperHex
    + fmt::Octal
    + fmt::Pointer
{
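    /// Raw representation of the address value; the bounds require integer-like
    /// arithmetic, bit operations and conversions (`From<u8>`, `TryInto<usize>`).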
    type RAW: Copy
        + PartialEq
        + Eq
        + PartialOrd
        + Ord
        + Not<Output = Self::RAW>
        + Add<Output = Self::RAW>
        + Sub<Output = Self::RAW>
        + BitAnd<Output = Self::RAW>
        + BitOr<Output = Self::RAW>
        + BitXor<Output = Self::RAW>
        + Debug
        + From<u8>
        + TryInto<usize, Error: Debug>;

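    /// Returns the raw value backing this address.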
    fn raw(self) -> Self::RAW;
}

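/// Error returned when constructing an [`AddrRange`] fails.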
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum AddrRangeError {
    EndBeforeStart,
}

impl fmt::Display for AddrRangeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::EndBeforeStart => {
                f.write_str("Range end address can't be smaller than range start address")
            }
        }
    }
}

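/// A half-open range of addresses: `start` is included, `end` is excluded.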
pub struct AddrRange<T: MemoryAddress> {
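    /// Inclusive start address of the range.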
    pub start: T,
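    /// Exclusive end address of the range.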
    pub end: T,
}
impl<T: MemoryAddress> AddrRange<T> {
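    /// Creates a new range from `start` (inclusive) to `end` (exclusive),
    /// returning [`AddrRangeError::EndBeforeStart`] if `end` is smaller than `start`.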
    pub fn new(start: T, end: T) -> Result<Self, AddrRangeError> {
        if end < start {
            return Err(AddrRangeError::EndBeforeStart);
        }
        Ok(Self { start, end })
    }

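    /// Returns an iterator that yields every address in the range, advancing by
    /// one raw unit per step.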
    pub fn iter(&self) -> AddrIter<T> {
        AddrIter {
            current: self.start,
            end: self.end,
        }
    }

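    /// Returns `true` if `element` lies within the range (`start` inclusive,
    /// `end` exclusive).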
    pub fn contains(&self, element: &T) -> bool {
        element.raw() >= self.start.raw() && element.raw() < self.end.raw()
    }

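    /// Returns the length of the range in raw address units.
    ///
    /// # Panics
    ///
    /// Panics if the difference between `end` and `start` does not fit in a `usize`.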
    pub fn length(&self) -> usize {
        (self.end.raw() - self.start.raw())
            .try_into()
            .expect("address range is larger than the architecture's usize")
    }
}
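/// Iterator over the addresses in an [`AddrRange`], created by [`AddrRange::iter`].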
pub struct AddrIter<T: MemoryAddress> {
    current: T,
    end: T,
}
impl<T: MemoryAddress> Iterator for AddrIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        if self.current >= self.end {
            None
        } else {
            let ret = Some(self.current);
            // Step forward by one raw unit, converted through the `From<u8>` bound.
            self.current += 1.into();
            ret
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    pub fn virtaddr_new_truncate() {
        assert_eq!(VirtAddr::new_truncate(0), VirtAddr::new(0));
        assert_eq!(VirtAddr::new_truncate(123), VirtAddr::new(123));
    }

    #[test]
    fn test_from_ptr_array() {
        let slice = &[1, 2, 3, 4, 5];
        assert_eq!(VirtAddr::from_ptr(slice), VirtAddr::from_ptr(&slice[0]));
    }

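    // Illustrative additional check: exercises `length()` and the
    // `EndBeforeStart` error path of `AddrRange::new`.
    #[test]
    fn test_addr_range_length_and_error() {
        let r = AddrRange::new(VirtAddr::new(0x0), VirtAddr::new(0x3)).unwrap();
        assert_eq!(r.length(), 3);

        assert_eq!(
            AddrRange::new(VirtAddr::new(0x3), VirtAddr::new(0x0)).err(),
            Some(AddrRangeError::EndBeforeStart)
        );
    }
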
    #[test]
    fn test_addr_range() {
        let r = AddrRange::new(VirtAddr::new(0x0), VirtAddr::new(0x3)).unwrap();
        assert!(r.contains(&VirtAddr::new(0x0)));
        assert!(r.contains(&VirtAddr::new(0x1)));
        assert!(!r.contains(&VirtAddr::new(0x3)));
        let mut i = r.iter();
        assert_eq!(i.next().unwrap(), VirtAddr::new(0x0));
        assert_eq!(i.next().unwrap(), VirtAddr::new(0x1));
        assert_eq!(i.next().unwrap(), VirtAddr::new(0x2));
        assert!(i.next().is_none());

        for (i, a) in r.iter().enumerate() {
            assert_eq!(a.raw() as usize, i);
        }

        let r = AddrRange::new(PhysAddr::new(0x2), PhysAddr::new(0x4)).unwrap();
        let mut i = r.iter();
        assert_eq!(i.next().unwrap(), PhysAddr::new(0x2));
        assert_eq!(i.next().unwrap(), PhysAddr::new(0x3));
        assert!(i.next().is_none());

        assert_eq!(r.iter().map(|a| a.raw() as usize).sum::<usize>(), 0x5);
    }
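
    // Illustrative sketch of writing code that is generic over `MemoryAddress`,
    // relying only on the trait's own bounds (`BitAnd`/`Sub` against `RAW`,
    // `PartialEq` on `RAW`, and the `From<u8>` conversion). It assumes `align`
    // is a power of two and that the architecture implementations' `BitAnd`
    // masks the underlying address value.
    fn is_aligned<A: MemoryAddress>(addr: A, align: A::RAW) -> bool {
        let zero: A::RAW = 0u8.into();
        let one: A::RAW = 1u8.into();
        // A power-of-two alignment minus one gives the mask of the low bits.
        (addr & (align - one)) == zero
    }

    #[test]
    fn test_is_aligned_sketch() {
        // 0x10 fits in a `u8`, so the `From<u8>` bound is enough to build it.
        let align: <VirtAddr as MemoryAddress>::RAW = 0x10u8.into();
        assert!(is_aligned(VirtAddr::new(0x20), align));
        assert!(!is_aligned(VirtAddr::new(0x21), align));
    }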
}