use crate::{talc::Talc, OomHandler};

use core::{
    alloc::{GlobalAlloc, Layout},
    cmp::Ordering,
    ptr::{null_mut, NonNull},
};

#[cfg(feature = "allocator")]
use core::alloc::{AllocError, Allocator};

#[cfg(all(feature = "allocator-api2", not(feature = "allocator")))]
use allocator_api2::alloc::{AllocError, Allocator};

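/// Returns whether `ptr` is aligned to at least `align` bytes.
/// `align` must be a power of two.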
#[cfg(any(feature = "allocator", feature = "allocator-api2"))]
pub(crate) fn is_aligned_to(ptr: *mut u8, align: usize) -> bool {
    (ptr as usize).trailing_zeros() >= align.trailing_zeros()
}

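/// Copies of at most this many bytes are performed while holding the lock
/// during reallocation; larger copies release the lock for the duration of
/// the copy so that other threads aren't stalled.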
const RELEASE_LOCK_ON_REALLOC_LIMIT: usize = 0x10000;

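/// Talc lock: a mutex-locked [`Talc`].
///
/// Implements [`GlobalAlloc`] and, with the `allocator` or `allocator-api2`
/// feature enabled, the `Allocator` trait.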
#[derive(Debug)]
pub struct Talck<R: lock_api::RawMutex, O: OomHandler> {
    mutex: lock_api::Mutex<R, Talc<O>>,
}

impl<R: lock_api::RawMutex, O: OomHandler> Talck<R, O> {
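    /// Create a new `Talck` wrapping the given [`Talc`].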
    pub const fn new(talc: Talc<O>) -> Self {
        Self { mutex: lock_api::Mutex::new(talc) }
    }

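    /// Lock the mutex and access the inner [`Talc`].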
    pub fn lock(&self) -> lock_api::MutexGuard<R, Talc<O>> {
        self.mutex.lock()
    }

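    /// Try to lock the mutex and access the inner [`Talc`].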
    pub fn try_lock(&self) -> Option<lock_api::MutexGuard<R, Talc<O>>> {
        self.mutex.try_lock()
    }

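    /// Consume the `Talck`, returning the inner [`Talc`].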
    pub fn into_inner(self) -> Talc<O> {
        self.mutex.into_inner()
    }
}

unsafe impl<R: lock_api::RawMutex, O: OomHandler> GlobalAlloc for Talck<R, O> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.lock().malloc(layout).map_or(null_mut(), |nn| nn.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.lock().free(NonNull::new_unchecked(ptr), layout)
    }

    unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
        let nn_ptr = NonNull::new_unchecked(ptr);

        match new_size.cmp(&old_layout.size()) {
            Ordering::Greater => {
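                // Try to extend the allocation in place first, avoiding a copy.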
                if let Ok(nn) = self.lock().grow_in_place(nn_ptr, old_layout, new_size) {
                    return nn.as_ptr();
                }

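                // In-place growth failed: allocate a new block, copy the
                // contents across, then free the old block.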
                let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());

                let mut lock = self.lock();
                let allocation = match lock.malloc(new_layout) {
                    Ok(ptr) => ptr,
                    Err(_) => return null_mut(),
                };

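                // Release the lock while copying large allocations so that
                // other threads aren't blocked for the duration of the copy.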
                if old_layout.size() > RELEASE_LOCK_ON_REALLOC_LIMIT {
                    drop(lock);
                    allocation.as_ptr().copy_from_nonoverlapping(ptr, old_layout.size());
                    lock = self.lock();
                } else {
                    allocation.as_ptr().copy_from_nonoverlapping(ptr, old_layout.size());
                }

                lock.free(nn_ptr, old_layout);
                allocation.as_ptr()
            }

            Ordering::Less => {
                self.lock().shrink(nn_ptr, old_layout, new_size);
                ptr
            }

            Ordering::Equal => ptr,
        }
    }
}

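/// Construct a `NonNull<[u8]>` from a pointer and a length.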
#[cfg(any(feature = "allocator", feature = "allocator-api2"))]
fn nonnull_slice_from_raw_parts(ptr: NonNull<u8>, len: usize) -> NonNull<[u8]> {
    unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr.as_ptr(), len)) }
}

#[cfg(any(feature = "allocator", feature = "allocator-api2"))]
unsafe impl<R: lock_api::RawMutex, O: OomHandler> Allocator for Talck<R, O> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout.size() == 0 {
            return Ok(nonnull_slice_from_raw_parts(NonNull::dangling(), 0));
        }

        unsafe { self.lock().malloc(layout) }
            .map(|nn| nonnull_slice_from_raw_parts(nn, layout.size()))
            .map_err(|_| AllocError)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            self.lock().free(ptr, layout);
        }
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(new_layout.size() >= old_layout.size());

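        // Growing a zero-sized allocation is just a fresh allocation.
        // Otherwise, attempt to grow in place, which is only permissible
        // if the pointer already satisfies the new alignment.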
        if old_layout.size() == 0 {
            return self.allocate(new_layout);
        } else if is_aligned_to(ptr.as_ptr(), new_layout.align()) {
            if let Ok(nn) = self.lock().grow_in_place(ptr, old_layout, new_layout.size()) {
                return Ok(nonnull_slice_from_raw_parts(nn, new_layout.size()));
            }
        }

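        // In-place growth failed or wasn't possible: allocate a new block,
        // copy the contents across, then free the old block.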
        let mut lock = self.lock();
        let allocation = lock.malloc(new_layout).map_err(|_| AllocError)?;

        if old_layout.size() > RELEASE_LOCK_ON_REALLOC_LIMIT {
            drop(lock);
            allocation.as_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
            lock = self.lock();
        } else {
            allocation.as_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
        }

        lock.free(ptr, old_layout);

        Ok(nonnull_slice_from_raw_parts(allocation, new_layout.size()))
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let res = self.grow(ptr, old_layout, new_layout);

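        // Zero the newly exposed bytes beyond the old allocation's size.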
        if let Ok(allocation) = res {
            allocation
                .as_ptr()
                .cast::<u8>()
                .add(old_layout.size())
                .write_bytes(0, new_layout.size() - old_layout.size());
        }

        res
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(new_layout.size() <= old_layout.size());

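        // Shrinking to zero size frees the allocation entirely and
        // hands back a dangling pointer.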
        if new_layout.size() == 0 {
            if old_layout.size() > 0 {
                self.lock().free(ptr, old_layout);
            }

            return Ok(nonnull_slice_from_raw_parts(NonNull::dangling(), 0));
        }

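        // If the pointer doesn't satisfy the new alignment, a new allocation
        // is required; copy over only the truncated contents.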
        if !is_aligned_to(ptr.as_ptr(), new_layout.align()) {
            let mut lock = self.lock();
            let allocation = lock.malloc(new_layout).map_err(|_| AllocError)?;

            if new_layout.size() > RELEASE_LOCK_ON_REALLOC_LIMIT {
                drop(lock);
                allocation.as_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
                lock = self.lock();
            } else {
                allocation.as_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
            }

            lock.free(ptr, old_layout);
            return Ok(nonnull_slice_from_raw_parts(allocation, new_layout.size()));
        }

        self.lock().shrink(ptr, old_layout, new_layout.size());

        Ok(nonnull_slice_from_raw_parts(ptr, new_layout.size()))
    }
}

impl<O: OomHandler> Talc<O> {
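    /// Wrap the [`Talc`] in a [`Talck`], which implements
    /// [`GlobalAlloc`](core::alloc::GlobalAlloc) and, with the relevant
    /// feature enabled, the `Allocator` trait.
    ///
    /// Any [`lock_api::RawMutex`] may be used. A minimal sketch of
    /// global-allocator usage, assuming the `spin` crate (with its
    /// `lock_api` feature) provides the raw mutex:
    ///
    /// ```ignore
    /// #[global_allocator]
    /// static ALLOCATOR: Talck<spin::Mutex<()>, ErrOnOom> =
    ///     Talc::new(ErrOnOom).lock();
    /// ```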
    pub const fn lock<R: lock_api::RawMutex>(self) -> Talck<R, O> {
        Talck::new(self)
    }
}

#[cfg(target_family = "wasm")]
impl TalckWasm {
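    /// Create a [`Talck`] for managing WASM memory.
    ///
    /// # Safety
    /// The runtime environment must be single-threaded WASM: the
    /// [`AssumeUnlockable`](crate::locking::AssumeUnlockable) lock performs
    /// no real synchronization.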
    pub const unsafe fn new_global() -> Self {
        Talc::new(crate::WasmHandler::new()).lock()
    }
}

262
263#[cfg(all(target_family = "wasm"))]
264pub type TalckWasm = Talck<crate::locking::AssumeUnlockable, crate::WasmHandler>;