#![allow(unpredictable_function_pointer_comparisons)]

#[cfg(unix)]
use core::ffi::c_int;
use core::{
    alloc::Layout,
    ffi::{c_uint, c_void},
    marker::PhantomData,
    mem,
    ptr::NonNull,
};

#[allow(non_camel_case_types)]
type size_t = usize;

const ALIGN: u8 = 64;
const _: () = assert!(ALIGN.count_ones() == 1);
const _: () = assert!(ALIGN as usize % mem::size_of::<*mut c_void>() == 0);

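/// Allocates `items * size` bytes via `posix_memalign`, so the returned
/// pointer is aligned to [`ALIGN`]. Returns null if the requested size is
/// zero or the allocation fails.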
#[cfg(unix)]
unsafe extern "C" fn zalloc_c(opaque: *mut c_void, items: c_uint, size: c_uint) -> *mut c_void {
    let _ = opaque;

    extern "C" {
        fn posix_memalign(memptr: *mut *mut c_void, align: size_t, size: size_t) -> c_int;
    }

    let mut ptr = core::ptr::null_mut();
    let size = items as size_t * size as size_t;
    if size == 0 {
        return ptr;
    }
    match unsafe { posix_memalign(&mut ptr, ALIGN.into(), size) } {
        0 => ptr,
        _ => core::ptr::null_mut(),
    }
}

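/// Fallback for non-unix targets: plain `malloc`, which only guarantees the
/// platform's default alignment. `Allocator::allocate_layout` compensates by
/// over-allocating and aligning the returned pointer manually.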
#[cfg(not(unix))]
unsafe extern "C" fn zalloc_c(opaque: *mut c_void, items: c_uint, size: c_uint) -> *mut c_void {
    let _ = opaque;

    let size = items as size_t * size as size_t;
    if size == 0 {
        return core::ptr::null_mut();
    }

    extern "C" {
        fn malloc(size: size_t) -> *mut c_void;
    }

    unsafe { malloc(size) }
}

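/// Like `zalloc_c`, but zero-initialized via `calloc`. Reached through
/// `allocate_layout_zeroed`, which routes the result through the alignment
/// fixup in `allocate_layout`.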
unsafe extern "C" fn zalloc_c_calloc(
    opaque: *mut c_void,
    items: c_uint,
    size: c_uint,
) -> *mut c_void {
    let _ = opaque;

    extern "C" {
        fn calloc(nitems: size_t, size: size_t) -> *mut c_void;
    }

    if items as size_t * size as size_t == 0 {
        return core::ptr::null_mut();
    }

    unsafe { calloc(items as size_t, size as size_t) }
}

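/// Frees pointers produced by `zalloc_c` and `zalloc_c_calloc`; `free`
/// accepts null, so a null `ptr` is a no-op.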
unsafe extern "C" fn zfree_c(opaque: *mut c_void, ptr: *mut c_void) {
    let _ = opaque;

    extern "C" {
        fn free(p: *mut c_void);
    }

    unsafe { free(ptr) }
}

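/// Allocates through the Rust global allocator, aligned to [`ALIGN`]. The
/// matching `zfree_rust` must reconstruct the layout, so the allocation size
/// has to be passed back through `opaque` when freeing.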
#[cfg(feature = "rust-allocator")]
unsafe extern "C" fn zalloc_rust(_opaque: *mut c_void, count: c_uint, size: c_uint) -> *mut c_void {
    let size = count as usize * size as usize;
    if size == 0 {
        return core::ptr::null_mut();
    }

    let layout = Layout::from_size_align(size, ALIGN.into()).unwrap();

    let ptr = unsafe { std::alloc::alloc(layout) };

    ptr as *mut c_void
}

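/// Zeroed variant of `zalloc_rust`, backed by `std::alloc::alloc_zeroed`.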
#[cfg(feature = "rust-allocator")]
unsafe extern "C" fn zalloc_rust_calloc(
    _opaque: *mut c_void,
    count: c_uint,
    size: c_uint,
) -> *mut c_void {
    let size = count as usize * size as usize;
    if size == 0 {
        return core::ptr::null_mut();
    }

    let layout = Layout::from_size_align(size, ALIGN.into()).unwrap();

    let ptr = unsafe { std::alloc::alloc_zeroed(layout) };

    ptr as *mut c_void
}

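/// Frees an allocation made by `zalloc_rust`. `opaque` must point to a
/// `usize` holding the size of the allocation, because the Rust allocator
/// requires the original layout to deallocate, unlike C's `free`.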
#[cfg(feature = "rust-allocator")]
unsafe extern "C" fn zfree_rust(opaque: *mut c_void, ptr: *mut c_void) {
    if ptr.is_null() {
        return;
    }

    // In debug builds, a null `opaque` is a caller error; in release builds,
    // leak rather than risk deallocating with a bogus layout.
    debug_assert!(!opaque.is_null());
    if opaque.is_null() {
        return;
    }

    let size = unsafe { *(opaque as *mut usize) };

    if size == 0 {
        return;
    }

    let layout = Layout::from_size_align(size, ALIGN.into()).unwrap();

    unsafe { std::alloc::dealloc(ptr.cast(), layout) };
}

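/// Test allocator that always fails, for exercising error paths.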
#[cfg(test)]
unsafe extern "C" fn zalloc_fail(_: *mut c_void, _: c_uint, _: c_uint) -> *mut c_void {
    core::ptr::null_mut()
}

#[cfg(test)]
unsafe extern "C" fn zfree_fail(_: *mut c_void, _: *mut c_void) {}

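/// A zlib-style allocator: a `zalloc`/`zfree` pair plus the `opaque` value
/// that is passed to both. `#[repr(C)]` so it matches the layout of the
/// corresponding `zalloc`, `zfree`, and `opaque` fields of the C `z_stream`.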
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Allocator<'a> {
    pub zalloc: crate::c_api::alloc_func,
    pub zfree: crate::c_api::free_func,
    pub opaque: crate::c_api::voidpf,
    pub _marker: PhantomData<&'a ()>,
}

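// Safety: sharing an `Allocator` across threads requires that the `zalloc`
// and `zfree` implementations are thread-safe, which holds for the
// allocators defined in this module.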
unsafe impl Sync for Allocator<'static> {}

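/// Allocator built on the Rust global allocator (`std::alloc`).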
#[cfg(feature = "rust-allocator")]
pub static RUST: Allocator<'static> = Allocator {
    zalloc: zalloc_rust,
    zfree: zfree_rust,
    opaque: core::ptr::null_mut(),
    _marker: PhantomData,
};

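/// Allocator built on the C runtime's `malloc`/`free` (or `posix_memalign`
/// on unix).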
#[cfg(feature = "c-allocator")]
pub static C: Allocator<'static> = Allocator {
    zalloc: zalloc_c,
    zfree: zfree_c,
    opaque: core::ptr::null_mut(),
    _marker: PhantomData,
};

#[cfg(test)]
static FAIL: Allocator<'static> = Allocator {
    zalloc: zalloc_fail,
    zfree: zfree_fail,
    opaque: core::ptr::null_mut(),
    _marker: PhantomData,
};

impl Allocator<'_> {
    fn allocate_layout(&self, layout: Layout) -> *mut c_void {
        assert!(layout.align() <= ALIGN.into());

        #[cfg(feature = "rust-allocator")]
        if self.zalloc == RUST.zalloc {
            let ptr = unsafe { (RUST.zalloc)(self.opaque, layout.size() as _, 1) };

            debug_assert_eq!(ptr as usize % layout.align(), 0);

            return ptr;
        }

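        // The caller's zalloc makes no alignment promise, so over-allocate:
        // enough slack to bump the pointer up to `layout.align()`, plus room
        // to stash the original pointer for `deallocate` to find.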
        let extra_space = core::mem::size_of::<*mut c_void>() + layout.align();

        let ptr = unsafe { (self.zalloc)(self.opaque, (layout.size() + extra_space) as _, 1) };

        if ptr.is_null() {
            return ptr;
        }

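        // Offset from `ptr` up to the next `layout.align()` boundary.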
        let align_diff = (ptr as usize).next_multiple_of(layout.align()) - (ptr as usize);

        let mut return_ptr = unsafe { ptr.cast::<u8>().add(align_diff) };

        if align_diff < core::mem::size_of::<*mut c_void>() {
            // Not enough room below the aligned pointer to stash the original
            // pointer; bump by a full alignment step (at least pointer-sized).
            let offset = Ord::max(core::mem::size_of::<*mut c_void>(), layout.align());
            return_ptr = unsafe { return_ptr.add(offset) };
        }

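        // Write the pointer that zalloc returned directly below the pointer
        // we hand out; `deallocate` reads it back to free the whole block.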
        unsafe {
            let original_ptr = return_ptr.sub(core::mem::size_of::<*mut c_void>());
            core::ptr::write_unaligned(original_ptr.cast::<*mut c_void>(), ptr);
        };

        let ptr = return_ptr.cast::<c_void>();

        debug_assert_eq!(ptr as usize % layout.align(), 0);

        ptr
    }

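    /// Like `allocate_layout`, but the memory is zero-initialized. Known
    /// allocators get `alloc_zeroed`/`calloc` fast paths; any other zalloc
    /// falls back to `allocate_layout` followed by `write_bytes`.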
    fn allocate_layout_zeroed(&self, layout: Layout) -> *mut c_void {
        assert!(layout.align() <= ALIGN.into());

        #[cfg(feature = "rust-allocator")]
        if self.zalloc == RUST.zalloc {
            let ptr = unsafe { zalloc_rust_calloc(self.opaque, layout.size() as _, 1) };

            debug_assert_eq!(ptr as usize % layout.align(), 0);

            return ptr;
        }

        #[cfg(feature = "c-allocator")]
        if self.zalloc == C.zalloc {
            let alloc = Allocator {
                zalloc: zalloc_c_calloc,
                zfree: zfree_c,
                opaque: core::ptr::null_mut(),
                _marker: PhantomData,
            };

            return alloc.allocate_layout(layout);
        }

        let ptr = self.allocate_layout(layout);

        if !ptr.is_null() {
            unsafe { core::ptr::write_bytes(ptr, 0u8, layout.size()) };
        }

        ptr
    }

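    /// Allocates (uninitialized) memory for a single `T`, returning `None`
    /// on failure. The other `allocate_*` helpers follow the same convention.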
    pub fn allocate_raw<T>(&self) -> Option<NonNull<T>> {
        NonNull::new(self.allocate_layout(Layout::new::<T>()).cast())
    }

    pub fn allocate_slice_raw<T>(&self, len: usize) -> Option<NonNull<T>> {
        NonNull::new(self.allocate_layout(Layout::array::<T>(len).ok()?).cast())
    }

    pub fn allocate_zeroed_raw<T>(&self) -> Option<NonNull<T>> {
        NonNull::new(self.allocate_layout_zeroed(Layout::new::<T>()).cast())
    }

    pub fn allocate_zeroed_buffer(&self, len: usize) -> Option<NonNull<u8>> {
        let layout = Layout::array::<u8>(len).ok()?;
        NonNull::new(self.allocate_layout_zeroed(layout).cast())
    }

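    /// # Safety
    ///
    /// `ptr` must be null or originate from this allocator's `allocate_*`
    /// methods, and `len` must be the element count of the original
    /// allocation.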
    #[allow(unused)]
    pub unsafe fn deallocate<T>(&self, ptr: *mut T, len: usize) {
        if !ptr.is_null() {
            #[cfg(feature = "rust-allocator")]
            if self.zfree == RUST.zfree {
                assert_ne!(len, 0, "invalid size for {ptr:?}");
                let mut size = core::mem::size_of::<T>() * len;
                return unsafe { (RUST.zfree)(&mut size as *mut usize as *mut c_void, ptr.cast()) };
            }

            unsafe {
                // Recover the pointer that zalloc originally returned; it was
                // stored just below the aligned pointer by `allocate_layout`.
                let original_ptr = (ptr as *mut u8).sub(core::mem::size_of::<*const c_void>());
                let free_ptr = core::ptr::read_unaligned(original_ptr as *mut *mut c_void);

                (self.zfree)(self.opaque, free_ptr)
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicPtr, Ordering};
    use std::ptr;
    use std::sync::Mutex;

    use super::*;

    static PTR: AtomicPtr<c_void> = AtomicPtr::new(core::ptr::null_mut());
    static MUTEX: Mutex<()> = Mutex::new(());

    unsafe extern "C" fn unaligned_alloc(
        _opaque: *mut c_void,
        _items: c_uint,
        _size: c_uint,
    ) -> *mut c_void {
        PTR.load(Ordering::Relaxed)
    }

    unsafe extern "C" fn unaligned_free(_opaque: *mut c_void, ptr: *mut c_void) {
        let expected = PTR.load(Ordering::Relaxed);
        assert_eq!(expected, ptr)
    }

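    // Feeds `allocate_layout` pointers at each of 64 successive byte offsets
    // of a buffer, to check that the returned pointers are properly aligned
    // for `T` and that `deallocate` recovers the original unaligned pointer.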
    fn unaligned_allocator_help<T>() {
        let mut buf = [0u8; 1024];

        let _guard = MUTEX.lock().unwrap();

        for i in 0..64 {
            let ptr = unsafe { buf.as_mut_ptr().add(i).cast() };
            PTR.store(ptr, Ordering::Relaxed);

            let allocator = Allocator {
                zalloc: unaligned_alloc,
                zfree: unaligned_free,
                opaque: core::ptr::null_mut(),
                _marker: PhantomData,
            };

            let ptr = allocator.allocate_raw::<T>().unwrap().as_ptr();
            assert_eq!(ptr as usize % core::mem::align_of::<T>(), 0);
            unsafe { allocator.deallocate(ptr, 1) }

            let ptr = allocator.allocate_slice_raw::<T>(10).unwrap().as_ptr();
            assert_eq!(ptr as usize % core::mem::align_of::<T>(), 0);
            unsafe { allocator.deallocate(ptr, 10) }
        }
    }

    #[test]
    fn unaligned_allocator_0() {
        unaligned_allocator_help::<()>()
    }

    #[test]
    fn unaligned_allocator_1() {
        unaligned_allocator_help::<u8>()
    }

    #[test]
    fn unaligned_allocator_2() {
        unaligned_allocator_help::<u16>()
    }

    #[test]
    fn unaligned_allocator_4() {
        unaligned_allocator_help::<u32>()
    }

    #[test]
    fn unaligned_allocator_8() {
        unaligned_allocator_help::<u64>()
    }

    #[test]
    fn unaligned_allocator_16() {
        unaligned_allocator_help::<u128>()
    }

    #[test]
    fn unaligned_allocator_32() {
        #[repr(C, align(32))]
        struct Align32(u8);

        unaligned_allocator_help::<Align32>()
    }

    #[test]
    fn unaligned_allocator_64() {
        #[repr(C, align(64))]
        struct Align64(u8);

        unaligned_allocator_help::<Align64>()
    }

    fn test_allocate_zeroed_help(allocator: Allocator) {
        #[repr(C, align(64))]
        struct Align64(u8);

        let ptr = allocator.allocate_zeroed_raw::<Align64>();
        assert!(ptr.is_some());
        unsafe { allocator.deallocate(ptr.unwrap().as_ptr(), 1) };
    }

    #[test]
    fn test_allocate_zeroed() {
        #[cfg(feature = "rust-allocator")]
        test_allocate_zeroed_help(RUST);

        #[cfg(feature = "c-allocator")]
        test_allocate_zeroed_help(C);

        assert!(FAIL.allocate_raw::<u128>().is_none());
    }

    fn test_allocate_zeroed_buffer_help(allocator: Allocator) {
        let len = 42;
        let Some(buf) = allocator.allocate_zeroed_buffer(len) else {
            return;
        };

        let slice = unsafe { core::slice::from_raw_parts_mut(buf.as_ptr(), len) };

        assert_eq!(slice.iter().sum::<u8>(), 0);

        unsafe { allocator.deallocate(buf.as_ptr(), len) };
    }

    #[test]
    fn test_allocate_buffer_zeroed() {
        #[cfg(feature = "rust-allocator")]
        test_allocate_zeroed_buffer_help(RUST);

        #[cfg(feature = "c-allocator")]
        test_allocate_zeroed_buffer_help(C);

        test_allocate_zeroed_buffer_help(FAIL);
    }

    #[test]
    fn test_deallocate_null() {
        unsafe {
            #[cfg(feature = "rust-allocator")]
            (RUST.zfree)(core::ptr::null_mut(), core::ptr::null_mut());

            #[cfg(feature = "c-allocator")]
            (C.zfree)(core::ptr::null_mut(), core::ptr::null_mut());

            (FAIL.zfree)(core::ptr::null_mut(), core::ptr::null_mut());
        }
    }

    #[test]
    fn test_allocate_zero_size() {
        // Every allocator entry point returns null for zero-sized requests.
        // The `zalloc_rust*` functions only exist with the "rust-allocator"
        // feature, so those assertions are gated accordingly.
        unsafe {
            assert!(zalloc_c(ptr::null_mut(), 1, 0).is_null());
            assert!(zalloc_c(ptr::null_mut(), 0, 1).is_null());
            assert!(zalloc_c_calloc(ptr::null_mut(), 1, 0).is_null());
            assert!(zalloc_c_calloc(ptr::null_mut(), 0, 1).is_null());
            #[cfg(feature = "rust-allocator")]
            assert!(zalloc_rust(ptr::null_mut(), 1, 0).is_null());
            #[cfg(feature = "rust-allocator")]
            assert!(zalloc_rust(ptr::null_mut(), 0, 1).is_null());
            #[cfg(feature = "rust-allocator")]
            assert!(zalloc_rust_calloc(ptr::null_mut(), 1, 0).is_null());
            #[cfg(feature = "rust-allocator")]
            assert!(zalloc_rust_calloc(ptr::null_mut(), 0, 1).is_null());
        }
    }
}