#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "allocator-api", feature(allocator_api, alloc_layout_extra))]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(not(feature = "std"))]
use alloc::{boxed::Box, vec::Vec};
use core::{
    alloc::{GlobalAlloc, Layout},
    cell::UnsafeCell,
    mem::MaybeUninit,
    ptr::{self, NonNull},
};
use portable_atomic::{AtomicUsize, Ordering};
/// A minimal lock-free allocator that bumps downward from the end of its
/// storage. `dealloc` is a no-op, so memory is never reclaimed.
pub struct Heap<S: Storage> {
    // The backing memory; only ever used to derive the base pointer.
    storage: UnsafeCell<S>,
    // Bytes still free. Allocation moves this watermark downward.
    remained: AtomicUsize,
}

// SAFETY: `remained` is only mutated through atomic operations, and
// concurrent `alloc` calls hand out disjoint slices of `storage`.
unsafe impl<S: Storage> Sync for Heap<S> {}
impl<S: Storage> Heap<S> {
    /// Creates a heap over `storage`, treating its first `size` bytes as
    /// the arena.
    pub const fn new_with_storage(storage: S, size: usize) -> Self {
        Self {
            storage: UnsafeCell::new(storage),
            remained: AtomicUsize::new(size),
        }
    }

    /// Returns the number of bytes still available for allocation.
    pub fn remained(&self) -> usize {
        self.remained.load(Ordering::Relaxed)
    }
}

#[allow(clippy::new_without_default)]
impl<S: ConstStorage> Heap<S> {
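    /// Creates a heap whose storage and size are fixed at compile time.
    ///
    /// A minimal usage sketch; the crate name `bump_heap` is assumed here
    /// purely for illustration:
    ///
    /// ```ignore
    /// use bump_heap::{Heap, Inline};
    ///
    /// // A 4 KiB bump heap living entirely in static memory.
    /// #[global_allocator]
    /// static HEAP: Heap<Inline<4096>> = Heap::new();
    /// ```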
    pub const fn new() -> Self {
        Self::new_with_storage(S::INIT, S::SIZE)
    }
}

impl Heap<BoxedSlice> {
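    /// Creates a heap backed by a freshly allocated boxed slice of `size`
    /// bytes. Useful when the arena size is only known at run time.
    ///
    /// A sketch, again assuming the crate name `bump_heap`:
    ///
    /// ```ignore
    /// use bump_heap::Heap;
    ///
    /// let heap = Heap::new_boxed(1024);
    /// assert_eq!(heap.remained(), 1024);
    /// ```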
    pub fn new_boxed(size: usize) -> Self {
        Self::new_with_storage(BoxedSlice::new(size), size)
    }
}

impl Heap<Pointer> {
    /// Creates a heap with no usable memory; initialize it later with
    /// [`Heap::init_with_ptr`].
    pub const fn empty() -> Self {
        Self::new_with_storage(Pointer::empty(), 0)
    }

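    /// Points the heap at an externally provided memory region of `size`
    /// bytes starting at `address`.
    ///
    /// # Safety
    ///
    /// - `address..address + size` must be valid, writable memory that
    ///   outlives the heap and is used by nothing else.
    /// - This must be called before the first allocation, and at most once.
    ///
    /// A sketch with a static buffer (crate name `bump_heap` assumed):
    ///
    /// ```ignore
    /// use bump_heap::{Heap, Pointer};
    /// use core::mem::MaybeUninit;
    ///
    /// static HEAP: Heap<Pointer> = Heap::empty();
    /// static mut MEM: [MaybeUninit<u8>; 1024] = [MaybeUninit::uninit(); 1024];
    ///
    /// unsafe { HEAP.init_with_ptr(&raw mut MEM as usize, 1024) };
    /// ```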
    pub unsafe fn init_with_ptr(&self, address: usize, size: usize) {
        let s = unsafe { &mut *self.storage.get() };
        s.ptr = unsafe { NonNull::new_unchecked(address as *mut u8) };
        self.remained.store(size, Ordering::Release);
    }
}

unsafe impl<S: Storage> GlobalAlloc for Heap<S> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // `Layout` guarantees the alignment is a nonzero power of two, so
        // this mask rounds an offset down to a multiple of the alignment.
        let align_mask_to_round_down = !(layout.align() - 1);

        let mut old_remained = self.remained.load(Ordering::Relaxed);
        loop {
            if layout.size() > old_remained {
                return ptr::null_mut();
            }

            // Bump downward: take `size` bytes off the top of the free
            // region, then round the new watermark down so the returned
            // offset is aligned. This assumes the storage base itself is
            // at least as aligned as `layout.align()`.
            let remained = (old_remained - layout.size()) & align_mask_to_round_down;
            match self.remained.compare_exchange_weak(
                old_remained,
                remained,
                Ordering::SeqCst,
                Ordering::Relaxed,
            ) {
                // Another thread raced us; retry with the value it left.
                Err(x) => old_remained = x,
                Ok(_) => {
                    return unsafe { ((&mut *self.storage.get()).ptr().as_ptr()).add(remained) };
                }
            }
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // A bump allocator never reclaims memory; deallocation is a no-op.
    }
}

#[cfg(feature = "allocator-api")]
mod allocator_api {
    use super::*;
    use core::alloc::{AllocError, Allocator};

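    /// A sketch of using this heap through the unstable `Allocator` API
    /// (nightly only, with this crate's `allocator-api` feature enabled):
    ///
    /// ```ignore
    /// #![feature(allocator_api)]
    ///
    /// let heap = Heap::new_boxed(1024);
    /// let mut v = Vec::new_in(&heap);
    /// v.push(1u8);
    /// assert!(heap.remained() < 1024);
    /// ```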
    unsafe impl<S: Storage> Allocator for Heap<S> {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            match layout.size() {
                // Zero-sized allocations need no memory, only alignment.
                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
                size => NonNull::new(unsafe { GlobalAlloc::alloc(self, layout) })
                    .map_or(Err(AllocError), |allocation| {
                        Ok(NonNull::slice_from_raw_parts(allocation, size))
                    }),
            }
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            if layout.size() != 0 {
                unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
            }
        }
    }
}

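/// A source of backing memory for a [`Heap`].
///
/// A sketch of a custom implementation over some externally mapped region;
/// the `Mapped` type and the crate name `bump_heap` are invented for
/// illustration:
///
/// ```ignore
/// use bump_heap::Storage;
/// use core::ptr::NonNull;
///
/// struct Mapped {
///     base: NonNull<u8>,
/// }
///
/// impl Storage for Mapped {
///     unsafe fn ptr(&mut self) -> NonNull<u8> {
///         self.base
///     }
/// }
/// ```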
pub trait Storage {
    /// Returns the base pointer of the backing memory region.
    ///
    /// # Safety
    ///
    /// The returned pointer must point to the start of a region that is
    /// valid for reads and writes for the size the [`Heap`] was given.
    unsafe fn ptr(&mut self) -> NonNull<u8>;
}

pub trait ConstStorage: Storage {
    /// The initial, compile-time value of the storage.
    const INIT: Self;
    /// The usable size of the storage, in bytes.
    const SIZE: usize;
}

/// Inline storage: a `SIZE`-byte buffer held directly inside the [`Heap`]
/// value, so the whole allocator can live in a `static`.
pub struct Inline<const SIZE: usize> {
    // Note: this buffer only has the alignment of `u8`; the allocator
    // aligns offsets relative to its base address.
    buf: [MaybeUninit<u8>; SIZE],
}

#[allow(clippy::new_without_default)]
impl<const SIZE: usize> Inline<SIZE> {
    pub const fn new() -> Self {
        Self {
            buf: [MaybeUninit::uninit(); SIZE],
        }
    }
}

impl<const SIZE: usize> ConstStorage for Inline<SIZE> {
    const INIT: Self = Self::new();
    const SIZE: usize = SIZE;
}

impl<const SIZE: usize> Storage for Inline<SIZE> {
    #[inline]
    unsafe fn ptr(&mut self) -> NonNull<u8> {
        unsafe { NonNull::new_unchecked(self.buf.as_mut_ptr().cast()) }
    }
}

/// Pointer storage: borrows an external memory region supplied at run time
/// via [`Heap::init_with_ptr`].
pub struct Pointer {
    // Dangling until `init_with_ptr` stores the real base address.
    ptr: NonNull<u8>,
}

impl Pointer {
    const fn empty() -> Self {
        Self {
            ptr: NonNull::dangling(),
        }
    }
}

impl Storage for Pointer {
    #[inline]
    unsafe fn ptr(&mut self) -> NonNull<u8> {
        self.ptr
    }
}

/// Boxed storage: a buffer allocated from the system allocator, for heaps
/// whose size is only known at run time.
pub struct BoxedSlice {
    buf: Box<[MaybeUninit<u8>]>,
}

impl BoxedSlice {
    fn new(size: usize) -> Self {
        let mut v = Vec::with_capacity(size);
        // SAFETY: the elements are `MaybeUninit<u8>`, so they are valid
        // even when left uninitialized.
        unsafe { v.set_len(size) }
        Self {
            buf: v.into_boxed_slice(),
        }
    }
}

impl Storage for BoxedSlice {
    #[inline]
    unsafe fn ptr(&mut self) -> NonNull<u8> {
        unsafe { NonNull::new_unchecked(self.buf.as_mut_ptr().cast()) }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    static HEAP: Heap<Inline<100>> = Heap::new();
    static HEAP_P: Heap<Pointer> = Heap::empty();

    #[test]
    fn test_heap() {
        assert_eq!(HEAP.remained.load(Ordering::Relaxed), 100);
        let p0 = unsafe { (&mut *HEAP.storage.get()).buf.as_ptr() as usize };
        let p1 = unsafe { (&mut *HEAP.storage.get()).ptr() }.as_ptr();
        assert_eq!(p0, p1 as usize);
        // u64: 100 - 8 = 92, rounded down to 88 for 8-byte alignment.
        let p2 = unsafe { HEAP.alloc(Layout::new::<u64>()) };
        assert_eq!(HEAP.remained.load(Ordering::Relaxed), 88);
        assert_eq!(unsafe { p2.offset_from(p1) }, 88);

        // u32: 88 - 4 = 84, already 4-byte aligned.
        unsafe { HEAP.alloc(Layout::new::<u32>()) };
        assert_eq!(HEAP.remained.load(Ordering::Relaxed), 84);
    }

    #[test]
    fn test_heap_pointer() {
        const HEAP_SIZE: usize = 100;
        static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
        unsafe { HEAP_P.init_with_ptr(&raw mut HEAP_MEM as usize, HEAP_SIZE) }
        let p0 = &raw mut HEAP_MEM as usize;
        let p1 = unsafe { (&mut *HEAP_P.storage.get()).ptr() }.as_ptr();
        assert_eq!(p0, p1 as usize);
        let p2 = unsafe { HEAP_P.alloc(Layout::new::<u64>()) };
        assert_eq!(HEAP_P.remained.load(Ordering::Relaxed), 88);
        assert_eq!(unsafe { p2.offset_from(p1) }, 88);
    }

    #[test]
    fn test_heap_dynamic() {
        let heap = Heap::new_boxed(100);
        assert_eq!(heap.remained.load(Ordering::Relaxed), 100);
        let p0 = unsafe { (&mut *heap.storage.get()).buf.as_ptr() as usize };
        let p1 = unsafe { (&mut *heap.storage.get()).ptr() }.as_ptr();
        assert_eq!(p0, p1 as usize);
        let p2 = unsafe { heap.alloc(Layout::new::<u64>()) };
        assert_eq!(heap.remained.load(Ordering::Relaxed), 88);
        assert_eq!(unsafe { p2.offset_from(p1) }, 88);

        unsafe { heap.alloc(Layout::new::<u32>()) };
        assert_eq!(heap.remained.load(Ordering::Relaxed), 84);
    }
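
    #[test]
    fn test_align_rounds_down() {
        // A worked example of the bump-down arithmetic: from 100 bytes
        // remaining, a 1-byte allocation with 16-byte alignment leaves
        // (100 - 1) & !15 = 96 bytes.
        let heap = Heap::new_boxed(100);
        let layout = Layout::from_size_align(1, 16).unwrap();
        unsafe { heap.alloc(layout) };
        assert_eq!(heap.remained(), 96);
    }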

    #[test]
    fn test_heap_local() {
        let _heap: Heap<Inline<100>> = Heap::new();
    }
}