use core::{
    alloc::Layout,
    cell::{Cell, RefCell},
    marker::PhantomData,
    cmp, mem, ptr, slice,
};

use crate::chunk::ArenaChunk;
use crate::{HUGE_PAGE, PAGE_SIZE};

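/// A bump allocator for values that never need `Drop` to run: memory is
/// reclaimed all at once when the arena is dropped, not per object.
///
/// `start` and `end` delimit the free region of the current chunk, and
/// allocation bumps `end` *downwards*, so aligning a fresh allocation is a
/// single `align_down` mask. Filled chunks are retained in `elems` until the
/// arena itself goes away.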
pub struct DroplessArena<'ctx> {
    elems: RefCell<Vec<ArenaChunk<u8>>>,
    start: Cell<*mut u8>,
    end: Cell<*mut u8>,
    _marker: PhantomData<&'ctx u8>,
}

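/// Allocation sizes are rounded up to this many bytes so `end` stays
/// word-aligned between allocations of differently-aligned types.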
const ALIGNMENT: usize = mem::size_of::<usize>();

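/// Rounds `val` down to the nearest multiple of `align` (a power of two).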
#[allow(clippy::inline_always)]
#[inline(always)]
const fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

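/// Rounds `val` up to the nearest multiple of `align` (a power of two).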
#[allow(clippy::inline_always)]
#[inline(always)]
const fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

#[allow(clippy::mut_from_ref)]
impl<'ctx> DroplessArena<'ctx> {
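    /// Tries to satisfy `layout` from the current chunk by bumping `end`
    /// downwards. Returns `None` when the chunk is exhausted; the caller
    /// (`alloc_raw`) then grows the arena and retries.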
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get().addr();
        let old_end = self.end.get();
        let end = old_end.addr();

        let bytes = align_up(layout.size(), ALIGNMENT);

        let sub = end.checked_sub(bytes)?;

        let new_end = align_down(sub, layout.align());
        if start <= new_end {
            let new_end = old_end.with_addr(new_end);
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }

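    /// Allocates a block of memory for `layout` and returns a pointer into
    /// the arena. Zero-sized layouts cost nothing and get a well-aligned
    /// dangling pointer; everything else bumps the current chunk, growing
    /// the arena whenever the chunk runs out.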
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        if layout.size() == 0 {
            // Zero-sized types need no arena space; hand back a dangling
            // pointer that satisfies `layout`'s alignment (`!0` would be
            // misaligned for any alignment above 1).
            return ptr::without_provenance_mut(layout.align());
        }
        loop {
            if let Some(ptr) = self.alloc_raw_without_grow(layout) {
                debug_assert!(!ptr.is_null());
                return ptr;
            }
            self.grow(layout);
        }
    }

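    /// Moves `value` into the arena and returns a mutable reference bound to
    /// the arena's `'ctx` lifetime.
    ///
    /// # Panics
    ///
    /// Panics if `T` needs `Drop`, since the arena never runs destructors.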
    #[allow(clippy::missing_panics_doc)]
    pub fn alloc<T>(&self, value: T) -> &'ctx mut T {
        assert!(!mem::needs_drop::<T>());

        let buf = self.alloc_raw(Layout::new::<T>()) as *mut T;

        unsafe {
            ptr::write(buf, value);
            &mut *buf
        }
    }

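    /// Writes the first `len` items of `iter` into the uninitialized buffer
    /// at `ptr`, then returns the buffer as an initialized slice.
    ///
    /// Callers must ensure `ptr` has room for `len` values of `T` and that
    /// `iter` yields at least `len` items; `alloc_iter` guarantees both via
    /// `ExactSizeIterator`.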
    fn fill_array<T, I>(mut iter: I, ptr: *mut T, len: usize) -> &'ctx mut [T]
    where
        I: Iterator<Item = T>,
    {
        for i in 0..len {
            let Some(elem) = iter.next() else {
                unreachable!()
            };

            unsafe { ptr.add(i).write(elem) };
        }
        unsafe { slice::from_raw_parts_mut(ptr, len) }
    }

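    /// Copies an exact-size iterator into the arena and returns it as a
    /// mutable slice.
    ///
    /// # Panics
    ///
    /// Panics if `T` needs `Drop`, if `T` is zero-sized, or if the total
    /// array size overflows `Layout`.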
    pub fn alloc_iter<T, I>(&self, iter: I) -> &'ctx mut [T]
    where
        I: IntoIterator<Item = T, IntoIter: ExactSizeIterator>,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);

        let iter = iter.into_iter();
        let length = iter.len();

        if length == 0 {
            return &mut [];
        }

        let ptr = self.alloc_raw(Layout::array::<T>(length).unwrap()) as *mut T;

        Self::fill_array(iter, ptr, length)
    }

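    /// Allocates a fresh chunk guaranteed to fit `layout` and retargets
    /// `start`/`end` at it. Exhausted chunks stay alive in `elems`, so
    /// pointers handed out earlier remain valid.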
    fn grow(&self, layout: Layout) {
        // Over-reserve so that rounding the size up and aligning `end` down
        // in `alloc_raw_without_grow` still fits inside the new chunk.
        let additional = layout.size() + cmp::max(ALIGNMENT, layout.align()) - 1;

        let mut elems = self.elems.borrow_mut();
        let mut new_cap;
        if let Some(last) = elems.last_mut() {
            // Double the previous chunk's capacity, capping the growth at
            // `HUGE_PAGE` bytes.
            new_cap = last.capacity().min(HUGE_PAGE / 2);
            new_cap *= 2;
        } else {
            new_cap = PAGE_SIZE;
        }

        new_cap = cmp::max(additional, new_cap);

        let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE_SIZE));

        self.start.set(chunk.start());

        // Keep `end` word-aligned: `alloc_raw_without_grow` rounds every
        // allocation size up to `ALIGNMENT`, which preserves this invariant.
        let end = align_down(chunk.end().addr(), ALIGNMENT);

        debug_assert!(self.start.get().addr() <= end);

        self.end.set(chunk.end().with_addr(end));

        elems.push(chunk);
    }
}

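/// Starts empty: with `start == end == null`, the very first allocation
/// fails its bump and falls through to `grow`.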
impl Default for DroplessArena<'_> {
    fn default() -> Self {
        Self {
            elems: RefCell::default(),
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            _marker: PhantomData,
        }
    }
}
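
// A minimal usage sketch, exercising only the public API above. Written as
// an ordinary unit-test module; it assumes tests run under a harness such as
// `cargo test`, and the iteration counts are illustrative, not required.
#[cfg(test)]
mod tests {
    use super::DroplessArena;

    #[test]
    fn alloc_values_and_slices() {
        let arena = DroplessArena::default();

        // Single values: the returned reference outlives the `&self` borrow.
        let x = arena.alloc(42_u64);
        assert_eq!(*x, 42);
        *x = 7;
        assert_eq!(*x, 7);

        // Slices come from any exact-size iterator.
        let squares = arena.alloc_iter((0..5_u32).map(|i| i * i));
        assert_eq!(squares, &[0, 1, 4, 9, 16]);

        // Enough small allocations to spill past the first chunk and force
        // at least one `grow`.
        for i in 0..10_000_u32 {
            let v = arena.alloc(i);
            assert_eq!(*v, i);
        }
    }
}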