// minimal_lexical/stackvec.rs
1#![cfg(not(feature = "alloc"))]
4#![doc(hidden)]
5
6use crate::bigint;
7use core::{cmp, mem, ops, ptr, slice};
8
/// A fixed-capacity, stack-allocated vector of big-integer limbs.
///
/// Invariant maintained by every method: the first `length` entries of
/// `data` are initialized; everything past `length` may be uninitialized.
#[derive(Clone)]
pub struct StackVec {
    // Raw storage; only the first `length` limbs are initialized.
    data: [mem::MaybeUninit<bigint::Limb>; bigint::BIGINT_LIMBS],
    // Number of initialized limbs, stored compactly as `u16`
    // (capacity is asserted to fit — see `set_len`).
    length: u16,
}
17
#[allow(clippy::new_without_default)]
impl StackVec {
    /// Construct an empty vector; the limb storage is left uninitialized.
    #[inline]
    pub const fn new() -> Self {
        Self {
            length: 0,
            data: [mem::MaybeUninit::uninit(); bigint::BIGINT_LIMBS],
        }
    }

    /// Construct a vector by copying an existing slice.
    ///
    /// Returns `None` if the slice does not fit in the fixed capacity.
    #[inline]
    pub fn try_from(x: &[bigint::Limb]) -> Option<Self> {
        let mut vec = Self::new();
        vec.try_extend(x)?;
        Some(vec)
    }

    /// Set the length of the vector without touching its buffer.
    ///
    /// # Safety
    ///
    /// Safe only if `len <= self.capacity()` and the caller has already
    /// initialized the first `len` limbs.
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        // `length` is stored as a `u16`, so the new value must fit in 16 bits.
        debug_assert!(len <= 0xffff);
        debug_assert!(len <= bigint::BIGINT_LIMBS);
        self.length = len as u16;
    }

    /// The number of initialized limbs in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.length as usize
    }

    /// Whether the vector contains no limbs.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The fixed capacity of the vector, in limbs.
    #[inline]
    pub const fn capacity(&self) -> usize {
        bigint::BIGINT_LIMBS as usize
    }

    /// Append a limb without bounds checking.
    ///
    /// # Safety
    ///
    /// Safe only if `self.len() < self.capacity()`.
    #[inline]
    pub unsafe fn push_unchecked(&mut self, value: bigint::Limb) {
        debug_assert!(self.len() < self.capacity());
        // SAFETY: caller guarantees there is a free slot at index `len()`.
        // The write happens before the length bump, preserving the
        // "first `length` limbs initialized" invariant.
        unsafe {
            ptr::write(self.as_mut_ptr().add(self.len()), value);
            self.length += 1;
        }
    }

    /// Append a limb, returning `None` if the vector is full.
    #[inline]
    pub fn try_push(&mut self, value: bigint::Limb) -> Option<()> {
        if self.len() < self.capacity() {
            // SAFETY: just checked there is room for one more limb.
            unsafe { self.push_unchecked(value) };
            Some(())
        } else {
            None
        }
    }

    /// Remove and return the last limb without bounds checking.
    ///
    /// # Safety
    ///
    /// Safe only if the vector is not empty.
    #[inline]
    pub unsafe fn pop_unchecked(&mut self) -> bigint::Limb {
        debug_assert!(!self.is_empty());
        // Decrement first so `len()` indexes the limb being removed.
        self.length -= 1;
        // SAFETY: the limb at the new `len()` was initialized (it was
        // within the old length).
        unsafe { ptr::read(self.as_mut_ptr().add(self.len())) }
    }

    /// Remove and return the last limb, or `None` if the vector is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<bigint::Limb> {
        if self.is_empty() {
            None
        } else {
            // SAFETY: just checked the vector is non-empty.
            unsafe { Some(self.pop_unchecked()) }
        }
    }

    /// Copy a slice onto the end of the vector without bounds checking.
    ///
    /// # Safety
    ///
    /// Safe only if `self.len() + slc.len() <= self.capacity()`.
    #[inline]
    pub unsafe fn extend_unchecked(&mut self, slc: &[bigint::Limb]) {
        let index = self.len();
        let new_len = index + slc.len();
        debug_assert!(self.len() + slc.len() <= self.capacity());
        let src = slc.as_ptr();
        // SAFETY: caller guarantees the combined length fits in capacity;
        // `slc` is a shared borrow so it cannot alias our mutable tail.
        unsafe {
            let dst = self.as_mut_ptr().add(index);
            ptr::copy_nonoverlapping(src, dst, slc.len());
            self.set_len(new_len);
        }
    }

    /// Copy a slice onto the end of the vector, returning `None` if the
    /// result would exceed the fixed capacity.
    #[inline]
    pub fn try_extend(&mut self, slc: &[bigint::Limb]) -> Option<()> {
        if self.len() + slc.len() <= self.capacity() {
            // SAFETY: just checked the combined length fits.
            unsafe { self.extend_unchecked(slc) };
            Some(())
        } else {
            None
        }
    }

    /// Shrink the stored length; no limbs are dropped (limbs appear to be
    /// plain `Copy` data — no drop glue is run anywhere in this type).
    ///
    /// # Safety
    ///
    /// Safe only if `len <= self.capacity()`; callers invoke it with
    /// `len <= self.len()` so no uninitialized limb becomes visible.
    unsafe fn truncate_unchecked(&mut self, len: usize) {
        debug_assert!(len <= self.capacity());
        self.length = len as u16;
    }

    /// Resize the vector to `len`, filling any new slots with `value`,
    /// without bounds checking.
    ///
    /// # Safety
    ///
    /// Safe only if `len <= self.capacity()`.
    #[inline]
    pub unsafe fn resize_unchecked(&mut self, len: usize, value: bigint::Limb) {
        debug_assert!(len <= self.capacity());
        let old_len = self.len();
        if len > old_len {
            // Growing: initialize each new slot before bumping the length.
            let count = len - old_len;
            for index in 0..count {
                // SAFETY: caller guarantees `len <= capacity`, so every
                // slot written at `old_len + index < len` is in bounds.
                unsafe {
                    let dst = self.as_mut_ptr().add(old_len + index);
                    ptr::write(dst, value);
                }
            }
            self.length = len as u16;
        } else {
            // Shrinking: just reduce the stored length.
            // SAFETY: `len <= old_len <= capacity`.
            unsafe { self.truncate_unchecked(len) };
        }
    }

    /// Resize the vector to `len`, filling new slots with `value`.
    /// Returns `None` if `len` exceeds the fixed capacity.
    #[inline]
    pub fn try_resize(&mut self, len: usize, value: bigint::Limb) -> Option<()> {
        if len > self.capacity() {
            None
        } else {
            // SAFETY: just checked `len <= capacity`.
            unsafe { self.resize_unchecked(len, value) };
            Some(())
        }
    }

    /// High 64 bits of the big integer plus a flag (delegates to
    /// `bigint::hi64`; presumably the flag marks truncated low bits —
    /// see `bigint`).
    #[inline(always)]
    pub fn hi64(&self) -> (u64, bool) {
        bigint::hi64(self)
    }

    /// Create a vector from a `u64` value (delegates to `bigint::from_u64`).
    #[inline(always)]
    pub fn from_u64(x: u64) -> Self {
        bigint::from_u64(x)
    }

    /// Normalize the big integer in place (delegates to
    /// `bigint::normalize`; presumably strips trailing zero limbs —
    /// see `bigint`).
    #[inline]
    pub fn normalize(&mut self) {
        bigint::normalize(self)
    }

    /// Whether the big integer is in normalized form (delegates to
    /// `bigint::is_normalized`).
    #[inline]
    pub fn is_normalized(&self) -> bool {
        bigint::is_normalized(self)
    }

    /// Add a limb-sized value in place; delegates to `bigint::small_add`,
    /// which returns `None` on failure (presumably capacity overflow).
    #[inline]
    pub fn add_small(&mut self, y: bigint::Limb) -> Option<()> {
        bigint::small_add(self, y)
    }

    /// Multiply by a limb-sized value in place; delegates to
    /// `bigint::small_mul`, which returns `None` on failure
    /// (presumably capacity overflow).
    #[inline]
    pub fn mul_small(&mut self, y: bigint::Limb) -> Option<()> {
        bigint::small_mul(self, y)
    }
}
251
252impl PartialEq for StackVec {
253 #[inline]
254 #[allow(clippy::op_ref)]
255 fn eq(&self, other: &Self) -> bool {
256 use core::ops::Deref;
257 self.len() == other.len() && self.deref() == other.deref()
258 }
259}
260
// Equality is a limb-by-limb comparison via `PartialEq`, which is
// reflexive for plain integer limbs, so full equivalence holds.
impl Eq for StackVec {
}
263
264impl cmp::PartialOrd for StackVec {
265 #[inline]
266 fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
267 Some(bigint::compare(self, other))
268 }
269}
270
impl cmp::Ord for StackVec {
    /// Total ordering via the arbitrary-precision limb comparison
    /// in `bigint`.
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        bigint::compare(self, other)
    }
}
277
278impl ops::Deref for StackVec {
279 type Target = [bigint::Limb];
280 #[inline]
281 fn deref(&self) -> &[bigint::Limb] {
282 unsafe {
285 let ptr = self.data.as_ptr() as *const bigint::Limb;
286 slice::from_raw_parts(ptr, self.len())
287 }
288 }
289}
290
291impl ops::DerefMut for StackVec {
292 #[inline]
293 fn deref_mut(&mut self) -> &mut [bigint::Limb] {
294 unsafe {
297 let ptr = self.data.as_mut_ptr() as *mut bigint::Limb;
298 slice::from_raw_parts_mut(ptr, self.len())
299 }
300 }
301}
302
impl ops::MulAssign<&[bigint::Limb]> for StackVec {
    /// Multiply in place by a large integer given as a limb slice.
    ///
    /// Panics if `bigint::large_mul` returns `None` (presumably when the
    /// product would exceed the fixed capacity — see `bigint`).
    #[inline]
    fn mul_assign(&mut self, rhs: &[bigint::Limb]) {
        bigint::large_mul(self, rhs).unwrap();
    }
}