use crate::{
    raw::{
        Header, InnerNode, InnerNode16, InnerNode256, InnerNodeCompressed, Node, NodeType,
        OpaqueNodePtr,
    },
    rust_nightly_apis::{
        assume, maybe_uninit_slice_assume_init_mut, maybe_uninit_slice_assume_init_ref,
    },
};
use std::{
    cmp::Ordering,
    error::Error,
    fmt,
    iter::{Enumerate, FusedIterator},
    mem::{self, MaybeUninit},
    ops::Bound,
    slice::Iter,
};

#[cfg(feature = "nightly")]
use std::{
    iter::{FilterMap, Map},
    simd::{cmp::SimdPartialEq, u8x64},
};

/// A compact index into the `child_pointers` array of an [`InnerNode48`].
///
/// Values in `0..LIMIT` are valid indices; the value `LIMIT` itself is
/// reserved as the [`EMPTY`][Self::EMPTY] sentinel marking an unoccupied
/// slot.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(transparent)]
pub struct RestrictedNodeIndex<const LIMIT: u8>(u8);

impl<const LIMIT: u8> RestrictedNodeIndex<LIMIT> {
    /// Sentinel value for an unoccupied slot.
    pub const EMPTY: Self = RestrictedNodeIndex(LIMIT);

    /// Returns true if the index is the empty sentinel value.
    pub fn is_empty(self) -> bool {
        self == Self::EMPTY
    }
}

impl<const LIMIT: u8> From<RestrictedNodeIndex<LIMIT>> for u8 {
    fn from(src: RestrictedNodeIndex<LIMIT>) -> Self {
        src.0
    }
}

impl<const LIMIT: u8> From<RestrictedNodeIndex<LIMIT>> for usize {
    fn from(src: RestrictedNodeIndex<LIMIT>) -> Self {
        usize::from(src.0)
    }
}

impl<const LIMIT: u8> TryFrom<usize> for RestrictedNodeIndex<LIMIT> {
    type Error = TryFromByteError;

    fn try_from(value: usize) -> Result<Self, Self::Error> {
        if value < usize::from(LIMIT) {
            Ok(RestrictedNodeIndex(value as u8))
        } else {
            Err(TryFromByteError(LIMIT, value))
        }
    }
}

impl<const LIMIT: u8> TryFrom<u8> for RestrictedNodeIndex<LIMIT> {
    type Error = TryFromByteError;

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        if value < LIMIT {
            Ok(RestrictedNodeIndex(value))
        } else {
            Err(TryFromByteError(LIMIT, usize::from(value)))
        }
    }
}

/// The `EMPTY` sentinel is unordered: any comparison involving it returns
/// `None`, which lets callers skip empty slots with a single `partial_cmp`
/// check.
impl<const LIMIT: u8> PartialOrd for RestrictedNodeIndex<LIMIT> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        if self.0 == LIMIT || other.0 == LIMIT {
            None
        } else {
            Some(self.0.cmp(&other.0))
        }
    }
}

/// The error type returned when attempting to construct an index outside
/// the accepted range of a [`RestrictedNodeIndex`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TryFromByteError(u8, usize);

impl fmt::Display for TryFromByteError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Input value [{}] is greater than or equal to the limit [{}] for a \
             RestrictedNodeIndex.",
            self.1, self.0
        )
    }
}

impl Error for TryFromByteError {}

/// Node that references between 17 and 48 children.
///
/// Lookups go through one level of indirection: the key byte indexes into
/// `child_indices`, and the index stored there selects the slot in
/// `child_pointers` that holds the actual child pointer.
#[repr(C, align(8))]
pub struct InnerNode48<K, V, const PREFIX_LEN: usize> {
    /// The common node fields.
    pub header: Header<PREFIX_LEN>,
    /// An array that maps key bytes (the array index) to the index of the
    /// child in the `child_pointers` array.
    ///
    /// All entries are initialized to `RestrictedNodeIndex::<48>::EMPTY`
    /// when the node is constructed.
    pub child_indices: [RestrictedNodeIndex<48>; 256],
    /// An element of this array is initialized if and only if some entry in
    /// `child_indices` points to it.
    pub child_pointers: [MaybeUninit<OpaqueNodePtr<K, V, PREFIX_LEN>>; 48],
}

impl<K, V, const PREFIX_LEN: usize> fmt::Debug for InnerNode48<K, V, PREFIX_LEN> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("InnerNode48")
            .field("header", &self.header)
            .field("child_indices", &self.child_indices)
            .field("child_pointers", &self.child_pointers)
            .finish()
    }
}

impl<K, V, const PREFIX_LEN: usize> Clone for InnerNode48<K, V, PREFIX_LEN> {
    fn clone(&self) -> Self {
        Self {
            header: self.header.clone(),
            child_indices: self.child_indices,
            child_pointers: self.child_pointers,
        }
    }
}

impl<K, V, const PREFIX_LEN: usize> InnerNode48<K, V, PREFIX_LEN> {
    /// Returns the initialized portion of the `child_pointers` array, one
    /// entry per current child.
    pub fn initialized_child_pointers(&self) -> &[OpaqueNodePtr<K, V, PREFIX_LEN>] {
        // SAFETY: The first `num_children` elements of `child_pointers` are
        // always initialized, because children are written densely from the
        // front of the array and `num_children` is kept in sync.
        unsafe {
            assume!(self.header.num_children() <= self.child_pointers.len());
            maybe_uninit_slice_assume_init_ref(&self.child_pointers[..self.header.num_children()])
        }
    }
}

impl<K, V, const PREFIX_LEN: usize> Node<PREFIX_LEN> for InnerNode48<K, V, PREFIX_LEN> {
    type Key = K;
    type Value = V;

    const TYPE: NodeType = NodeType::Node48;
}

impl<K, V, const PREFIX_LEN: usize> InnerNode<PREFIX_LEN> for InnerNode48<K, V, PREFIX_LEN> {
    type GrownNode = InnerNode256<K, V, PREFIX_LEN>;
    #[cfg(not(feature = "nightly"))]
    type Iter<'a>
        = Node48Iter<'a, K, V, PREFIX_LEN>
    where
        Self: 'a;
    #[cfg(feature = "nightly")]
    type Iter<'a>
        = Map<
        FilterMap<
            Enumerate<Iter<'a, RestrictedNodeIndex<48>>>,
            impl FnMut((usize, &'a RestrictedNodeIndex<48>)) -> Option<(u8, usize)>,
        >,
        impl FnMut((u8, usize)) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>),
    >
    where
        Self: 'a;
    type ShrunkNode = InnerNode16<K, V, PREFIX_LEN>;

    fn header(&self) -> &Header<PREFIX_LEN> {
        &self.header
    }

    fn from_header(header: Header<PREFIX_LEN>) -> Self {
        InnerNode48 {
            header,
            child_indices: [RestrictedNodeIndex::<48>::EMPTY; 256],
            child_pointers: [MaybeUninit::uninit(); 48],
        }
    }

    fn lookup_child(&self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        let index = &self.child_indices[usize::from(key_fragment)];
        let child_pointers = self.initialized_child_pointers();
        if !index.is_empty() {
            let idx = usize::from(*index);

            #[allow(unused_unsafe)]
            unsafe {
                // SAFETY: A non-`EMPTY` index always points to one of the
                // `num_children` initialized child pointers.
                assume!(idx < child_pointers.len());
            }
            Some(child_pointers[idx])
        } else {
            None
        }
    }

    fn write_child(&mut self, key_fragment: u8, child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let key_fragment_idx = usize::from(key_fragment);
        let child_index = if self.child_indices[key_fragment_idx] == RestrictedNodeIndex::EMPTY {
            // This key byte is new: hand out the next free slot in
            // `child_pointers`.
            let child_index = self.header.num_children();
            debug_assert!(child_index < self.child_pointers.len(), "node is full");

            // SAFETY: The conversion only fails for values >= 48, which
            // cannot happen while the node is not full.
            self.child_indices[key_fragment_idx] =
                unsafe { RestrictedNodeIndex::try_from(child_index).unwrap_unchecked() };
            self.header.inc_num_children();
            child_index
        } else {
            // The key byte is already present: overwrite its existing slot.
            usize::from(self.child_indices[key_fragment_idx])
        };

        #[allow(unused_unsafe)]
        unsafe {
            // SAFETY: `child_index` is either an existing in-bounds slot or
            // `num_children`, which is bounded to < 48 while the node is not
            // full.
            assume!(child_index < self.child_pointers.len());
        }
        self.child_pointers[child_index].write(child_pointer);
    }

    fn remove_child(&mut self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        let restricted_index = self.child_indices[usize::from(key_fragment)];
        if restricted_index.is_empty() {
            return None;
        }

        // Take ownership of the removed pointer, leaving an uninitialized
        // value in its place.
        let child_ptr = mem::replace(
            &mut self.child_pointers[usize::from(restricted_index)],
            MaybeUninit::uninit(),
        );

        // Close the gap by shifting the trailing pointers down one slot.
        self.child_pointers.copy_within(
            (usize::from(restricted_index) + 1)..self.header.num_children(),
            usize::from(restricted_index),
        );

        // Fix up every index that pointed past the removed slot. `EMPTY`
        // entries compare as `None`, so the `partial_cmp` check skips them.
        for other_restrict_index in &mut self.child_indices {
            if matches!(
                restricted_index.partial_cmp(other_restrict_index),
                Some(Ordering::Less)
            ) {
                // PANIC SAFETY: This will not panic because the decremented
                // value comes from an existing index that is already < 48.
                *other_restrict_index =
                    RestrictedNodeIndex::try_from(other_restrict_index.0 - 1).unwrap();
            }
        }

        self.child_indices[usize::from(key_fragment)] = RestrictedNodeIndex::EMPTY;
        self.header.dec_num_children();
        // SAFETY: The slot was initialized because a non-`EMPTY` entry in
        // `child_indices` pointed at it.
        Some(unsafe { MaybeUninit::assume_init(child_ptr) })
    }

    fn grow(&self) -> Self::GrownNode {
        let header = self.header.clone();
        let mut child_pointers = [None; 256];
        let initialized_child_pointers = self.initialized_child_pointers();
        for (key_fragment, idx) in self.child_indices.iter().enumerate() {
            if idx.is_empty() {
                continue;
            }

            let idx = usize::from(*idx);

            #[allow(unused_unsafe)]
            unsafe {
                // SAFETY: A non-`EMPTY` index always points to one of the
                // initialized child pointers.
                assume!(idx < initialized_child_pointers.len());
            }
            let child_pointer = initialized_child_pointers[idx];
            child_pointers[key_fragment] = Some(child_pointer);
        }

        InnerNode256 {
            header,
            child_pointers,
        }
    }

    fn shrink(&self) -> Self::ShrunkNode {
        assert!(
            self.header.num_children() <= 16,
            "Cannot shrink a Node48 when it has more than 16 children. Currently has [{}] \
             children.",
            self.header.num_children()
        );

        let header = self.header.clone();

        let mut key_and_child_ptrs = [MaybeUninit::uninit(); 16];

        for (idx, value) in self.iter().enumerate() {
            key_and_child_ptrs[idx].write(value);
        }

        let init_key_and_child_ptrs = {
            // SAFETY: The loop above wrote exactly one element per child, so
            // the first `num_children` elements are initialized.
            let init_key_and_child_ptrs = unsafe {
                maybe_uninit_slice_assume_init_mut(&mut key_and_child_ptrs[..header.num_children()])
            };

            init_key_and_child_ptrs.sort_unstable_by_key(|(key_byte, _)| *key_byte);

            init_key_and_child_ptrs
        };

        let mut keys = [MaybeUninit::uninit(); 16];
        let mut child_pointers = [MaybeUninit::uninit(); 16];

        for (idx, (key_byte, child_ptr)) in init_key_and_child_ptrs.iter().copied().enumerate() {
            keys[idx].write(key_byte);
            child_pointers[idx].write(child_ptr);
        }

        InnerNodeCompressed {
            header,
            keys,
            child_pointers,
        }
    }

    fn iter(&self) -> Self::Iter<'_> {
        let child_pointers = self.initialized_child_pointers();

        #[cfg(not(feature = "nightly"))]
        {
            Node48Iter {
                it: self.child_indices.iter().enumerate(),
                child_pointers,
            }
        }

        #[cfg(feature = "nightly")]
        {
            self.child_indices
                .iter()
                .enumerate()
                .filter_map(|(key, idx)| {
                    (!idx.is_empty()).then_some((key as u8, usize::from(*idx)))
                })
                .map(|(key, idx)| unsafe {
                    // SAFETY: A non-`EMPTY` index is always in bounds of the
                    // initialized child pointers.
                    (key, *child_pointers.get_unchecked(idx))
                })
        }
    }

    fn range(
        &self,
        bound: impl std::ops::RangeBounds<u8>,
    ) -> impl DoubleEndedIterator<Item = (u8, OpaqueNodePtr<Self::Key, Self::Value, PREFIX_LEN>)>
           + FusedIterator {
        match (bound.start_bound(), bound.end_bound()) {
            (Bound::Excluded(s), Bound::Excluded(e)) if s == e => {
                panic!("range start and end are equal and excluded: ({s:?})")
            },
            (
                Bound::Included(s) | Bound::Excluded(s),
                Bound::Included(e) | Bound::Excluded(e),
            ) if s > e => {
                panic!("range start ({s:?}) is greater than range end ({e:?})")
            },
            _ => {},
        }

        let child_pointers = self.initialized_child_pointers();

        let start = bound.start_bound().map(|val| usize::from(*val));
        // `enumerate` below counts from the start of the sliced
        // `child_indices`, so remember the offset back to absolute key
        // bytes.
        let key_offset = match bound.start_bound() {
            Bound::Included(val) => *val,
            Bound::Excluded(val) => val.saturating_add(1),
            Bound::Unbounded => 0,
        };
        let end = bound.end_bound().map(|val| usize::from(*val));

        self.child_indices[(start, end)]
            .iter()
            .enumerate()
            .filter_map(|(key, idx)| {
                (!idx.is_empty()).then_some((key as u8, usize::from(*idx)))
            })
            .map(move |(key, idx)| unsafe {
                // SAFETY: A non-`EMPTY` index is always in bounds of the
                // initialized child pointers.
                (
                    key.saturating_add(key_offset),
                    *child_pointers.get_unchecked(idx),
                )
            })
    }

    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        // SAFETY: `RestrictedNodeIndex<48>` is `#[repr(transparent)]` over
        // `u8`, so the array of indices can be reinterpreted as raw bytes.
        let child_indices: &[u8; 256] = unsafe { std::mem::transmute(&self.child_indices) };
        // Compare each 64-byte chunk against the `EMPTY` sentinel (48); a
        // set bit in the resulting mask marks an empty slot.
        let empty = u8x64::splat(48);
        let r0 = u8x64::from_array(child_indices[0..64].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r1 = u8x64::from_array(child_indices[64..128].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r2 = u8x64::from_array(child_indices[128..192].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r3 = u8x64::from_array(child_indices[192..256].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();

        // The first zero bit across the four masks marks the smallest
        // occupied key byte.
        let key = if r0 != u64::MAX {
            r0.trailing_ones()
        } else if r1 != u64::MAX {
            r1.trailing_ones() + 64
        } else if r2 != u64::MAX {
            r2.trailing_ones() + 128
        } else {
            r3.trailing_ones() + 192
        } as usize;

        unsafe {
            // SAFETY: An inner node always has at least one child, so one of
            // the masks has a zero bit and `key` is at most 255.
            assume!(key < self.child_indices.len());
        }

        let idx = usize::from(self.child_indices[key]);
        let child_pointers = self.initialized_child_pointers();

        unsafe {
            // SAFETY: A non-`EMPTY` index always points to one of the
            // initialized child pointers.
            assume!(idx < child_pointers.len());
        }

        (key as u8, child_pointers[idx])
    }

    #[cfg(not(feature = "nightly"))]
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        for (key, idx) in self.child_indices.iter().enumerate() {
            if idx.is_empty() {
                continue;
            }
            let child_pointers = self.initialized_child_pointers();
            return (key as u8, child_pointers[usize::from(*idx)]);
        }
        unreachable!("inner node must have non-zero number of children");
    }

    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        // SAFETY: `RestrictedNodeIndex<48>` is `#[repr(transparent)]` over
        // `u8`, so the array of indices can be reinterpreted as raw bytes.
        let child_indices: &[u8; 256] = unsafe { std::mem::transmute(&self.child_indices) };
        // Compare each 64-byte chunk against the `EMPTY` sentinel (48); a
        // set bit in the resulting mask marks an empty slot.
        let empty = u8x64::splat(48);
        let r0 = u8x64::from_array(child_indices[0..64].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r1 = u8x64::from_array(child_indices[64..128].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r2 = u8x64::from_array(child_indices[128..192].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r3 = u8x64::from_array(child_indices[192..256].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();

        // The last zero bit across the four masks, scanning from the top,
        // marks the largest occupied key byte.
        let key = if r3 != u64::MAX {
            255 - r3.leading_ones()
        } else if r2 != u64::MAX {
            191 - r2.leading_ones()
        } else if r1 != u64::MAX {
            127 - r1.leading_ones()
        } else {
            63 - r0.leading_ones()
        } as usize;

        unsafe {
            // SAFETY: An inner node always has at least one child, so one of
            // the masks has a zero bit and `key` is at most 255.
            assume!(key < self.child_indices.len());
        }

        let idx = usize::from(self.child_indices[key]);
        let child_pointers = self.initialized_child_pointers();

        unsafe {
            // SAFETY: A non-`EMPTY` index always points to one of the
            // initialized child pointers.
            assume!(idx < child_pointers.len());
        }

        (key as u8, child_pointers[idx])
    }

    #[cfg(not(feature = "nightly"))]
    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        for (key, idx) in self.child_indices.iter().enumerate().rev() {
            if idx.is_empty() {
                continue;
            }
            let child_pointers = self.initialized_child_pointers();
            return (key as u8, child_pointers[usize::from(*idx)]);
        }
        unreachable!("inner node must have non-zero number of children");
    }
}

/// An iterator over all of the children of an [`InnerNode48`], in key-byte
/// order.
#[cfg(not(feature = "nightly"))]
pub struct Node48Iter<'a, K, V, const PREFIX_LEN: usize> {
    pub(crate) it: Enumerate<Iter<'a, RestrictedNodeIndex<48>>>,
    pub(crate) child_pointers: &'a [OpaqueNodePtr<K, V, PREFIX_LEN>],
}

#[cfg(not(feature = "nightly"))]
impl<K, V, const PREFIX_LEN: usize> Iterator for Node48Iter<'_, K, V, PREFIX_LEN> {
    type Item = (u8, OpaqueNodePtr<K, V, PREFIX_LEN>);

    fn next(&mut self) -> Option<Self::Item> {
        for (key, idx) in self.it.by_ref() {
            if idx.is_empty() {
                continue;
            }
            let key = key as u8;
            // SAFETY: A non-`EMPTY` index is always in bounds of the
            // initialized child pointers.
            let child_pointer = unsafe { *self.child_pointers.get_unchecked(usize::from(*idx)) };
            return Some((key, child_pointer));
        }
        None
    }
}

#[cfg(not(feature = "nightly"))]
impl<K, V, const PREFIX_LEN: usize> DoubleEndedIterator for Node48Iter<'_, K, V, PREFIX_LEN> {
    fn next_back(&mut self) -> Option<Self::Item> {
        while let Some((key, idx)) = self.it.next_back() {
            if idx.is_empty() {
                continue;
            }
            let key = key as u8;
            // SAFETY: A non-`EMPTY` index is always in bounds of the
            // initialized child pointers.
            let child_pointer = unsafe { *self.child_pointers.get_unchecked(usize::from(*idx)) };
            return Some((key, child_pointer));
        }
        None
    }
}

#[cfg(not(feature = "nightly"))]
impl<K, V, const PREFIX_LEN: usize> FusedIterator for Node48Iter<'_, K, V, PREFIX_LEN> {}

#[cfg(test)]
mod tests {
    use std::ops::{Bound, RangeBounds};

    use crate::raw::{
        representation::tests::{
            inner_node_min_max_test, inner_node_remove_child_test, inner_node_shrink_test,
            inner_node_write_child_test, FixtureReturn,
        },
        LeafNode, NodePtr,
    };

    use super::*;

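    // Added sanity check (a minimal sketch using only the APIs defined in
    // this file): exercises the `RestrictedNodeIndex` bounds and the
    // `EMPTY` sentinel behavior documented above.
    #[test]
    fn restricted_index_bounds_and_sentinel() {
        // Values strictly below the limit convert successfully.
        assert_eq!(
            u8::from(RestrictedNodeIndex::<48>::try_from(47u8).unwrap()),
            47
        );
        // The limit itself is rejected, since it is reserved for `EMPTY`.
        assert_eq!(
            RestrictedNodeIndex::<48>::try_from(48u8),
            Err(TryFromByteError(48, 48))
        );
        assert!(RestrictedNodeIndex::<48>::EMPTY.is_empty());
        // `EMPTY` is unordered relative to every index, including itself.
        assert_eq!(
            RestrictedNodeIndex::<48>::EMPTY.partial_cmp(&RestrictedNodeIndex::<48>::EMPTY),
            None
        );
    }
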
    #[test]
    fn lookup() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l3 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        assert!(n.lookup_child(123).is_none());

        n.header.inc_num_children();
        n.header.inc_num_children();
        n.header.inc_num_children();

        n.child_indices[1] = 2usize.try_into().unwrap();
        n.child_indices[3] = 0usize.try_into().unwrap();
        n.child_indices[123] = 1usize.try_into().unwrap();

        n.child_pointers[0].write(l1_ptr);
        n.child_pointers[1].write(l2_ptr);
        n.child_pointers[2].write(l3_ptr);

        assert_eq!(n.lookup_child(123), Some(l2_ptr));
    }
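
    // Added sketch of the `initialized_child_pointers` invariant described
    // above: the returned slice has one entry per child, in insertion
    // order, regardless of key order.
    #[test]
    fn initialized_child_pointers_tracks_insertions() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        assert!(n.initialized_child_pointers().is_empty());

        n.write_child(42, l1_ptr);
        n.write_child(7, l2_ptr);

        // Slots are handed out in insertion order, not key order.
        assert_eq!(n.initialized_child_pointers(), &[l1_ptr, l2_ptr]);
    }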

    #[test]
    fn write_child() {
        inner_node_write_child_test(InnerNode48::<_, _, 16>::empty(), 48)
    }
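
    // Added check that `write_child` on an existing key byte reuses the
    // key's slot instead of consuming a new one, per the branch in
    // `write_child` above.
    #[test]
    fn write_child_overwrite_existing_key() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        n.write_child(85, l1_ptr);
        n.write_child(85, l2_ptr);

        assert_eq!(n.lookup_child(85), Some(l2_ptr));
        assert_eq!(n.header.num_children(), 1);
    }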

    #[test]
    fn remove_child() {
        inner_node_remove_child_test(InnerNode48::<_, _, 16>::empty(), 48)
    }
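
    // Added sketch of the compaction performed by `remove_child`: removing
    // a child shifts later pointers down and rewrites their indices, so the
    // remaining children stay reachable.
    #[test]
    fn remove_child_compacts_remaining_children() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l3 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n.write_child(10, l1_ptr);
        n.write_child(20, l2_ptr);
        n.write_child(30, l3_ptr);

        assert_eq!(n.remove_child(20), Some(l2_ptr));
        // The child written after the removed one is still reachable
        // through its rewritten index.
        assert_eq!(n.lookup_child(10), Some(l1_ptr));
        assert_eq!(n.lookup_child(30), Some(l3_ptr));
        assert_eq!(n.lookup_child(20), None);
    }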

    #[test]
    #[should_panic]
    fn write_child_full_panic() {
        inner_node_write_child_test(InnerNode48::<_, _, 16>::empty(), 49);
    }

    #[test]
    fn grow() {
        let mut n48 = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(123, l2_ptr);
        n48.write_child(1, l3_ptr);

        let n256 = n48.grow();

        assert_eq!(n256.lookup_child(3), Some(l1_ptr));
        assert_eq!(n256.lookup_child(123), Some(l2_ptr));
        assert_eq!(n256.lookup_child(1), Some(l3_ptr));
        assert_eq!(n256.lookup_child(4), None);
    }

    #[test]
    fn shrink() {
        inner_node_shrink_test(InnerNode48::<_, _, 16>::empty(), 16);
    }

    #[test]
    #[should_panic = "Cannot shrink a Node48 when it has more than 16 children. Currently has [17] \
                      children."]
    fn shrink_too_many_children_panic() {
        inner_node_shrink_test(InnerNode48::<_, _, 16>::empty(), 17);
    }

    #[test]
    fn min_max() {
        inner_node_min_max_test(InnerNode48::<_, _, 16>::empty(), 48);
    }
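
    // Added spot check of `min`/`max` on a sparse node, complementing the
    // exhaustive helper above; it covers both the SIMD and fallback paths,
    // depending on the enabled features.
    #[test]
    fn min_max_sparse() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        n.write_child(5, l1_ptr);
        n.write_child(200, l2_ptr);

        assert_eq!(n.min(), (5, l1_ptr));
        assert_eq!(n.max(), (200, l2_ptr));
    }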

    fn fixture() -> FixtureReturn<InnerNode48<Box<[u8]>, (), 16>, 4> {
        let mut n48 = InnerNode48::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
        let l4_ptr = NodePtr::from(&mut l4).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(255, l2_ptr);
        n48.write_child(0u8, l3_ptr);
        n48.write_child(85, l4_ptr);

        (n48, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
    }

    #[test]
    fn iterate() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let mut iter = node.iter();

        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn iterate_rev() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let mut iter = node.iter().rev();

        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn range_iterate() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        #[track_caller]
        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
            node: &InnerNode48<K, V, PREFIX_LEN>,
            bound: impl RangeBounds<u8>,
            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
        ) {
            let pairs = node.range(bound).collect::<Vec<_>>();
            assert_eq!(pairs, expected_pairs);
        }

        check(
            &node,
            (Bound::Included(0), Bound::Included(3)),
            [(0u8, l3_ptr), (3, l1_ptr)],
        );
        check(&node, (Bound::Excluded(0), Bound::Excluded(3)), []);
        check(
            &node,
            (Bound::Included(0), Bound::Included(0)),
            [(0u8, l3_ptr)],
        );
        check(
            &node,
            (Bound::Included(0), Bound::Included(255)),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(255), Bound::Included(255)),
            [(255, l2_ptr)],
        );
        check(&node, (Bound::Included(255), Bound::Excluded(255)), []);
        check(&node, (Bound::Excluded(255), Bound::Included(255)), []);
        check(
            &node,
            (Bound::Excluded(0), Bound::Excluded(255)),
            [(3, l1_ptr), (85, l4_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Unbounded),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(86)),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
        );
    }

    fn fixture_empty_edges() -> FixtureReturn<InnerNode48<Box<[u8]>, (), 16>, 4> {
        let mut n48 = InnerNode48::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
        let l4_ptr = NodePtr::from(&mut l4).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(254, l2_ptr);
        n48.write_child(2u8, l3_ptr);
        n48.write_child(85, l4_ptr);

        (n48, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
    }

    #[test]
    fn range_iterate_boundary_conditions() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture_empty_edges();

        #[track_caller]
        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
            node: &InnerNode48<K, V, PREFIX_LEN>,
            bound: impl RangeBounds<u8>,
            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
        ) {
            let pairs = node.range(bound).collect::<Vec<_>>();
            assert_eq!(pairs, expected_pairs);
        }

        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(86)),
            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(4)),
            [(2u8, l3_ptr), (3, l1_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Excluded(3)),
            [(2u8, l3_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(2)),
            [(2u8, l3_ptr)],
        );
        check(&node, (Bound::<u8>::Unbounded, Bound::Included(1)), []);
        check(&node, (Bound::<u8>::Unbounded, Bound::Included(0)), []);

        check(
            &node,
            (Bound::Included(1), Bound::<u8>::Unbounded),
            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(3), Bound::<u8>::Unbounded),
            [(3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Excluded(84), Bound::<u8>::Unbounded),
            [(85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(253), Bound::<u8>::Unbounded),
            [(254, l2_ptr)],
        );
        check(&node, (Bound::Included(255), Bound::<u8>::Unbounded), []);
    }

    #[test]
    #[should_panic = "range start and end are equal and excluded: (80)"]
    fn range_iterate_out_of_bounds_panic_both_excluded() {
        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = fixture();

        let pairs = node
            .range((Bound::Excluded(80), Bound::Excluded(80)))
            .collect::<Vec<_>>();
        assert_eq!(pairs, &[]);
    }

    #[test]
    #[should_panic = "range start (80) is greater than range end (0)"]
    fn range_iterate_start_greater_than_end() {
        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = fixture();

        let _pairs = node
            .range((Bound::Excluded(80), Bound::Included(0)))
            .collect::<Vec<_>>();
    }
}