#[cfg(feature = "nightly")]
use core::simd::{cmp::SimdPartialEq, u8x64};
use core::{
    fmt,
    iter::{Enumerate, FusedIterator},
    mem::{self, MaybeUninit},
    slice::Iter,
};

use self::index::NonMaxIndex;
use crate::{
    raw::{
        representation::assert_valid_range_bounds, Header, InnerNode, InnerNode16, InnerNodeCommon,
        InnerNodeDirect, InnerNodeSorted, Node, NodeType, OpaqueNodePtr,
    },
    rust_nightly_apis::maybe_uninit_slice_assume_init_ref,
};

mod index;

/// An inner node that stores up to `SIZE` children, reached indirectly
/// through a 256-entry table that maps each key byte to a slot in the child
/// pointer array.
#[repr(C, align(8))]
pub struct InnerNodeIndirect<K, V, const PREFIX_LEN: usize, const SIZE: usize> {
    /// The common node fields.
    pub header: Header<PREFIX_LEN>,
    /// The array of child pointers; only the first `header.num_children()`
    /// entries are initialized.
    pub child_pointers: [MaybeUninit<OpaqueNodePtr<K, V, PREFIX_LEN>>; SIZE],
    /// For each possible key byte, the index of the corresponding child in
    /// `child_pointers`, or `None` if there is no child for that byte.
    pub child_indices: [Option<NonMaxIndex>; 256],
}

impl<K, V, const PREFIX_LEN: usize, const SIZE: usize, const OTHER_SIZE: usize>
    From<&InnerNodeSorted<K, V, PREFIX_LEN, OTHER_SIZE>>
    for InnerNodeIndirect<K, V, PREFIX_LEN, SIZE>
{
    fn from(value: &InnerNodeSorted<K, V, PREFIX_LEN, OTHER_SIZE>) -> Self {
        assert!(
            value.header.num_children() <= SIZE,
            "Cannot convert an InnerNodeSorted into an InnerNodeIndirect when it has more than \
             {SIZE} children. Currently has [{}] children.",
            value.header.num_children()
        );

        let header = value.header.clone();
        let mut child_indices = [None; 256];
        let mut child_pointers = [MaybeUninit::uninit(); SIZE];

        let (keys, _) = value.initialized_portion();

        assert_eq!(
            keys.len(),
            value.keys.len(),
            "Node must be full to grow into an InnerNodeIndirect"
        );

        for (index, key) in keys.iter().copied().enumerate() {
            // SAFETY: `index` is strictly less than the number of children,
            // which the assertion above bounds by `SIZE`, so it can never be
            // the maximum index value.
            child_indices[usize::from(key)] =
                Some(unsafe { NonMaxIndex::try_from(index).unwrap_unchecked() });
        }

        let num_children = header.num_children();

        unsafe {
            // SAFETY: The header child count never exceeds the capacity of
            // the source node's `child_pointers` array.
            core::hint::assert_unchecked(num_children <= value.child_pointers.len());

            // SAFETY: The assertion at the top of this function guarantees
            // that all the children fit in the new node.
            core::hint::assert_unchecked(num_children <= child_pointers.len());
        }

        child_pointers[..num_children].copy_from_slice(&value.child_pointers[..num_children]);

        Self {
            header,
            child_indices,
            child_pointers,
        }
    }
}

impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> From<&InnerNodeDirect<K, V, PREFIX_LEN>>
    for InnerNodeIndirect<K, V, PREFIX_LEN, SIZE>
{
    fn from(value: &InnerNodeDirect<K, V, PREFIX_LEN>) -> Self {
        assert!(
            value.header.num_children() <= SIZE,
            "Cannot shrink an InnerNodeDirect when it has more than {SIZE} children. Currently \
             has [{}] children.",
            value.header.num_children()
        );

        let header = value.header.clone();
        let mut child_indices = [None; 256];
        let mut child_pointers = [MaybeUninit::uninit(); SIZE];

        for (child_index, (key_byte, child_ptr)) in value.iter().enumerate() {
            let key_byte = usize::from(key_byte);
            // PANIC SAFETY: The assertion above bounds `child_index` by
            // `SIZE`, which is well below the `NonMaxIndex` maximum.
            child_indices[key_byte] = Some(NonMaxIndex::try_from(child_index).unwrap());
            child_pointers[child_index].write(child_ptr);
        }

        Self {
            header,
            child_indices,
            child_pointers,
        }
    }
}

impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> fmt::Debug
    for InnerNodeIndirect<K, V, PREFIX_LEN, SIZE>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("InnerNodeIndirect")
            .field("header", &self.header)
            .field("child_indices", &self.child_indices)
            .field("child_pointers", &self.child_pointers)
            .finish()
    }
}

impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> Clone
    for InnerNodeIndirect<K, V, PREFIX_LEN, SIZE>
{
    fn clone(&self) -> Self {
        Self {
            header: self.header.clone(),
            child_indices: self.child_indices,
            child_pointers: self.child_pointers,
        }
    }
}

impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> InnerNodeIndirect<K, V, PREFIX_LEN, SIZE> {
    /// Return the initialized portion of the `child_pointers` array.
    pub fn initialized_child_pointers(&self) -> &[OpaqueNodePtr<K, V, PREFIX_LEN>] {
        unsafe {
            // SAFETY: The header tracks how many leading entries of
            // `child_pointers` are initialized, and that count never exceeds
            // the array length.
            core::hint::assert_unchecked(self.header.num_children() <= self.child_pointers.len());
            maybe_uninit_slice_assume_init_ref(&self.child_pointers[..self.header.num_children()])
        }
    }
}

unsafe impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> InnerNodeCommon<K, V, PREFIX_LEN>
    for InnerNodeIndirect<K, V, PREFIX_LEN, SIZE>
{
    type Iter<'a>
        = NodeIndirectIter<'a, K, V, PREFIX_LEN>
    where
        Self: 'a;

    fn header(&self) -> &Header<PREFIX_LEN> {
        &self.header
    }

    fn from_header(header: Header<PREFIX_LEN>) -> Self {
        InnerNodeIndirect {
            header,
            child_indices: [None; 256],
            child_pointers: [MaybeUninit::uninit(); SIZE],
        }
    }

    fn lookup_child(&self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        let index = &self.child_indices[usize::from(key_fragment)];
        let child_pointers = self.initialized_child_pointers();
        if let Some(index) = index {
            let idx = usize::from(*index);

            unsafe {
                // SAFETY: Any index stored in `child_indices` refers to an
                // initialized entry of `child_pointers`.
                core::hint::assert_unchecked(idx < child_pointers.len());
            }
            Some(child_pointers[idx])
        } else {
            None
        }
    }

    fn write_child(&mut self, key_fragment: u8, child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let key_fragment_idx = usize::from(key_fragment);
        let child_index = if let Some(index) = self.child_indices[key_fragment_idx] {
            // The key fragment already has a child: reuse its slot.
            usize::from(index)
        } else {
            let child_index = self.header.num_children();
            debug_assert!(child_index < self.child_pointers.len(), "node is full");

            // SAFETY: `child_index` is strictly less than the length of
            // `child_pointers` (at most `SIZE`), so it can never be the
            // maximum index value.
            self.child_indices[key_fragment_idx] =
                Some(unsafe { NonMaxIndex::try_from(child_index).unwrap_unchecked() });
            self.header.inc_num_children();
            child_index
        };

        unsafe {
            // SAFETY: `child_index` is either an existing initialized slot or
            // the first uninitialized slot, both of which are in bounds.
            core::hint::assert_unchecked(child_index < self.child_pointers.len());
            self.child_pointers
                .get_unchecked_mut(child_index)
                .write(child_pointer);
        }
    }

    fn remove_child(&mut self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        let restricted_index = self.child_indices[usize::from(key_fragment)]?;

        // Take the child pointer out of the initialized portion of the array.
        let child_ptr = mem::replace(
            &mut self.child_pointers[usize::from(restricted_index)],
            MaybeUninit::uninit(),
        );

        // Shift the later child pointers down by one so that the initialized
        // portion stays contiguous.
        self.child_pointers.copy_within(
            (usize::from(restricted_index) + 1)..self.header.num_children(),
            usize::from(restricted_index),
        );

        // Fix up every index that pointed past the removed slot.
        for other_restrict_index in self.child_indices.iter_mut().flatten() {
            if restricted_index < *other_restrict_index {
                // PANIC SAFETY: Decrementing an in-bounds index keeps it in
                // bounds, so it is still a valid `NonMaxIndex`.
                *other_restrict_index =
                    NonMaxIndex::try_from(other_restrict_index.get() - 1).unwrap();
            }
        }

        self.child_indices[usize::from(key_fragment)] = None;
        self.header.dec_num_children();
        // SAFETY: The pointer was taken from the initialized portion of
        // `child_pointers`, so it is initialized.
        Some(unsafe { MaybeUninit::assume_init(child_ptr) })
    }

    fn iter(&self) -> NodeIndirectIter<'_, K, V, PREFIX_LEN> {
        let child_pointers = self.initialized_child_pointers();

        NodeIndirectIter {
            it: self.child_indices.iter().enumerate(),
            child_pointers,
        }
    }

    fn range(
        &self,
        bound: impl core::ops::RangeBounds<u8>,
    ) -> impl DoubleEndedIterator<Item = (u8, OpaqueNodePtr<K, V, PREFIX_LEN>)> + FusedIterator
    {
        assert_valid_range_bounds(&bound);

        let child_pointers = self.initialized_child_pointers();

        let start = bound.start_bound().map(|val| usize::from(*val));
        // Keys yielded below are relative to the start of the sliced range,
        // so compute the offset needed to recover the absolute key byte.
        let key_offset = match bound.start_bound() {
            core::ops::Bound::Included(val) => *val,
            core::ops::Bound::Excluded(val) => val.saturating_add(1),
            core::ops::Bound::Unbounded => 0,
        };
        let end = bound.end_bound().map(|val| usize::from(*val));

        self.child_indices[(start, end)]
            .iter()
            .enumerate()
            .filter_map(|(key, idx)| {
                // Keep only the occupied entries.
                idx.map(|idx| (key as u8, usize::from(idx)))
            })
            .map(move |(key, idx)| unsafe {
                // SAFETY: Any index stored in `child_indices` refers to an
                // initialized entry of `child_pointers`.
                (
                    key.saturating_add(key_offset),
                    *child_pointers.get_unchecked(idx),
                )
            })
    }

    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        // SAFETY: `Option<NonMaxIndex>` is a single byte in which the
        // all-zero bit pattern is the `None` niche, so the table can be
        // scanned as raw bytes where a zero byte marks an empty entry.
        let child_indices: &[u8; 256] = unsafe { mem::transmute(&self.child_indices) };
        // Compare each 64-byte block against zero; a set bit in the resulting
        // mask marks an empty entry.
        let empty = u8x64::splat(0);
        let r0 = u8x64::from_array(child_indices[0..64].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r1 = u8x64::from_array(child_indices[64..128].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r2 = u8x64::from_array(child_indices[128..192].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r3 = u8x64::from_array(child_indices[192..256].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();

        // The number of trailing ones in the first not-all-ones mask is the
        // position of the first occupied entry within that block.
        let key = if r0 != u64::MAX {
            r0.trailing_ones()
        } else if r1 != u64::MAX {
            r1.trailing_ones() + 64
        } else if r2 != u64::MAX {
            r2.trailing_ones() + 128
        } else {
            r3.trailing_ones() + 192
        } as usize;

        unsafe {
            // SAFETY: `key` is computed from bitmasks over exactly the 256
            // entries of `child_indices`, and an inner node always has at
            // least one child, so `key` is in bounds.
            core::hint::assert_unchecked(key < self.child_indices.len());
        }

        // SAFETY: `key` was chosen because its entry is occupied.
        let idx = usize::from(unsafe { self.child_indices[key].unwrap_unchecked() });
        let child_pointers = self.initialized_child_pointers();

        unsafe {
            // SAFETY: Any index stored in `child_indices` refers to an
            // initialized entry of `child_pointers`.
            core::hint::assert_unchecked(idx < child_pointers.len());
        }

        (key as u8, child_pointers[idx])
    }

    #[cfg(not(feature = "nightly"))]
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        for (key, idx) in self.child_indices.iter().enumerate() {
            let Some(idx) = idx else {
                continue;
            };
            let child_pointers = self.initialized_child_pointers();
            return (key as u8, child_pointers[usize::from(*idx)]);
        }
        unreachable!("inner node must have non-zero number of children");
    }

    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        // SAFETY: `Option<NonMaxIndex>` is a single byte in which the
        // all-zero bit pattern is the `None` niche, so the table can be
        // scanned as raw bytes where a zero byte marks an empty entry.
        let child_indices: &[u8; 256] = unsafe { mem::transmute(&self.child_indices) };
        let empty = u8x64::splat(0);
        let r0 = u8x64::from_array(child_indices[0..64].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r1 = u8x64::from_array(child_indices[64..128].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r2 = u8x64::from_array(child_indices[128..192].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();
        let r3 = u8x64::from_array(child_indices[192..256].try_into().unwrap())
            .simd_eq(empty)
            .to_bitmask();

        // The number of leading ones in the last not-all-ones mask is the
        // distance from the top of that block down to the last occupied
        // entry.
        let key = if r3 != u64::MAX {
            255 - r3.leading_ones()
        } else if r2 != u64::MAX {
            191 - r2.leading_ones()
        } else if r1 != u64::MAX {
            127 - r1.leading_ones()
        } else {
            63 - r0.leading_ones()
        } as usize;

        unsafe {
            // SAFETY: `key` is computed from bitmasks over exactly the 256
            // entries of `child_indices`, and an inner node always has at
            // least one child, so `key` is in bounds.
            core::hint::assert_unchecked(key < self.child_indices.len());
        }

        // SAFETY: `key` was chosen because its entry is occupied.
        let idx = usize::from(unsafe { self.child_indices[key].unwrap_unchecked() });
        let child_pointers = self.initialized_child_pointers();

        unsafe {
            // SAFETY: Any index stored in `child_indices` refers to an
            // initialized entry of `child_pointers`.
            core::hint::assert_unchecked(idx < child_pointers.len());
        }

        (key as u8, child_pointers[idx])
    }

    #[cfg(not(feature = "nightly"))]
    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        for (key, idx) in self.child_indices.iter().enumerate().rev() {
            let Some(idx) = idx else {
                continue;
            };
            let child_pointers = self.initialized_child_pointers();
            return (key as u8, child_pointers[usize::from(*idx)]);
        }
        unreachable!("inner node must have non-zero number of children");
    }
}

/// An [`InnerNodeIndirect`] with 48 child slots, i.e. the `Node48` node type.
pub type InnerNode48<K, V, const PREFIX_LEN: usize> = InnerNodeIndirect<K, V, PREFIX_LEN, 48>;

impl<K, V, const PREFIX_LEN: usize> Node<PREFIX_LEN> for InnerNode48<K, V, PREFIX_LEN> {
    type Key = K;
    type Value = V;

    const TYPE: NodeType = NodeType::Node48;
}

impl<K, V, const PREFIX_LEN: usize> InnerNode<PREFIX_LEN> for InnerNode48<K, V, PREFIX_LEN> {
    type GrownNode = InnerNodeDirect<K, V, PREFIX_LEN>;
    type ShrunkNode = InnerNode16<K, V, PREFIX_LEN>;

    fn grow(&self) -> Self::GrownNode {
        self.into()
    }

    fn shrink(&self) -> Self::ShrunkNode {
        self.into()
    }
}

/// An iterator over all the children of an [`InnerNodeIndirect`].
pub struct NodeIndirectIter<'a, K, V, const PREFIX_LEN: usize> {
    pub(crate) it: Enumerate<Iter<'a, Option<NonMaxIndex>>>,
    pub(crate) child_pointers: &'a [OpaqueNodePtr<K, V, PREFIX_LEN>],
}

impl<K, V, const PREFIX_LEN: usize> Iterator for NodeIndirectIter<'_, K, V, PREFIX_LEN> {
    type Item = (u8, OpaqueNodePtr<K, V, PREFIX_LEN>);

    fn next(&mut self) -> Option<Self::Item> {
        for (key, idx) in self.it.by_ref() {
            let Some(idx) = idx else {
                continue;
            };
            let key = key as u8;
            // SAFETY: Any index stored in `child_indices` refers to an
            // initialized entry of `child_pointers`.
            let child_pointer = unsafe { *self.child_pointers.get_unchecked(usize::from(*idx)) };
            return Some((key, child_pointer));
        }
        None
    }
}

impl<K, V, const PREFIX_LEN: usize> DoubleEndedIterator for NodeIndirectIter<'_, K, V, PREFIX_LEN> {
    fn next_back(&mut self) -> Option<Self::Item> {
        while let Some((key, idx)) = self.it.next_back() {
            let Some(idx) = idx else {
                continue;
            };
            let key = key as u8;
            // SAFETY: Any index stored in `child_indices` refers to an
            // initialized entry of `child_pointers`.
            let child_pointer = unsafe { *self.child_pointers.get_unchecked(usize::from(*idx)) };
            return Some((key, child_pointer));
        }
        None
    }
}

impl<K, V, const PREFIX_LEN: usize> FusedIterator for NodeIndirectIter<'_, K, V, PREFIX_LEN> {}

#[cfg(test)]
mod tests {
    use alloc::{boxed::Box, vec::Vec};
    use core::ops::{Bound, RangeBounds};

    use super::*;
    use crate::raw::{
        representation::tests::{
            inner_node_min_max_test, inner_node_remove_child_test, inner_node_shrink_test,
            inner_node_write_child_test, FixtureReturn,
        },
        LeafNode, NodePtr,
    };

    #[test]
    fn lookup() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l3 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        assert!(n.lookup_child(123).is_none());

        n.header.inc_num_children();
        n.header.inc_num_children();
        n.header.inc_num_children();

        n.child_indices[1] = Some(2usize.try_into().unwrap());
        n.child_indices[3] = Some(0usize.try_into().unwrap());
        n.child_indices[123] = Some(1usize.try_into().unwrap());

        n.child_pointers[0].write(l1_ptr);
        n.child_pointers[1].write(l2_ptr);
        n.child_pointers[2].write(l3_ptr);

        assert_eq!(n.lookup_child(123), Some(l2_ptr));
    }
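
    // A small sketch (not part of the original suite) showing that
    // `write_child` reuses the existing pointer slot when the key fragment is
    // already present instead of allocating a new one.
    #[test]
    fn write_child_overwrite_existing_key() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        n.write_child(42, l1_ptr);
        n.write_child(42, l2_ptr);

        // The second write replaces the pointer but does not add a child.
        assert_eq!(n.lookup_child(42), Some(l2_ptr));
        assert_eq!(n.header.num_children(), 1);
    }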

    #[test]
    fn write_child() {
        inner_node_write_child_test(InnerNode48::<_, _, 16>::empty(), 48)
    }
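
    // A sketch illustrating that `initialized_child_pointers` exposes exactly
    // the slots written so far, in insertion order rather than key order.
    #[test]
    fn initialized_child_pointers_tracks_writes() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        assert!(n.initialized_child_pointers().is_empty());

        n.write_child(200, l1_ptr);
        n.write_child(10, l2_ptr);

        // Slots are assigned in insertion order, independent of key order.
        assert_eq!(n.initialized_child_pointers(), &[l1_ptr, l2_ptr]);
    }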

    #[test]
    fn remove_child() {
        inner_node_remove_child_test(InnerNode48::<_, _, 16>::empty(), 48)
    }
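
    // A sketch of the index fix-up in `remove_child`: removing a child shifts
    // the later pointers down and rewrites any `child_indices` entries that
    // pointed past the removed slot.
    #[test]
    fn remove_child_compacts_pointer_slots() {
        let mut n = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l3 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n.write_child(3, l1_ptr);
        n.write_child(85, l2_ptr);
        n.write_child(255, l3_ptr);

        assert_eq!(n.remove_child(85), Some(l2_ptr));

        // The removed key is gone, the other children remain reachable.
        assert_eq!(n.lookup_child(85), None);
        assert_eq!(n.lookup_child(3), Some(l1_ptr));
        assert_eq!(n.lookup_child(255), Some(l3_ptr));
        assert_eq!(n.header.num_children(), 2);
    }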

    #[test]
    #[should_panic]
    fn write_child_full_panic() {
        inner_node_write_child_test(InnerNode48::<_, _, 16>::empty(), 49);
    }

    #[test]
    fn grow() {
        let mut n48 = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(123, l2_ptr);
        n48.write_child(1, l3_ptr);

        let n256 = n48.grow();

        assert_eq!(n256.lookup_child(3), Some(l1_ptr));
        assert_eq!(n256.lookup_child(123), Some(l2_ptr));
        assert_eq!(n256.lookup_child(1), Some(l3_ptr));
        assert_eq!(n256.lookup_child(4), None);
    }
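
    // A sketch of the `From<&InnerNodeDirect>` conversion used when a Node256
    // shrinks back down, assuming `InnerNodeDirect` exposes the same
    // `empty`/`write_child` API exercised by the `grow` test above.
    #[test]
    fn from_direct_node() {
        let mut n256 = InnerNodeDirect::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        n256.write_child(7, l1_ptr);
        n256.write_child(200, l2_ptr);

        let n48 = InnerNode48::from(&n256);

        assert_eq!(n48.lookup_child(7), Some(l1_ptr));
        assert_eq!(n48.lookup_child(200), Some(l2_ptr));
        assert_eq!(n48.lookup_child(8), None);
    }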

    #[test]
    fn shrink() {
        inner_node_shrink_test(InnerNode48::<_, _, 16>::empty(), 16);
    }
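
    // A sketch of a direct shrink round trip, assuming the
    // `From<&InnerNodeIndirect>` conversion behind `shrink` preserves every
    // child, as the helper test above implies.
    #[test]
    fn shrink_preserves_children() {
        let mut n48 = InnerNode48::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();

        n48.write_child(1, l1_ptr);
        n48.write_child(9, l2_ptr);

        let n16 = n48.shrink();

        assert_eq!(n16.lookup_child(1), Some(l1_ptr));
        assert_eq!(n16.lookup_child(9), Some(l2_ptr));
        assert_eq!(n16.lookup_child(2), None);
    }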

    #[test]
    #[should_panic = "Cannot shrink a InnerNodeIndirect when it has more than 16 children. \
                      Currently has [17] children."]
    fn shrink_too_many_children_panic() {
        inner_node_shrink_test(InnerNode48::<_, _, 16>::empty(), 17);
    }

    #[test]
    fn min_max() {
        inner_node_min_max_test(InnerNode48::<_, _, 16>::empty(), 48);
    }
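
    // A sketch pinning down the `min`/`max` contract on a concrete node: they
    // return the children with the smallest and largest key bytes.
    #[test]
    fn min_max_fixture() {
        let (node, _, [_l1_ptr, l2_ptr, l3_ptr, _l4_ptr]) = fixture();

        // The fixture holds keys {0, 3, 85, 255}; `l3` is at 0, `l2` at 255.
        assert_eq!(node.min(), (0u8, l3_ptr));
        assert_eq!(node.max(), (255u8, l2_ptr));
    }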

    fn fixture() -> FixtureReturn<InnerNode48<Box<[u8]>, (), 16>, 4> {
        let mut n48 = InnerNode48::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
        let l4_ptr = NodePtr::from(&mut l4).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(255, l2_ptr);
        n48.write_child(0u8, l3_ptr);
        n48.write_child(85, l4_ptr);

        (n48, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
    }

    #[test]
    fn iterate() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let mut iter = node.iter();

        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn iterate_rev() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let mut iter = node.iter().rev();

        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
        assert_eq!(iter.next(), None);
    }
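
    // A sketch showing that the iterator can be consumed from both ends at
    // once, meeting in the middle without repeating children.
    #[test]
    fn iterate_both_ends() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let mut iter = node.iter();

        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
        assert_eq!(iter.next_back().unwrap(), (255, l2_ptr));
        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
        assert_eq!(iter.next_back().unwrap(), (85, l4_ptr));
        assert_eq!(iter.next(), None);
        assert_eq!(iter.next_back(), None);
    }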

    #[test]
    fn range_iterate() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        #[track_caller]
        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
            node: &InnerNode48<K, V, PREFIX_LEN>,
            bound: impl RangeBounds<u8>,
            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
        ) {
            let pairs = node.range(bound).collect::<Vec<_>>();
            assert_eq!(pairs, expected_pairs);
        }

        check(
            &node,
            (Bound::Included(0), Bound::Included(3)),
            [(0u8, l3_ptr), (3, l1_ptr)],
        );
        check(&node, (Bound::Excluded(0), Bound::Excluded(3)), []);
        check(
            &node,
            (Bound::Included(0), Bound::Included(0)),
            [(0u8, l3_ptr)],
        );
        check(
            &node,
            (Bound::Included(0), Bound::Included(255)),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(255), Bound::Included(255)),
            [(255, l2_ptr)],
        );
        check(&node, (Bound::Included(255), Bound::Excluded(255)), []);
        check(&node, (Bound::Excluded(255), Bound::Included(255)), []);
        check(
            &node,
            (Bound::Excluded(0), Bound::Excluded(255)),
            [(3, l1_ptr), (85, l4_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Unbounded),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(86)),
            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
        );
    }
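
    // A sketch confirming that `range` is double-ended, using the same
    // fixture as `range_iterate` above.
    #[test]
    fn range_iterate_rev() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture();

        let pairs = node
            .range((Bound::<u8>::Unbounded, Bound::Unbounded))
            .rev()
            .collect::<Vec<_>>();
        assert_eq!(
            pairs,
            [(255, l2_ptr), (85, l4_ptr), (3, l1_ptr), (0u8, l3_ptr)]
        );
    }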

    fn fixture_empty_edges() -> FixtureReturn<InnerNode48<Box<[u8]>, (), 16>, 4> {
        let mut n48 = InnerNode48::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
        let l4_ptr = NodePtr::from(&mut l4).to_opaque();

        n48.write_child(3, l1_ptr);
        n48.write_child(254, l2_ptr);
        n48.write_child(2u8, l3_ptr);
        n48.write_child(85, l4_ptr);

        (n48, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
    }

    #[test]
    fn range_iterate_boundary_conditions() {
        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = fixture_empty_edges();

        #[track_caller]
        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
            node: &InnerNode48<K, V, PREFIX_LEN>,
            bound: impl RangeBounds<u8>,
            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
        ) {
            let pairs = node.range(bound).collect::<Vec<_>>();
            assert_eq!(pairs, expected_pairs);
        }

        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(86)),
            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(4)),
            [(2u8, l3_ptr), (3, l1_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Excluded(3)),
            [(2u8, l3_ptr)],
        );
        check(
            &node,
            (Bound::<u8>::Unbounded, Bound::Included(2)),
            [(2u8, l3_ptr)],
        );
        check(&node, (Bound::<u8>::Unbounded, Bound::Included(1)), []);
        check(&node, (Bound::<u8>::Unbounded, Bound::Included(0)), []);

        check(
            &node,
            (Bound::Included(1), Bound::<u8>::Unbounded),
            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(3), Bound::<u8>::Unbounded),
            [(3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Excluded(84), Bound::<u8>::Unbounded),
            [(85, l4_ptr), (254, l2_ptr)],
        );
        check(
            &node,
            (Bound::Included(253), Bound::<u8>::Unbounded),
            [(254, l2_ptr)],
        );
        check(&node, (Bound::Included(255), Bound::<u8>::Unbounded), []);
    }

    #[test]
    #[should_panic = "range start and end are equal and excluded: (80)"]
    fn range_iterate_out_of_bounds_panic_both_excluded() {
        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = fixture();

        let pairs = node
            .range((Bound::Excluded(80), Bound::Excluded(80)))
            .collect::<Vec<_>>();
        assert_eq!(pairs, &[]);
    }

    #[test]
    #[should_panic = "range start (80) is greater than range end (0)"]
    fn range_iterate_start_greater_than_end() {
        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = fixture();

        let _pairs = node
            .range((Bound::Excluded(80), Bound::Included(0)))
            .collect::<Vec<_>>();
    }
}