blart/raw/representation/
inner_node_compressed.rs

1use crate::{
2    raw::{Header, InnerNode, InnerNode48, Node, NodeType, OpaqueNodePtr, RestrictedNodeIndex},
3    rust_nightly_apis::{assume, maybe_uninit_slice_assume_init_ref},
4};
5use std::{
6    fmt,
7    iter::{Copied, Zip},
8    mem::{self, MaybeUninit},
9    ops::{Bound, RangeBounds},
10    slice::Iter,
11};
12
13#[cfg(feature = "nightly")]
14use std::simd::{
15    cmp::{SimdPartialEq, SimdPartialOrd},
16    u8x16,
17};
18
/// Where a write should happen inside the node.
///
/// Produced by [`SearchInnerNodeCompressed::find_write_point`]; the wrapped
/// `usize` is always an index into the node's `keys`/`child_pointers` arrays.
#[derive(Debug)]
enum WritePoint {
    /// In an already existing key fragment (overwrite in place)
    Existing(usize),
    /// As the last key fragment (append, no shifting needed)
    Last(usize),
    /// Shift the key fragments at and after this index to the right to make
    /// room for the new entry
    Shift(usize),
}
29
/// Common methods for searching in an [`InnerNodeCompressed`]
///
/// Implemented separately for Node4 (linear scan) and Node16 (SIMD on
/// nightly), so the generic insert/lookup/remove logic can be shared.
trait SearchInnerNodeCompressed {
    /// Get the index of the child if it exists
    fn lookup_child_index(&self, key_fragment: u8) -> Option<usize>;

    /// Find the write point for `key_fragment`
    fn find_write_point(&self, key_fragment: u8) -> WritePoint;
}
38
/// Node type that has a compact representation for key bytes and children
/// pointers.
///
/// `keys[i]` always pairs with `child_pointers[i]`, and the write path
/// (`find_write_point` + `write_child_inner`) keeps the key bytes sorted in
/// ascending order.
#[repr(C, align(8))]
pub struct InnerNodeCompressed<K, V, const PREFIX_LEN: usize, const SIZE: usize> {
    /// The common node fields.
    pub header: Header<PREFIX_LEN>,
    /// An array that contains single key bytes in the same index as the
    /// `child_pointers` array contains the matching child tree.
    ///
    /// This array will only be initialized for the first `header.num_children`
    /// values.
    pub keys: [MaybeUninit<u8>; SIZE],
    /// An array that contains the child data.
    ///
    /// This array will only be initialized for the first `header.num_children`
    /// values.
    pub child_pointers: [MaybeUninit<OpaqueNodePtr<K, V, PREFIX_LEN>>; SIZE],
}
57
58impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> Clone
59    for InnerNodeCompressed<K, V, PREFIX_LEN, SIZE>
60{
61    fn clone(&self) -> Self {
62        Self {
63            header: self.header.clone(),
64            keys: self.keys,
65            child_pointers: self.child_pointers,
66        }
67    }
68}
69
70impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> fmt::Debug
71    for InnerNodeCompressed<K, V, PREFIX_LEN, SIZE>
72{
73    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
74        let (keys, child_pointers) = self.initialized_portion();
75        f.debug_struct("InnerNodeBlock")
76            .field("SIZE", &SIZE)
77            .field("header", &self.header)
78            .field("keys", &keys)
79            .field("child_pointers", &child_pointers)
80            .finish()
81    }
82}
83
/// Iterator type for an [`InnerNodeCompressed`]
///
/// Yields `(key byte, child pointer)` pairs for the initialized portion of
/// the node, in storage order.
pub type InnerNodeCompressedIter<'a, K, V, const PREFIX_LEN: usize> =
    Zip<Copied<Iter<'a, u8>>, Copied<Iter<'a, OpaqueNodePtr<K, V, PREFIX_LEN>>>>;
87
88impl<K, V, const PREFIX_LEN: usize, const SIZE: usize> InnerNodeCompressed<K, V, PREFIX_LEN, SIZE> {
    /// Return the initialized portions of the keys and child pointer arrays.
    ///
    /// Both returned slices have length `header.num_children()` and are
    /// index-aligned: `keys[i]` is the key byte for `child_pointers[i]`.
    pub fn initialized_portion(&self) -> (&[u8], &[OpaqueNodePtr<K, V, PREFIX_LEN>]) {
        // SAFETY: The array prefix with length `header.num_children` is guaranteed to
        // be initialized
        unsafe {
            let num_children = self.header.num_children();
            // Lets the optimizer drop the bounds checks that `get_unchecked`
            // would otherwise still need in debug builds.
            assume!(num_children <= self.keys.len());
            (
                maybe_uninit_slice_assume_init_ref(self.keys.get_unchecked(0..num_children)),
                maybe_uninit_slice_assume_init_ref(
                    self.child_pointers.get_unchecked(0..num_children),
                ),
            )
        }
    }
104
    /// Generalized version of [`InnerNode::lookup_child`] for compressed nodes
    ///
    /// Returns the child pointer stored under `key_fragment`, or `None` if
    /// the fragment is not present in this node.
    fn lookup_child_inner(&self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>>
    where
        Self: SearchInnerNodeCompressed,
    {
        // Early-out with `?` if the key fragment is not in this node.
        let idx = self.lookup_child_index(key_fragment)?;
        unsafe {
            // SAFETY: If `idx` is out of bounds the node should already have grown
            // so it's safe to assume that `idx` is in bounds
            assume!(idx < self.child_pointers.len());

            // SAFETY: The value at `child_index` is guaranteed to be initialized because
            // the `lookup_child_index` function will only search in the initialized portion
            // of the `child_pointers` array.
            Some(MaybeUninit::assume_init(self.child_pointers[idx]))
        }
    }
122
    /// Writes a child to the node, keeping the key fragments in sorted order.
    ///
    /// Depending on where `key_fragment` belongs this either overwrites an
    /// existing entry, appends after the last entry, or shifts later entries
    /// one slot right to make room.
    ///
    /// # Panics
    ///  - This functions assumes that the write is gonna be inbound (i.e the
    ///    check for a full node is done previously to the call of this
    ///    function)
    fn write_child_inner(
        &mut self,
        key_fragment: u8,
        child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>,
    ) where
        Self: SearchInnerNodeCompressed,
    {
        let num_children = self.header.num_children();
        let idx = match self.find_write_point(key_fragment) {
            WritePoint::Existing(child_index) => child_index,
            WritePoint::Last(child_index) => {
                self.header.inc_num_children();
                child_index
            },
            WritePoint::Shift(child_index) => {
                #[allow(unused_unsafe)]
                unsafe {
                    // SAFETY: This is by construction, since the number of children
                    // is always <= maximum number of keys (children) that we can hold
                    assume!(num_children <= self.keys.len());

                    // SAFETY: When we are shifting children, because a new minimum one
                    // is being inserted this guarantees to us that the index of insertion
                    // is < current number of children (because if it was >= we wouldn't
                    // need to shift the data)
                    assume!(child_index < num_children);

                    // assume!(child_index + 1 + (num_children - child_index) <=
                    // self.keys.len());
                }
                self.keys
                    .copy_within(child_index..num_children, child_index + 1);
                self.child_pointers
                    .copy_within(child_index..num_children, child_index + 1);
                self.header.inc_num_children();
                child_index
            },
        };
        unsafe {
            // SAFETY: The check for a full node is done previously to the call
            // of this function, so it's safe to assume that the new child index is
            // in bounds
            self.write_child_at(idx, key_fragment, child_pointer);
        }
    }
174
    /// Writes a child to the node without bounds check or order
    ///
    /// The entry is appended after the current last child; no sort-order
    /// maintenance is performed.
    ///
    /// # Safety
    /// - This functions assumes that the write is gonna be inbound (i.e the
    ///   check for a full node is done previously to the call of this function)
    /// - NOTE(review): lookups in this file rely on keys being sorted, so the
    ///   caller presumably must append fragments in ascending order — confirm
    ///   at call sites.
    pub unsafe fn write_child_unchecked(
        &mut self,
        key_fragment: u8,
        child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>,
    ) {
        let idx = self.header.num_children();
        unsafe { self.write_child_at(idx, key_fragment, child_pointer) };
        self.header.inc_num_children();
    }
189
    /// Writes a child at `idx` without bounds check or order maintenance.
    ///
    /// Both the key byte and the child pointer are written to the same slot,
    /// preserving the index-alignment invariant of the two arrays.
    ///
    /// # Safety
    ///  - This functions assumes that the write is gonna be inbound (i.e the
    ///    check for a full node is done previously to the call of this
    ///    function)
    unsafe fn write_child_at(
        &mut self,
        idx: usize,
        key_fragment: u8,
        child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>,
    ) {
        unsafe {
            // SAFETY (caller contract): `idx` is within the array bounds.
            assume!(idx < self.keys.len());
            self.keys.get_unchecked_mut(idx).write(key_fragment);
            self.child_pointers
                .get_unchecked_mut(idx)
                .write(child_pointer);
        }
    }
210
    /// Removes child if it exists
    ///
    /// Returns the removed child pointer, or `None` if `key_fragment` was not
    /// present. Entries after the removed slot are shifted down by one so the
    /// initialized prefix stays contiguous and sorted.
    fn remove_child_inner(&mut self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>>
    where
        Self: SearchInnerNodeCompressed,
    {
        let child_index = self.lookup_child_index(key_fragment)?;
        // Swap the slot out for an uninit placeholder before the shift below
        // overwrites it.
        let child_ptr = mem::replace(&mut self.child_pointers[child_index], MaybeUninit::uninit());

        // Copy all the child_pointer and key values in higher indices down by one.
        self.child_pointers
            .copy_within((child_index + 1)..self.header.num_children(), child_index);
        self.keys
            .copy_within((child_index + 1)..self.header.num_children(), child_index);

        self.header.dec_num_children();
        // SAFETY: This child pointer value is initialized because we got it by
        // searching through the initialized keys and got the `Ok(index)` value.
        Some(unsafe { MaybeUninit::assume_init(child_ptr) })
    }
230
    /// Grows or shrinks the node to a block of `NEW_SIZE` slots, copying the
    /// header and the initialized prefix of both arrays.
    ///
    /// # Panics
    /// - If the current number of children does not fit in `NEW_SIZE`.
    fn change_block_size<const NEW_SIZE: usize>(
        &self,
    ) -> InnerNodeCompressed<K, V, PREFIX_LEN, NEW_SIZE> {
        assert!(
            self.header.num_children() <= NEW_SIZE,
            "Cannot change InnerNodeCompressed<{}> to size {} when it has more than {} children. \
             Currently has [{}] children.",
            SIZE,
            NEW_SIZE,
            NEW_SIZE,
            self.header.num_children()
        );

        let header = self.header.clone();
        // Keys are zero-filled (not left uninit) — the nightly SIMD search on
        // Node16 loads the whole 16-byte array, so every byte must be
        // initialized (see `lookup_child_index` for `InnerNode16`).
        let mut keys = [MaybeUninit::new(0); NEW_SIZE];
        let mut child_pointers = [MaybeUninit::uninit(); NEW_SIZE];
        let num_children = header.num_children();

        #[allow(unused_unsafe)]
        unsafe {
            // SAFETY: By construction the number of children in the header
            // is kept in sync with the number of children written in the node
            // and if this number exceeds the maximum len the node should have
            // already grown. So we know for a fact that that num_children <= node len
            assume!(num_children <= self.keys.len());
            assume!(num_children <= self.child_pointers.len());

            // SAFETY: When calling this function the NEW_SIZE, should fit the nodes.
            // We only need to be careful when shrinking the node, since when growing
            // NEW_SIZE >= SIZE.
            // This function is only called in a shrink case when a node is removed from
            // a node and the new current size fits in the NEW_SIZE
            assume!(num_children <= keys.len());
            assume!(num_children <= child_pointers.len());
        }

        keys[..num_children].copy_from_slice(&self.keys[..num_children]);
        child_pointers[..num_children].copy_from_slice(&self.child_pointers[..num_children]);

        InnerNodeCompressed {
            header,
            keys,
            child_pointers,
        }
    }
277
    /// Transform node into a [`InnerNode48`]
    ///
    /// Builds the 256-entry indirection table (`child_indices`) from the key
    /// bytes and copies the child pointers over.
    ///
    /// # Panics
    /// - If this node is not completely full.
    fn grow_node48(&self) -> InnerNode48<K, V, PREFIX_LEN> {
        let header = self.header.clone();
        let mut child_indices = [RestrictedNodeIndex::<48>::EMPTY; 256];
        let mut child_pointers = [MaybeUninit::uninit(); 48];

        let (keys, _) = self.initialized_portion();

        assert_eq!(
            keys.len(),
            self.keys.len(),
            "Node must be full to grow to node 48"
        );

        for (index, key) in keys.iter().copied().enumerate() {
            // SAFETY: This `try_from` cannot fail (so `unwrap_unchecked` is sound)
            // because `index` is at most `SIZE - 1`; in practice this is only
            // called on a full `InnerNode16`, so `index` is 15 or less, well
            // under the `RestrictedNodeIndex<48>` limit.
            child_indices[usize::from(key)] =
                unsafe { RestrictedNodeIndex::try_from(index).unwrap_unchecked() };
        }

        let num_children = header.num_children();

        #[allow(unused_unsafe)]
        unsafe {
            // SAFETY: By construction the number of children in the header
            // is kept in sync with the number of children written in the node
            // and if this number exceeds the maximum len the node should have
            // already grown. So we know for a fact that that num_children <= node len
            assume!(num_children <= self.child_pointers.len());

            // SAFETY: We know that the new size is >= old size, so this is safe
            assume!(num_children <= child_pointers.len());
        }

        child_pointers[..num_children].copy_from_slice(&self.child_pointers[..num_children]);

        InnerNode48 {
            header,
            child_indices,
            child_pointers,
        }
    }
321
    /// Get an iterator over the keys and values of the node
    ///
    /// Yields `(key byte, child pointer)` pairs for the initialized portion,
    /// in storage order.
    fn inner_iter(&self) -> InnerNodeCompressedIter<'_, K, V, PREFIX_LEN> {
        let (keys, nodes) = self.initialized_portion();
        keys.iter().copied().zip(nodes.iter().copied())
    }
327
    /// Get an iterator over a range of keys and values of the node.
    ///
    /// The key-space bounds are translated into index-space bounds over the
    /// (sorted) initialized arrays via `find_write_point`, then used to slice
    /// the node.
    ///
    /// # Panics
    /// - If the range is invalid (start > end, or both bounds excluded and
    ///   equal), mirroring `std` slice-range panic behavior.
    fn inner_range_iter(
        &self,
        bound: impl RangeBounds<u8>,
    ) -> InnerNodeCompressedIter<'_, K, V, PREFIX_LEN>
    where
        Self: SearchInnerNodeCompressed,
    {
        {
            // Validate the requested key range before any index math.
            match (bound.start_bound(), bound.end_bound()) {
                (Bound::Excluded(s), Bound::Excluded(e)) if s == e => {
                    panic!("range start and end are equal and excluded: ({s:?})")
                },
                (
                    Bound::Included(s) | Bound::Excluded(s),
                    Bound::Included(e) | Bound::Excluded(e),
                ) if s > e => {
                    panic!("range start ({s:?}) is greater than range end ({e:?})")
                },
                _ => {},
            }
        }

        /// Converts a key-space bound (expressed as the [`WritePoint`] where
        /// the bound key would be written) into an index-space bound.
        fn fixup_bound_lookup(bound: Bound<WritePoint>, is_start: bool) -> Bound<usize> {
            // Example keys used in the comments below: [0, 3, 85, 254]
            match bound {
                // key = Included(0), bound = Included(Existing(0)), output = Included(0)
                Bound::Included(WritePoint::Existing(idx)) => Bound::Included(idx),
                // key = Included(255), bound = Included(Last(4)), output = Included(4)
                Bound::Included(WritePoint::Last(idx)) => Bound::Included(idx),
                // key = Included(2), bound = Included(Shift(1)), output = Included(1)
                Bound::Included(WritePoint::Shift(idx)) => {
                    if is_start {
                        Bound::Included(idx)
                    } else {
                        // For an end bound, the key falls before `idx`, so the
                        // last index to include is `idx - 1`; at `idx == 0`
                        // the range is empty, encoded as `Excluded(0)`.
                        idx.checked_sub(1)
                            .map_or(Bound::Excluded(0), Bound::Included)
                    }
                },
                // key = Excluded(0), bound = Excluded(Existing(0)), output = Excluded(0)
                Bound::Excluded(WritePoint::Existing(idx)) => Bound::Excluded(idx),
                // key = Excluded(255), bound = Excluded(Last(4)), output = Excluded(4)
                Bound::Excluded(WritePoint::Last(idx)) => Bound::Excluded(idx),
                // key = Excluded(2), bound = Excluded(Shift(1)), output = Excluded(0)
                Bound::Excluded(WritePoint::Shift(idx)) => {
                    if is_start {
                        idx.checked_sub(1).map_or(Bound::Unbounded, Bound::Excluded)
                    } else {
                        Bound::Excluded(idx)
                    }
                },
                Bound::Unbounded => Bound::Unbounded,
            }
        }

        let start_idx = fixup_bound_lookup(
            bound.start_bound().map(|val| self.find_write_point(*val)),
            true,
        );
        let end_idx = fixup_bound_lookup(
            bound.end_bound().map(|val| self.find_write_point(*val)),
            false,
        );

        let slice_range = (start_idx, end_idx);

        let (mut keys, mut nodes) = self.initialized_portion();

        keys = &keys[slice_range];
        nodes = &nodes[slice_range];

        keys.iter().copied().zip(nodes.iter().copied())
    }
401}
402
/// Node that references between 2 and 4 children
///
/// Alias for [`InnerNodeCompressed`] with a block size of 4.
pub type InnerNode4<K, V, const PREFIX_LEN: usize> = InnerNodeCompressed<K, V, PREFIX_LEN, 4>;
405
406impl<K, V, const PREFIX_LEN: usize> SearchInnerNodeCompressed for InnerNode4<K, V, PREFIX_LEN> {
407    fn lookup_child_index(&self, key_fragment: u8) -> Option<usize> {
408        let (keys, _) = self.initialized_portion();
409        for (child_index, key) in keys.iter().enumerate() {
410            if key_fragment == *key {
411                return Some(child_index);
412            }
413        }
414
415        None
416    }
417
418    fn find_write_point(&self, key_fragment: u8) -> WritePoint {
419        let (keys, _) = self.initialized_portion();
420
421        let mut child_index = 0;
422        for key in keys {
423            #[allow(clippy::comparison_chain)]
424            if key_fragment < *key {
425                return WritePoint::Shift(child_index);
426            } else if key_fragment == *key {
427                return WritePoint::Existing(child_index);
428            }
429            child_index += 1;
430        }
431        WritePoint::Last(child_index)
432    }
433}
434
impl<K, V, const PREFIX_LEN: usize> Node<PREFIX_LEN> for InnerNode4<K, V, PREFIX_LEN> {
    type Key = K;
    type Value = V;

    // Tag identifying this node type behind an `OpaqueNodePtr`.
    const TYPE: NodeType = NodeType::Node4;
}
441
impl<K, V, const PREFIX_LEN: usize> InnerNode<PREFIX_LEN> for InnerNode4<K, V, PREFIX_LEN> {
    type GrownNode = InnerNode16<K, V, PREFIX_LEN>;
    type Iter<'a>
        = InnerNodeCompressedIter<'a, K, V, PREFIX_LEN>
    where
        Self: 'a;
    // A Node4 is the smallest inner node; `ShrunkNode` is declared as itself
    // and `shrink` panics.
    type ShrunkNode = InnerNode4<K, V, PREFIX_LEN>;

    fn header(&self) -> &Header<PREFIX_LEN> {
        &self.header
    }

    fn from_header(header: Header<PREFIX_LEN>) -> Self {
        Self {
            header,
            child_pointers: [MaybeUninit::uninit(); 4],
            keys: [MaybeUninit::uninit(); 4],
        }
    }

    fn lookup_child(&self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        self.lookup_child_inner(key_fragment)
    }

    fn write_child(&mut self, key_fragment: u8, child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>) {
        self.write_child_inner(key_fragment, child_pointer)
    }

    fn remove_child(&mut self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        self.remove_child_inner(key_fragment)
    }

    fn grow(&self) -> Self::GrownNode {
        self.change_block_size()
    }

    fn shrink(&self) -> Self::ShrunkNode {
        panic!("unable to shrink a Node4, something went wrong!")
    }

    fn iter(&self) -> Self::Iter<'_> {
        self.inner_iter()
    }

    fn range(
        &self,
        bound: impl RangeBounds<u8>,
    ) -> impl DoubleEndedIterator<Item = (u8, OpaqueNodePtr<Self::Key, Self::Value, PREFIX_LEN>)>
           + std::iter::FusedIterator {
        self.inner_range_iter(bound)
    }

    // Keys are kept sorted by the write path, so the first entry is the
    // minimum and the last entry is the maximum.
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let (keys, children) = self.initialized_portion();
        // SAFETY: Any Node4 must have at least 2 children, so the `keys` and `children`
        // arrays must be non-empty
        unsafe {
            (
                keys.first().copied().unwrap_unchecked(),
                children.first().copied().unwrap_unchecked(),
            )
        }
    }

    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let (keys, children) = self.initialized_portion();
        // SAFETY: Any Node4 must have at least 2 children, so the `keys` and `children`
        // arrays must be non-empty
        unsafe {
            (
                keys.last().copied().unwrap_unchecked(),
                children.last().copied().unwrap_unchecked(),
            )
        }
    }
}
518
/// Node that references between 5 and 16 children
///
/// Alias for [`InnerNodeCompressed`] with a block size of 16.
pub type InnerNode16<K, V, const PREFIX_LEN: usize> = InnerNodeCompressed<K, V, PREFIX_LEN, 16>;
521
impl<K, V, const PREFIX_LEN: usize> SearchInnerNodeCompressed for InnerNode16<K, V, PREFIX_LEN> {
    // Nightly path: compares all 16 key lanes at once with portable SIMD.
    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn lookup_child_index(&self, key_fragment: u8) -> Option<usize> {
        // SAFETY: Even though the type is marked as uninit data, when
        // created the whole array is zero-filled (see `from_header`), so
        // every byte is initialized; the `MaybeUninit` wrapper just reminds
        // us that only a prefix is meaningful.
        let keys = unsafe { MaybeUninit::array_assume_init(self.keys) };
        let cmp = u8x16::splat(key_fragment)
            .simd_eq(u8x16::from_array(keys))
            .to_bitmask() as u32;
        // Mask off match lanes beyond the initialized prefix; `num_children`
        // is at most 16 here, so the shift cannot overflow a u32.
        let mask = (1u32 << self.header.num_children()) - 1;
        let bitfield = cmp & mask;
        if bitfield != 0 {
            Some(bitfield.trailing_zeros() as usize)
        } else {
            None
        }
    }

    // Stable fallback: plain linear scan over the initialized keys.
    #[cfg(not(feature = "nightly"))]
    fn lookup_child_index(&self, key_fragment: u8) -> Option<usize> {
        let (keys, _) = self.initialized_portion();
        for (child_index, key) in keys.iter().enumerate() {
            if key_fragment == *key {
                return Some(child_index);
            }
        }

        None
    }

    #[cfg(feature = "nightly")]
    #[cfg_attr(test, mutants::skip)]
    fn find_write_point(&self, key_fragment: u8) -> WritePoint {
        match self.lookup_child_index(key_fragment) {
            Some(child_index) => WritePoint::Existing(child_index),
            None => {
                // SAFETY: Even though the type is marked as uninit data, when
                // created the whole array is zero-filled (see `from_header`),
                // so every byte is initialized; the `MaybeUninit` wrapper just
                // reminds us that only a prefix is meaningful.
                let keys = unsafe { MaybeUninit::array_assume_init(self.keys) };
                // The first initialized key strictly greater than the
                // fragment (keys are sorted) is the shift point; if none
                // exists, append at the end.
                let cmp = u8x16::splat(key_fragment)
                    .simd_lt(u8x16::from_array(keys))
                    .to_bitmask() as u32;
                let mask = (1u32 << self.header.num_children()) - 1;
                let bitfield = cmp & mask;
                if bitfield != 0 {
                    WritePoint::Shift(bitfield.trailing_zeros() as usize)
                } else {
                    WritePoint::Last(self.header.num_children())
                }
            },
        }
    }

    // Stable fallback: linear scan mirroring the Node4 implementation.
    #[cfg(not(feature = "nightly"))]
    fn find_write_point(&self, key_fragment: u8) -> WritePoint {
        let (keys, _) = self.initialized_portion();

        let mut child_index = 0;
        for key in keys {
            #[allow(clippy::comparison_chain)]
            if key_fragment < *key {
                return WritePoint::Shift(child_index);
            } else if key_fragment == *key {
                return WritePoint::Existing(child_index);
            }
            child_index += 1;
        }
        WritePoint::Last(child_index)
    }
}
595
impl<K, V, const PREFIX_LEN: usize> Node<PREFIX_LEN> for InnerNode16<K, V, PREFIX_LEN> {
    type Key = K;
    type Value = V;

    // Tag identifying this node type behind an `OpaqueNodePtr`.
    const TYPE: NodeType = NodeType::Node16;
}
602
impl<K, V, const PREFIX_LEN: usize> InnerNode<PREFIX_LEN> for InnerNode16<K, V, PREFIX_LEN> {
    type GrownNode = InnerNode48<K, V, PREFIX_LEN>;
    type Iter<'a>
        = InnerNodeCompressedIter<'a, K, V, PREFIX_LEN>
    where
        Self: 'a;
    type ShrunkNode = InnerNode4<K, V, PREFIX_LEN>;

    fn header(&self) -> &Header<PREFIX_LEN> {
        &self.header
    }

    fn from_header(header: Header<PREFIX_LEN>) -> Self {
        Self {
            header,
            child_pointers: [MaybeUninit::uninit(); 16],
            // Keys are zero-filled (not uninit) so the nightly SIMD search
            // paths can safely load the full 16-byte array.
            keys: [MaybeUninit::new(0); 16],
        }
    }

    fn lookup_child(&self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        self.lookup_child_inner(key_fragment)
    }

    fn write_child(&mut self, key_fragment: u8, child_pointer: OpaqueNodePtr<K, V, PREFIX_LEN>) {
        self.write_child_inner(key_fragment, child_pointer)
    }

    fn remove_child(&mut self, key_fragment: u8) -> Option<OpaqueNodePtr<K, V, PREFIX_LEN>> {
        self.remove_child_inner(key_fragment)
    }

    fn grow(&self) -> Self::GrownNode {
        self.grow_node48()
    }

    fn shrink(&self) -> Self::ShrunkNode {
        self.change_block_size()
    }

    fn iter(&self) -> Self::Iter<'_> {
        self.inner_iter()
    }

    fn range(
        &self,
        bound: impl RangeBounds<u8>,
    ) -> impl DoubleEndedIterator<Item = (u8, OpaqueNodePtr<Self::Key, Self::Value, PREFIX_LEN>)>
           + std::iter::FusedIterator {
        self.inner_range_iter(bound)
    }

    // Keys are kept sorted by the write path, so the first entry is the
    // minimum and the last entry is the maximum.
    fn min(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let (keys, children) = self.initialized_portion();
        // SAFETY: An in-use Node16 always has at least one child (it is
        // created by growing a full Node4), so the slices are non-empty.
        unsafe {
            (
                keys.first().copied().unwrap_unchecked(),
                children.first().copied().unwrap_unchecked(),
            )
        }
    }

    fn max(&self) -> (u8, OpaqueNodePtr<K, V, PREFIX_LEN>) {
        let (keys, children) = self.initialized_portion();
        // SAFETY: An in-use Node16 always has at least one child (it is
        // created by growing a full Node4), so the slices are non-empty.
        unsafe {
            (
                keys.last().copied().unwrap_unchecked(),
                children.last().copied().unwrap_unchecked(),
            )
        }
    }
}
677
678#[cfg(test)]
679mod tests {
680    use crate::raw::{
681        representation::tests::{
682            inner_node_min_max_test, inner_node_remove_child_test, inner_node_shrink_test,
683            inner_node_write_child_test, FixtureReturn,
684        },
685        LeafNode, NodePtr,
686    };
687
688    use super::*;
689
    // Manually assembles a Node4 (bypassing `write_child`, so keys stay
    // unsorted) and checks that `lookup_child` hits and misses correctly.
    #[test]
    fn node4_lookup() {
        let mut n = InnerNode4::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        assert!(n.lookup_child(123).is_none());

        n.header.inc_num_children();
        n.header.inc_num_children();
        n.header.inc_num_children();

        n.keys[0].write(3);
        n.keys[1].write(123);
        n.keys[2].write(1);

        n.child_pointers[0].write(l1_ptr);
        n.child_pointers[1].write(l2_ptr);
        n.child_pointers[2].write(l3_ptr);

        assert_eq!(n.lookup_child(123), Some(l2_ptr));
    }
716
    // Fills a Node4 to capacity (4 children) via the shared write-child
    // fixture.
    #[test]
    fn node4_write_child() {
        inner_node_write_child_test(InnerNode4::<_, _, 16>::empty(), 4)
    }

    // Exercises removal of all 4 children via the shared fixture.
    #[test]
    fn node4_remove_child() {
        inner_node_remove_child_test(InnerNode4::<_, _, 16>::empty(), 4)
    }

    // Writing a 5th child into a full Node4 must panic.
    #[test]
    #[should_panic]
    fn node4_write_child_full_panic() {
        inner_node_write_child_test(InnerNode4::<_, _, 16>::empty(), 5);
    }
732
    // Grows a Node4 into a Node16 and verifies all children survive the
    // transformation (and that a missing key still misses).
    #[test]
    fn node4_grow() {
        let mut n4 = InnerNode4::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n4.write_child(3, l1_ptr);
        n4.write_child(123, l2_ptr);
        n4.write_child(1, l3_ptr);

        let n16 = n4.grow();

        assert_eq!(n16.lookup_child(3), Some(l1_ptr));
        assert_eq!(n16.lookup_child(123), Some(l2_ptr));
        assert_eq!(n16.lookup_child(1), Some(l3_ptr));
        assert_eq!(n16.lookup_child(4), None);
    }
754
    // A Node4 is the smallest node type; `shrink` must always panic.
    #[test]
    #[should_panic = "unable to shrink a Node4, something went wrong!"]
    fn node4_shrink() {
        let n4 = InnerNode4::<Box<[u8]>, (), 16>::empty();

        n4.shrink();
    }

    // Checks `min`/`max` over a fully populated Node4 via the shared fixture.
    #[test]
    fn node4_min_max() {
        inner_node_min_max_test(InnerNode4::<_, _, 16>::empty(), 4);
    }
767
    // Manually assembles a Node16 (bypassing `write_child`, so keys stay
    // unsorted) and checks that `lookup_child` hits and misses correctly.
    #[test]
    fn node16_lookup() {
        let mut n = InnerNode16::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l2 = LeafNode::with_no_siblings(Box::from([]), ());
        let mut l3 = LeafNode::with_no_siblings(Box::from([]), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        assert!(n.lookup_child(123).is_none());

        n.header.inc_num_children();
        n.header.inc_num_children();
        n.header.inc_num_children();

        n.keys[0].write(3);
        n.keys[1].write(123);
        n.keys[2].write(1);

        n.child_pointers[0].write(l1_ptr);
        n.child_pointers[1].write(l2_ptr);
        n.child_pointers[2].write(l3_ptr);

        assert_eq!(n.lookup_child(123), Some(l2_ptr));
    }
794
    // Fills a Node16 to capacity (16 children) via the shared write-child
    // fixture.
    #[test]
    fn node16_write_child() {
        inner_node_write_child_test(InnerNode16::<_, _, 16>::empty(), 16)
    }

    // Exercises removal of all 16 children via the shared fixture.
    #[test]
    fn node16_remove_child() {
        inner_node_remove_child_test(InnerNode16::<_, _, 16>::empty(), 16)
    }

    // Writing a 17th child into a full Node16 must panic.
    #[test]
    #[should_panic]
    fn node16_write_child_full_panic() {
        inner_node_write_child_test(InnerNode16::<_, _, 16>::empty(), 17);
    }
810
    // Growing a Node16 to a Node48 requires the node to be completely full;
    // growing with only 3 children must trip the assert in `grow_node48`.
    #[test]
    #[should_panic = "Node must be full to grow to node 48"]
    fn node16_grow_panic() {
        let mut n16 = InnerNode16::<Box<[u8]>, (), 16>::empty();
        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
        let l3_ptr = NodePtr::from(&mut l3).to_opaque();

        n16.write_child(3, l1_ptr);
        n16.write_child(123, l2_ptr);
        n16.write_child(1, l3_ptr);

        let _n48 = n16.grow();
    }
828
829    #[test]
830    fn node16_grow() {
831        let mut n16 = InnerNode16::<Box<[u8]>, (), 16>::empty();
832        let mut v = Vec::new();
833        for i in 0..16 {
834            let mut l = LeafNode::with_no_siblings(vec![].into(), ());
835            let l_ptr = NodePtr::from(&mut l).to_opaque();
836            v.push(l_ptr);
837            n16.write_child(i * 2, l_ptr);
838        }
839
840        let n48 = n16.grow();
841
842        for i in 0..16 {
843            assert_eq!(n48.lookup_child(i * 2), Some(v[i as usize]));
844        }
845    }
846
847    #[test]
848    fn node16_shrink() {
849        inner_node_shrink_test(InnerNode16::<_, _, 16>::empty(), 4);
850    }
851
852    #[test]
853    #[should_panic = "Cannot change InnerNodeCompressed<16> to size 4 when it has more than 4 \
854                      children. Currently has [5] children."]
855    fn node16_shrink_too_many_children_panic() {
856        inner_node_shrink_test(InnerNode16::<_, _, 16>::empty(), 5);
857    }
858
859    #[test]
860    fn node16_min_max() {
861        inner_node_min_max_test(InnerNode16::<_, _, 16>::empty(), 16);
862    }
863
864    fn node4_fixture() -> FixtureReturn<InnerNode4<Box<[u8]>, (), 16>, 4> {
865        let mut n4 = InnerNode4::empty();
866        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
867        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
868        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
869        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
870        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
871        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
872        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
873        let l4_ptr = NodePtr::from(&mut l4).to_opaque();
874
875        n4.write_child(3, l1_ptr);
876        n4.write_child(255, l2_ptr);
877        n4.write_child(0u8, l3_ptr);
878        n4.write_child(85, l4_ptr);
879
880        (n4, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
881    }
882
883    #[test]
884    fn node4_iterate() {
885        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node4_fixture();
886
887        let mut iter = node.iter();
888
889        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
890        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
891        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
892        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
893        assert_eq!(iter.next(), None);
894    }
895
896    #[test]
897    fn node4_iterate_rev() {
898        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node4_fixture();
899
900        let mut iter = node.iter().rev();
901
902        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
903        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
904        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
905        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
906        assert_eq!(iter.next(), None);
907    }
908
909    #[test]
910    fn node4_range_iterate() {
911        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node4_fixture();
912
913        #[track_caller]
914        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
915            node: &InnerNodeCompressed<K, V, PREFIX_LEN, 4>,
916            bound: impl RangeBounds<u8>,
917            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
918        ) {
919            let pairs = node.range(bound).collect::<Vec<_>>();
920            assert_eq!(pairs, expected_pairs);
921        }
922
923        check(
924            &node,
925            (Bound::Included(0), Bound::Included(3)),
926            [(0u8, l3_ptr), (3, l1_ptr)],
927        );
928        check(&node, (Bound::Excluded(0), Bound::Excluded(3)), []);
929        check(
930            &node,
931            (Bound::Included(0), Bound::Included(0)),
932            [(0u8, l3_ptr)],
933        );
934        check(
935            &node,
936            (Bound::Included(0), Bound::Included(255)),
937            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
938        );
939        check(
940            &node,
941            (Bound::Included(255), Bound::Included(255)),
942            [(255, l2_ptr)],
943        );
944        check(&node, (Bound::Included(255), Bound::Excluded(255)), []);
945        check(&node, (Bound::Excluded(255), Bound::Included(255)), []);
946        check(
947            &node,
948            (Bound::Excluded(0), Bound::Excluded(255)),
949            [(3, l1_ptr), (85, l4_ptr)],
950        );
951        check(
952            &node,
953            (Bound::<u8>::Unbounded, Bound::Unbounded),
954            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
955        );
956    }
957
958    fn node4_fixture_empty_edges() -> FixtureReturn<InnerNode4<Box<[u8]>, (), 16>, 4> {
959        let mut n4 = InnerNode4::empty();
960        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
961        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
962        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
963        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
964        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
965        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
966        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
967        let l4_ptr = NodePtr::from(&mut l4).to_opaque();
968
969        n4.write_child(3, l1_ptr);
970        n4.write_child(254, l2_ptr);
971        n4.write_child(2u8, l3_ptr);
972        n4.write_child(85, l4_ptr);
973
974        (n4, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
975    }
976
977    #[test]
978    fn node4_range_iterate_boundary_conditions() {
979        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node4_fixture_empty_edges();
980
981        #[track_caller]
982        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
983            node: &InnerNodeCompressed<K, V, PREFIX_LEN, 4>,
984            bound: impl RangeBounds<u8>,
985            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
986        ) {
987            let pairs = node.range(bound).collect::<Vec<_>>();
988            assert_eq!(pairs, expected_pairs);
989        }
990
991        check(
992            &node,
993            (Bound::<u8>::Unbounded, Bound::Included(86)),
994            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
995        );
996        check(
997            &node,
998            (Bound::<u8>::Unbounded, Bound::Included(4)),
999            [(2u8, l3_ptr), (3, l1_ptr)],
1000        );
1001        check(
1002            &node,
1003            (Bound::<u8>::Unbounded, Bound::Excluded(3)),
1004            [(2u8, l3_ptr)],
1005        );
1006        check(
1007            &node,
1008            (Bound::<u8>::Unbounded, Bound::Included(2)),
1009            [(2u8, l3_ptr)],
1010        );
1011        check(&node, (Bound::<u8>::Unbounded, Bound::Included(1)), []);
1012        check(&node, (Bound::<u8>::Unbounded, Bound::Included(0)), []);
1013
1014        check(
1015            &node,
1016            (Bound::Included(1), Bound::<u8>::Unbounded),
1017            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
1018        );
1019        check(
1020            &node,
1021            (Bound::Included(3), Bound::<u8>::Unbounded),
1022            [(3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
1023        );
1024        check(
1025            &node,
1026            (Bound::Excluded(84), Bound::<u8>::Unbounded),
1027            [(85, l4_ptr), (254, l2_ptr)],
1028        );
1029        check(
1030            &node,
1031            (Bound::Included(253), Bound::<u8>::Unbounded),
1032            [(254, l2_ptr)],
1033        );
1034        check(&node, (Bound::Included(255), Bound::<u8>::Unbounded), []);
1035    }
1036
1037    #[test]
1038    #[should_panic = "range start and end are equal and excluded: (80)"]
1039    fn node4_range_iterate_out_of_bounds_panic_both_excluded() {
1040        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = node4_fixture();
1041
1042        let _pairs = node
1043            .range((Bound::Excluded(80), Bound::Excluded(80)))
1044            .collect::<Vec<_>>();
1045    }
1046
1047    #[test]
1048    #[should_panic = "range start (80) is greater than range end (0)"]
1049    fn node4_range_iterate_start_greater_than_end() {
1050        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = node4_fixture();
1051
1052        let _pairs = node
1053            .range((Bound::Excluded(80), Bound::Included(0)))
1054            .collect::<Vec<_>>();
1055    }
1056
1057    fn node16_fixture() -> FixtureReturn<InnerNode16<Box<[u8]>, (), 16>, 4> {
1058        let mut n16 = InnerNode16::empty();
1059        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
1060        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
1061        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
1062        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
1063        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
1064        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
1065        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
1066        let l4_ptr = NodePtr::from(&mut l4).to_opaque();
1067
1068        n16.write_child(3, l1_ptr);
1069        n16.write_child(255, l2_ptr);
1070        n16.write_child(0u8, l3_ptr);
1071        n16.write_child(85, l4_ptr);
1072
1073        (n16, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
1074    }
1075
1076    #[test]
1077    fn node16_iterate() {
1078        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node16_fixture();
1079
1080        let mut iter = node.iter();
1081
1082        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
1083        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
1084        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
1085        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
1086        assert_eq!(iter.next(), None);
1087    }
1088
1089    #[test]
1090    fn node16_iterate_rev() {
1091        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node16_fixture();
1092
1093        let mut iter = node.iter().rev();
1094
1095        assert_eq!(iter.next().unwrap(), (255, l2_ptr));
1096        assert_eq!(iter.next().unwrap(), (85, l4_ptr));
1097        assert_eq!(iter.next().unwrap(), (3, l1_ptr));
1098        assert_eq!(iter.next().unwrap(), (0u8, l3_ptr));
1099        assert_eq!(iter.next(), None);
1100    }
1101
1102    #[test]
1103    fn node16_range_iterate() {
1104        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node16_fixture();
1105
1106        #[track_caller]
1107        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
1108            node: &InnerNodeCompressed<K, V, PREFIX_LEN, 16>,
1109            bound: impl RangeBounds<u8>,
1110            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
1111        ) {
1112            let pairs = node.range(bound).collect::<Vec<_>>();
1113            assert_eq!(pairs, expected_pairs);
1114        }
1115
1116        check(
1117            &node,
1118            (Bound::Included(0), Bound::Included(3)),
1119            [(0u8, l3_ptr), (3, l1_ptr)],
1120        );
1121        check(&node, (Bound::Excluded(0), Bound::Excluded(3)), []);
1122        check(
1123            &node,
1124            (Bound::Included(0), Bound::Included(0)),
1125            [(0u8, l3_ptr)],
1126        );
1127        check(
1128            &node,
1129            (Bound::Included(0), Bound::Included(255)),
1130            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
1131        );
1132        check(
1133            &node,
1134            (Bound::Included(255), Bound::Included(255)),
1135            [(255, l2_ptr)],
1136        );
1137        check(&node, (Bound::Included(255), Bound::Excluded(255)), []);
1138        check(&node, (Bound::Excluded(255), Bound::Included(255)), []);
1139        check(
1140            &node,
1141            (Bound::Excluded(0), Bound::Excluded(255)),
1142            [(3, l1_ptr), (85, l4_ptr)],
1143        );
1144        check(
1145            &node,
1146            (Bound::<u8>::Unbounded, Bound::Unbounded),
1147            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (255, l2_ptr)],
1148        );
1149        check(
1150            &node,
1151            (Bound::<u8>::Unbounded, Bound::Included(86)),
1152            [(0u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
1153        );
1154    }
1155
1156    fn node16_fixture_empty_edges() -> FixtureReturn<InnerNode16<Box<[u8]>, (), 16>, 4> {
1157        let mut n16 = InnerNode16::empty();
1158        let mut l1 = LeafNode::with_no_siblings(vec![].into(), ());
1159        let mut l2 = LeafNode::with_no_siblings(vec![].into(), ());
1160        let mut l3 = LeafNode::with_no_siblings(vec![].into(), ());
1161        let mut l4 = LeafNode::with_no_siblings(vec![].into(), ());
1162        let l1_ptr = NodePtr::from(&mut l1).to_opaque();
1163        let l2_ptr = NodePtr::from(&mut l2).to_opaque();
1164        let l3_ptr = NodePtr::from(&mut l3).to_opaque();
1165        let l4_ptr = NodePtr::from(&mut l4).to_opaque();
1166
1167        n16.write_child(3, l1_ptr);
1168        n16.write_child(254, l2_ptr);
1169        n16.write_child(2u8, l3_ptr);
1170        n16.write_child(85, l4_ptr);
1171
1172        (n16, [l1, l2, l3, l4], [l1_ptr, l2_ptr, l3_ptr, l4_ptr])
1173    }
1174
1175    #[test]
1176    fn node16_range_iterate_boundary_conditions() {
1177        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node16_fixture_empty_edges();
1178
1179        #[track_caller]
1180        fn check<K, V, const PREFIX_LEN: usize, const N: usize>(
1181            node: &InnerNodeCompressed<K, V, PREFIX_LEN, 16>,
1182            bound: impl RangeBounds<u8>,
1183            expected_pairs: [(u8, OpaqueNodePtr<K, V, PREFIX_LEN>); N],
1184        ) {
1185            let pairs = node.range(bound).collect::<Vec<_>>();
1186            assert_eq!(pairs, expected_pairs);
1187        }
1188
1189        check(
1190            &node,
1191            (Bound::<u8>::Unbounded, Bound::Included(86)),
1192            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr)],
1193        );
1194        check(
1195            &node,
1196            (Bound::<u8>::Unbounded, Bound::Included(4)),
1197            [(2u8, l3_ptr), (3, l1_ptr)],
1198        );
1199        check(
1200            &node,
1201            (Bound::<u8>::Unbounded, Bound::Excluded(3)),
1202            [(2u8, l3_ptr)],
1203        );
1204        check(
1205            &node,
1206            (Bound::<u8>::Unbounded, Bound::Included(2)),
1207            [(2u8, l3_ptr)],
1208        );
1209        check(&node, (Bound::<u8>::Unbounded, Bound::Included(1)), []);
1210        check(&node, (Bound::<u8>::Unbounded, Bound::Included(0)), []);
1211
1212        check(
1213            &node,
1214            (Bound::Included(1), Bound::<u8>::Unbounded),
1215            [(2u8, l3_ptr), (3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
1216        );
1217        check(
1218            &node,
1219            (Bound::Included(3), Bound::<u8>::Unbounded),
1220            [(3, l1_ptr), (85, l4_ptr), (254, l2_ptr)],
1221        );
1222        check(
1223            &node,
1224            (Bound::Excluded(84), Bound::<u8>::Unbounded),
1225            [(85, l4_ptr), (254, l2_ptr)],
1226        );
1227        check(
1228            &node,
1229            (Bound::Included(253), Bound::<u8>::Unbounded),
1230            [(254, l2_ptr)],
1231        );
1232        check(&node, (Bound::Included(255), Bound::<u8>::Unbounded), []);
1233    }
1234
1235    #[test]
1236    fn node16_range_edge_cases() {
1237        let (node, _, [l1_ptr, l2_ptr, l3_ptr, l4_ptr]) = node16_fixture();
1238
1239        let pairs = node
1240            .range((Bound::<u8>::Excluded(84), Bound::Unbounded))
1241            .collect::<Vec<_>>();
1242        assert_eq!(pairs, &[(85, l4_ptr), (255, l2_ptr),]);
1243
1244        let pairs = node
1245            .range((Bound::<u8>::Unbounded, Bound::Excluded(4)))
1246            .collect::<Vec<_>>();
1247        assert_eq!(pairs, &[(0u8, l3_ptr), (3, l1_ptr)]);
1248    }
1249
1250    #[test]
1251    #[should_panic = "range start and end are equal and excluded: (80)"]
1252    fn node16_range_iterate_out_of_bounds_panic_both_excluded() {
1253        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = node16_fixture();
1254
1255        let pairs = node
1256            .range((Bound::Excluded(80), Bound::Excluded(80)))
1257            .collect::<Vec<_>>();
1258        assert_eq!(pairs, &[]);
1259    }
1260
1261    #[test]
1262    #[should_panic = "range start (80) is greater than range end (0)"]
1263    fn node16_range_iterate_start_greater_than_end() {
1264        let (node, _, [_l1_ptr, _l2_ptr, _l3_ptr, _l4_ptr]) = node16_fixture();
1265
1266        let _pairs = node
1267            .range((Bound::Excluded(80), Bound::Included(0)))
1268            .collect::<Vec<_>>();
1269    }
1270}