Auto merge of #68827 - ssomers:btree_navigation_revisited, r=Mark-Simulacrum

BTreeMap navigation done safer & faster

It turns out that there was a faster way to write the tree navigation code bundled in #67073: move from edge to KV and from KV to the next edge as separate steps. This extracts most of the code as safe functions, and confines the duplication of handles to the short unsafe wrapper functions.
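To show the shape of that split without the real B-tree machinery, here is a minimal sketch on a flat sorted vector, where an "edge" sits between entries and a "KV" is an entry. All names (`Tree`, `EdgeHandle`, `KvHandle`, `next_kv`, `next_leaf_edge`) are illustrative stand-ins for the handle types in the diff, not the real API:

```rust
use core::hint::unreachable_unchecked;

/// Toy stand-in for the tree: edge i sits just before entry i.
struct Tree<K, V> {
    entries: Vec<(K, V)>,
}

#[derive(Clone, Copy)]
struct EdgeHandle(usize);

#[derive(Clone, Copy)]
struct KvHandle(usize);

impl EdgeHandle {
    /// Safe step 1: edge -> neighboring KV on the right,
    /// or Err(()) if this is the last edge in the tree.
    fn next_kv<K, V>(self, tree: &Tree<K, V>) -> Result<KvHandle, ()> {
        if self.0 < tree.entries.len() { Ok(KvHandle(self.0)) } else { Err(()) }
    }
}

impl KvHandle {
    /// Safe step 2: KV -> the next edge, just past the entry.
    fn next_leaf_edge(self) -> EdgeHandle {
        EdgeHandle(self.0 + 1)
    }
}

/// Thin unsafe wrapper in the style of `next_unchecked`: the caller must
/// guarantee the edge is not the last one, so the Err arm is unreachable
/// and the two safe steps compose without a bounds check on the happy path.
unsafe fn next_unchecked<'a, K, V>(
    edge: &mut EdgeHandle,
    tree: &'a Tree<K, V>,
) -> (&'a K, &'a V) {
    let kv = match edge.next_kv(tree) {
        Ok(kv) => kv,
        Err(()) => unreachable_unchecked(),
    };
    *edge = kv.next_leaf_edge();
    let (k, v) = &tree.entries[kv.0];
    (k, v)
}

fn main() {
    let tree = Tree { entries: vec![(1, "a"), (2, "b")] };
    let mut edge = EdgeHandle(0);
    for _ in 0..tree.entries.len() {
        let (k, v) = unsafe { next_unchecked(&mut edge, &tree) };
        println!("{k}: {v}");
    }
}
```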

This somehow hits a sweet spot in the compiler, because the benchmarks report boosts all over the board:
```
>cargo benchcmp pre3.txt posz4.txt --threshold 5
 name                                           pre3.txt ns/iter  posz4.txt ns/iter  diff ns/iter   diff %  speedup
 btree::map::first_and_last_0                   40                37                           -3   -7.50%   x 1.08
 btree::map::first_and_last_100                 58                44                          -14  -24.14%   x 1.32
 btree::map::iter_1000                          8,920             3,419                    -5,501  -61.67%   x 2.61
 btree::map::iter_100000                        1,069,290         411,615                -657,675  -61.51%   x 2.60
 btree::map::iter_20                            169               58                         -111  -65.68%   x 2.91
 btree::map::iter_mut_1000                      8,701             3,303                    -5,398  -62.04%   x 2.63
 btree::map::iter_mut_100000                    1,034,560         405,975                -628,585  -60.76%   x 2.55
 btree::map::iter_mut_20                        165               58                         -107  -64.85%   x 2.84
 btree::set::clone_100                          1,831             1,562                      -269  -14.69%   x 1.17
 btree::set::clone_100_and_clear                1,831             1,565                      -266  -14.53%   x 1.17
 btree::set::clone_100_and_into_iter            1,917             1,541                      -376  -19.61%   x 1.24
 btree::set::clone_100_and_pop_all              2,609             2,441                      -168   -6.44%   x 1.07
 btree::set::clone_100_and_remove_all           4,598             3,927                      -671  -14.59%   x 1.17
 btree::set::clone_100_and_remove_half          2,765             2,551                      -214   -7.74%   x 1.08
 btree::set::clone_10k                          191,610           164,616                 -26,994  -14.09%   x 1.16
 btree::set::clone_10k_and_clear                192,003           164,616                 -27,387  -14.26%   x 1.17
 btree::set::clone_10k_and_into_iter            200,037           163,010                 -37,027  -18.51%   x 1.23
 btree::set::clone_10k_and_pop_all              267,023           250,913                 -16,110   -6.03%   x 1.06
 btree::set::clone_10k_and_remove_all           536,230           464,100                 -72,130  -13.45%   x 1.16
 btree::set::clone_10k_and_remove_half          453,350           430,545                 -22,805   -5.03%   x 1.05
 btree::set::difference_random_100_vs_100       1,787             801                        -986  -55.18%   x 2.23
 btree::set::difference_random_100_vs_10k       2,978             2,696                      -282   -9.47%   x 1.10
 btree::set::difference_random_10k_vs_100       111,075           54,734                  -56,341  -50.72%   x 2.03
 btree::set::difference_random_10k_vs_10k       246,380           175,980                 -70,400  -28.57%   x 1.40
 btree::set::difference_staggered_100_vs_100    1,789             951                        -838  -46.84%   x 1.88
 btree::set::difference_staggered_100_vs_10k    2,798             2,606                      -192   -6.86%   x 1.07
 btree::set::difference_staggered_10k_vs_10k    176,452           97,401                  -79,051  -44.80%   x 1.81
 btree::set::intersection_100_neg_vs_10k_pos    34                32                           -2   -5.88%   x 1.06
 btree::set::intersection_100_pos_vs_100_neg    30                27                           -3  -10.00%   x 1.11
 btree::set::intersection_random_100_vs_100     1,537             613                        -924  -60.12%   x 2.51
 btree::set::intersection_random_100_vs_10k     2,793             2,649                      -144   -5.16%   x 1.05
 btree::set::intersection_random_10k_vs_10k     222,127           147,166                 -74,961  -33.75%   x 1.51
 btree::set::intersection_staggered_100_vs_100  1,447             622                        -825  -57.01%   x 2.33
 btree::set::intersection_staggered_100_vs_10k  2,606             2,382                      -224   -8.60%   x 1.09
 btree::set::intersection_staggered_10k_vs_10k  143,620           58,790                  -84,830  -59.07%   x 2.44
 btree::set::is_subset_100_vs_100               1,349             488                        -861  -63.83%   x 2.76
 btree::set::is_subset_100_vs_10k               1,720             1,428                      -292  -16.98%   x 1.20
 btree::set::is_subset_10k_vs_10k               135,984           48,527                  -87,457  -64.31%   x 2.80
```
The `first_and_last` ones are noise (they don't do iteration), the others seem genuine.
As always, approved by Miri.

Also included is a separate commit with some more benchmarks of mutable behaviour (which also benefits from this change).

r? @cuviper
bors 2020-02-28 09:32:34 +00:00
commit e2223c94bf
4 changed files with 234 additions and 167 deletions


@@ -50,43 +50,112 @@ macro_rules! set_bench {
};
}
const BUILD_SET_SIZE: usize = 100;
#[bench]
pub fn build_and_clear(b: &mut Bencher) {
b.iter(|| pos(BUILD_SET_SIZE).clear())
pub fn clone_100(b: &mut Bencher) {
let src = pos(100);
b.iter(|| src.clone())
}
#[bench]
pub fn build_and_drop(b: &mut Bencher) {
b.iter(|| pos(BUILD_SET_SIZE))
pub fn clone_100_and_clear(b: &mut Bencher) {
let src = pos(100);
b.iter(|| src.clone().clear())
}
#[bench]
pub fn build_and_into_iter(b: &mut Bencher) {
b.iter(|| pos(BUILD_SET_SIZE).into_iter().count())
pub fn clone_100_and_into_iter(b: &mut Bencher) {
let src = pos(100);
b.iter(|| src.clone().into_iter().count())
}
#[bench]
pub fn build_and_pop_all(b: &mut Bencher) {
pub fn clone_100_and_pop_all(b: &mut Bencher) {
let src = pos(100);
b.iter(|| {
let mut s = pos(BUILD_SET_SIZE);
while s.pop_first().is_some() {}
s
let mut set = src.clone();
while set.pop_first().is_some() {}
set
});
}
#[bench]
pub fn build_and_remove_all(b: &mut Bencher) {
pub fn clone_100_and_remove_all(b: &mut Bencher) {
let src = pos(100);
b.iter(|| {
let mut s = pos(BUILD_SET_SIZE);
while let Some(elt) = s.iter().copied().next() {
s.remove(&elt);
let mut set = src.clone();
while let Some(elt) = set.iter().copied().next() {
set.remove(&elt);
}
s
set
});
}
#[bench]
pub fn clone_100_and_remove_half(b: &mut Bencher) {
let src = pos(100);
b.iter(|| {
let mut set = src.clone();
for i in (2..=100 as i32).step_by(2) {
set.remove(&i);
}
assert_eq!(set.len(), 100 / 2);
set
})
}
#[bench]
pub fn clone_10k(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| src.clone())
}
#[bench]
pub fn clone_10k_and_clear(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| src.clone().clear())
}
#[bench]
pub fn clone_10k_and_into_iter(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| src.clone().into_iter().count())
}
#[bench]
pub fn clone_10k_and_pop_all(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| {
let mut set = src.clone();
while set.pop_first().is_some() {}
set
});
}
#[bench]
pub fn clone_10k_and_remove_all(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| {
let mut set = src.clone();
while let Some(elt) = set.iter().copied().next() {
set.remove(&elt);
}
set
});
}
#[bench]
pub fn clone_10k_and_remove_half(b: &mut Bencher) {
let src = pos(10_000);
b.iter(|| {
let mut set = src.clone();
for i in (2..=10_000 as i32).step_by(2) {
set.remove(&i);
}
assert_eq!(set.len(), 10_000 / 2);
set
})
}
set_bench! {intersection_100_neg_vs_100_pos, intersection, count, [neg(100), pos(100)]}
set_bench! {intersection_100_neg_vs_10k_pos, intersection, count, [neg(100), pos(10_000)]}
set_bench! {intersection_100_pos_vs_100_neg, intersection, count, [pos(100), neg(100)]}
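All the `clone_*` benchmarks above follow one idiom: build the source set once, outside the timed closure, and clone it inside, so each iteration measures clone-plus-operation instead of construction from scratch. A minimal standalone sketch of that idiom, assuming the nightly `test` crate and a stand-in `pos` helper (the real helper lives elsewhere in this file):

```rust
#![feature(test)]
extern crate test;

use std::collections::BTreeSet;
use test::Bencher;

/// Stand-in for this file's `pos` helper: a set of n positive integers.
fn pos(n: usize) -> BTreeSet<i32> {
    (1..=n as i32).collect()
}

/// Construction happens once, up front; each timed iteration pays only
/// for the clone and the operation under test.
#[bench]
fn clone_100_and_clear(b: &mut Bencher) {
    let src = pos(100);
    b.iter(|| src.clone().clear())
}
```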


@@ -1487,16 +1487,13 @@ impl<K, V> Drop for IntoIter<K, V> {
}
unsafe {
let leaf_node = ptr::read(&self.front).into_node();
if leaf_node.is_shared_root() {
let mut node = ptr::read(&self.front).into_node().forget_type();
if node.is_shared_root() {
return;
}
if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
let mut cur_internal_node = first_parent.into_node();
while let Some(parent) = cur_internal_node.deallocate_and_ascend() {
cur_internal_node = parent.into_node()
}
while let Some(parent) = node.deallocate_and_ascend() {
node = parent.into_node().forget_type();
}
}
}
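The reworked `Drop` above is a bottom-up sweep: free the current node, hop to its parent, and repeat until there is no parent left. A safe toy model of that loop shape, with hypothetical types (`ToyNode` is not the real node, and ownership stands in for raw deallocation):

```rust
/// Toy version of the ascent loop in `Drop for IntoIter`.
struct ToyNode {
    parent: Option<Box<ToyNode>>,
    name: &'static str,
}

impl ToyNode {
    /// Stand-in for `deallocate_and_ascend`: gives up ownership of self
    /// (freed here, by dropping) and returns the parent, if any.
    fn deallocate_and_ascend(self) -> Option<Box<ToyNode>> {
        println!("freeing {}", self.name);
        self.parent
    }
}

fn main() {
    // root <- mid <- leaf, swept bottom-up as in the Drop impl.
    let root = Box::new(ToyNode { parent: None, name: "root" });
    let mid = Box::new(ToyNode { parent: Some(root), name: "mid" });
    let leaf = ToyNode { parent: Some(mid), name: "leaf" };

    let mut node = leaf;
    while let Some(parent) = node.deallocate_and_ascend() {
        node = *parent;
    }
}
```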


@@ -3,120 +3,69 @@ use core::ptr;
use super::node::{marker, ForceResult::*, Handle, NodeRef};
use super::unwrap_unchecked;
macro_rules! def_next {
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
/// Given a leaf edge handle into an immutable tree, returns a handle to the next
/// leaf edge and references to the key and value between these edges.
/// Unsafe because the caller must ensure that the given leaf edge has a successor.
unsafe fn $name <'a, K: 'a, V: 'a>(
leaf_edge: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
) -> (Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>, &'a K, &'a V) {
let mut cur_handle = match leaf_edge.$next_kv() {
Ok(leaf_kv) => {
let (k, v) = leaf_kv.into_kv();
let next_leaf_edge = leaf_kv.$next_edge();
return (next_leaf_edge, k, v);
}
Err(last_edge) => {
let next_level = last_edge.into_node().ascend().ok();
unwrap_unchecked(next_level)
}
};
loop {
cur_handle = match cur_handle.$next_kv() {
Ok(internal_kv) => {
let (k, v) = internal_kv.into_kv();
let next_internal_edge = internal_kv.$next_edge();
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
return (next_leaf_edge, k, v);
}
Err(last_edge) => {
let next_level = last_edge.into_node().ascend().ok();
unwrap_unchecked(next_level)
}
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
/// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
/// on the right side, which is either in the same leaf node or in an ancestor node.
/// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
pub fn next_kv(
self,
) -> Result<
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
> {
let mut edge = self.forget_node_type();
loop {
edge = match edge.right_kv() {
Ok(internal_kv) => return Ok(internal_kv),
Err(last_edge) => match last_edge.into_node().ascend() {
Ok(parent_edge) => parent_edge.forget_node_type(),
Err(root) => return Err(root.forget_type()),
},
}
}
};
}
}
macro_rules! def_next_mut {
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
/// Given a leaf edge handle into a mutable tree, returns handles to the next
/// leaf edge and to the KV between these edges.
/// Unsafe for two reasons:
/// - the caller must ensure that the given leaf edge has a successor;
/// - both returned handles represent mutable references into the same tree
/// that can easily invalidate each other, even on immutable use.
unsafe fn $name <'a, K: 'a, V: 'a>(
leaf_edge: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>) {
let mut cur_handle = match leaf_edge.$next_kv() {
Ok(leaf_kv) => {
let next_leaf_edge = ptr::read(&leaf_kv).$next_edge();
return (next_leaf_edge, leaf_kv.forget_node_type());
}
Err(last_edge) => {
let next_level = last_edge.into_node().ascend().ok();
unwrap_unchecked(next_level)
}
};
loop {
cur_handle = match cur_handle.$next_kv() {
Ok(internal_kv) => {
let next_internal_edge = ptr::read(&internal_kv).$next_edge();
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
return (next_leaf_edge, internal_kv.forget_node_type());
}
Err(last_edge) => {
let next_level = last_edge.into_node().ascend().ok();
unwrap_unchecked(next_level)
}
}
/// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
/// on the left side, which is either in the same leaf node or in an ancestor node.
/// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
pub fn next_back_kv(
self,
) -> Result<
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
> {
let mut edge = self.forget_node_type();
loop {
edge = match edge.left_kv() {
Ok(internal_kv) => return Ok(internal_kv),
Err(last_edge) => match last_edge.into_node().ascend() {
Ok(parent_edge) => parent_edge.forget_node_type(),
Err(root) => return Err(root.forget_type()),
},
}
}
};
}
}
macro_rules! def_next_dealloc {
{ unsafe fn $name:ident : $next_kv:ident $next_edge:ident $initial_leaf_edge:ident } => {
/// Given a leaf edge handle into an owned tree, returns a handle to the next
/// leaf edge and the key and value between these edges, while deallocating
/// any node left behind.
macro_rules! def_next_kv_unchecked_dealloc {
{ unsafe fn $name:ident : $adjacent_kv:ident } => {
/// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
/// while deallocating any node left behind.
/// Unsafe for two reasons:
/// - the caller must ensure that the given leaf edge has a successor;
/// - the node pointed at by the given handle, and its ancestors, may be deallocated,
/// - The caller must ensure that the leaf edge is not the last one in the tree.
/// - The node pointed at by the given handle, and its ancestors, may be deallocated,
/// while the reference to those nodes in the surviving ancestors is left dangling;
/// thus using the returned handle is dangerous.
/// thus using the returned handle to navigate further is dangerous.
unsafe fn $name <K, V>(
leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
) -> (Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>, K, V) {
let mut cur_handle = match leaf_edge.$next_kv() {
Ok(leaf_kv) => {
let k = ptr::read(leaf_kv.reborrow().into_kv().0);
let v = ptr::read(leaf_kv.reborrow().into_kv().1);
let next_leaf_edge = leaf_kv.$next_edge();
return (next_leaf_edge, k, v);
}
Err(last_edge) => {
unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
}
};
) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
let mut edge = leaf_edge.forget_node_type();
loop {
cur_handle = match cur_handle.$next_kv() {
Ok(internal_kv) => {
let k = ptr::read(internal_kv.reborrow().into_kv().0);
let v = ptr::read(internal_kv.reborrow().into_kv().1);
let next_internal_edge = internal_kv.$next_edge();
let next_leaf_edge = next_internal_edge.descend().$initial_leaf_edge();
return (next_leaf_edge, k, v);
}
edge = match edge.$adjacent_kv() {
Ok(internal_kv) => return internal_kv,
Err(last_edge) => {
unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
let parent_edge = last_edge.into_node().deallocate_and_ascend();
unwrap_unchecked(parent_edge).forget_node_type()
}
}
}
@@ -124,30 +73,42 @@ macro_rules! def_next_dealloc {
};
}
def_next! {unsafe fn next_unchecked: right_kv right_edge first_leaf_edge}
def_next! {unsafe fn next_back_unchecked: left_kv left_edge last_leaf_edge}
def_next_mut! {unsafe fn next_unchecked_mut: right_kv right_edge first_leaf_edge}
def_next_mut! {unsafe fn next_back_unchecked_mut: left_kv left_edge last_leaf_edge}
def_next_dealloc! {unsafe fn next_unchecked_deallocating: right_kv right_edge first_leaf_edge}
def_next_dealloc! {unsafe fn next_back_unchecked_deallocating: left_kv left_edge last_leaf_edge}
def_next_kv_unchecked_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
def_next_kv_unchecked_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
/// This replaces the value behind the `v` unique reference by calling the
/// relevant function.
///
/// Safety: The change closure must not panic.
#[inline]
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
let value = ptr::read(v);
let (new_value, ret) = change(value);
ptr::write(v, new_value);
ret
}
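As the safety comment says, `replace` is only sound if the closure cannot panic: `ptr::read` duplicates the value, so unwinding before the `ptr::write` would drop the duplicate while the caller still owns the original, a double drop. A standalone illustration (the helper matches the diff in shape; the `main` is made up):

```rust
use core::ptr;

/// Take the value out, transform it, write the result back. Between the
/// read and the write, `*v` and `value` alias the same data, which is
/// why `change` must not panic.
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    let value = ptr::read(v);
    let (new_value, ret) = change(value);
    ptr::write(v, new_value);
    ret
}

fn main() {
    let mut s = String::from("hello");
    // If this closure panicked, unwinding would drop its owned String
    // while the caller's `s` still points at the same buffer.
    let len = unsafe { replace(&mut s, |owned| { let n = owned.len(); (owned, n) }) };
    assert_eq!(len, 5);
    assert_eq!(s, "hello");
}
```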
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
/// Moves the leaf edge handle to the next leaf edge and returns references to the
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
let (next_edge, k, v) = next_unchecked(*self);
*self = next_edge;
(k, v)
replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_leaf_edge(), kv.into_kv())
})
}
/// Moves the leaf edge handle to the previous leaf edge and returns references to the
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
let (next_edge, k, v) = next_back_unchecked(*self);
*self = next_edge;
(k, v)
replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_back_leaf_edge(), kv.into_kv())
})
}
}
@@ -158,8 +119,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the last one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let (next_edge, kv) = next_unchecked_mut(ptr::read(self));
*self = next_edge;
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
@@ -171,8 +135,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the first one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let (next_edge, kv) = next_back_unchecked_mut(ptr::read(self));
*self = next_edge;
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_back_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
@@ -192,9 +159,12 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
let (next_edge, k, v) = next_unchecked_deallocating(ptr::read(self));
*self = next_edge;
(k, v)
replace(self, |leaf_edge| {
let kv = next_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_leaf_edge(), (k, v))
})
}
/// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -209,9 +179,12 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
let (next_edge, k, v) = next_back_unchecked_deallocating(ptr::read(self));
*self = next_edge;
(k, v)
replace(self, |leaf_edge| {
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_back_leaf_edge(), (k, v))
})
}
}
@@ -242,3 +215,29 @@ impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
}
}
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
/// Returns the leaf edge closest to a KV for forward navigation.
pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
match self.force() {
Leaf(leaf_kv) => leaf_kv.right_edge(),
Internal(internal_kv) => {
let next_internal_edge = internal_kv.right_edge();
next_internal_edge.descend().first_leaf_edge()
}
}
}
/// Returns the leaf edge closest to a KV for backward navigation.
pub fn next_back_leaf_edge(
self,
) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
match self.force() {
Leaf(leaf_kv) => leaf_kv.left_edge(),
Internal(internal_kv) => {
let next_internal_edge = internal_kv.left_edge();
next_internal_edge.descend().last_leaf_edge()
}
}
}
}
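In the `Internal` arms above, the interesting work is the descent: take the adjacent internal edge, then walk down to the nearest leaf edge. A toy sketch of the forward case's leftmost descent, with illustrative types (`Node` and `first_leaf` are not the real handles):

```rust
/// Toy tree: a node is a leaf, or an internal node with children.
enum Node<K> {
    Leaf(Vec<K>),
    Internal(Vec<K>, Vec<Node<K>>),
}

/// Mirrors the idea of `first_leaf_edge`: from a subtree root, keep
/// descending through child 0 until a leaf is reached.
fn first_leaf<K>(mut node: &Node<K>) -> &Vec<K> {
    loop {
        match node {
            Node::Leaf(keys) => return keys,
            Node::Internal(_, children) => node = &children[0],
        }
    }
}

fn main() {
    let tree = Node::Internal(
        vec![10],
        vec![Node::Leaf(vec![1, 2]), Node::Leaf(vec![11, 12])],
    );
    assert_eq!(first_leaf(&tree), &vec![1, 2]);
    println!("leftmost leaf reached");
}
```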


@@ -452,7 +452,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
}
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocate the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
@@ -460,23 +460,17 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
self,
) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
assert!(!self.is_shared_root());
let height = self.height;
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
ret
}
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocate the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(
self,
) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
Global.dealloc(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
} else {
Layout::new::<LeafNode<K, V>>()
},
);
ret
}
}
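The merged `deallocate_and_ascend` above hinges on one observation: leaves live at height 0, so the layout to free can be picked from the node's height instead of needing a separate impl per node type. A minimal sketch of just that decision, with hypothetical toy node layouts (error handling elided):

```rust
use std::alloc::{alloc, dealloc, Layout};

// Illustrative stand-ins: an internal node is a leaf node plus edge
// pointers, hence the larger layout.
#[repr(C)]
struct LeafNode {
    keys: [u32; 11],
}
#[repr(C)]
struct InternalNode {
    data: LeafNode,
    edges: [*const u8; 12],
}

/// The unified rule from the diff: choose the layout to free from the
/// node's height, since only height-0 nodes are leaves.
unsafe fn dealloc_node(node: *mut u8, height: usize) {
    let layout = if height > 0 {
        Layout::new::<InternalNode>()
    } else {
        Layout::new::<LeafNode>()
    };
    dealloc(node, layout);
}

fn main() {
    unsafe {
        let leaf = alloc(Layout::new::<LeafNode>());
        dealloc_node(leaf, 0);
        let internal = alloc(Layout::new::<InternalNode>());
        dealloc_node(internal, 1);
    }
}
```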
@@ -1427,15 +1421,23 @@ unsafe fn move_edges<K, V>(
dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
pub fn forget_node_type(
self,
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
}
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
pub fn forget_node_type(
self,
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
}
}
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
pub fn forget_node_type(
self,
) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {