Commit d0485c7

Auto merge of #86520 - ssomers:btree_iterators_checked_unwrap, r=Mark-Simulacrum

BTree: consistently avoid unwrap_unchecked in iterators

Some iterator support functions named `_unchecked` internally use `unwrap`, while others use `unwrap_unchecked`. This PR settles on `unwrap`. #86195 went down the same road but travelled much further and does not seem to have been successful.

r? `@Mark-Simulacrum`
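
For readers less familiar with the two `Option`/`Result` methods involved: `unwrap` checks the variant at runtime and panics if the value is missing, while `unwrap_unchecked` skips that check, is `unsafe`, and turns a missing value into undefined behaviour. Below is a minimal, standalone sketch of the `debug_assert!` + `unwrap_unchecked` pattern this commit removes versus the plain `unwrap` it keeps; the function names are made up for illustration and do not appear in navigate.rs:

```rust
// Hypothetical, self-contained sketch; these functions are not part of navigate.rs.
fn checked_get(front: Option<u32>) -> u32 {
    // `Option::unwrap` checks the variant at runtime and panics on `None`.
    front.unwrap()
}

/// # Safety
/// The caller must guarantee that `front` is `Some`.
unsafe fn unchecked_get(front: Option<u32>) -> u32 {
    // Verified only in debug builds; passing `None` in a release build
    // is undefined behaviour.
    debug_assert!(front.is_some());
    unsafe { front.unwrap_unchecked() }
}

fn main() {
    assert_eq!(checked_get(Some(7)), 7);
    // Callers of the unsafe variant must uphold the `Some` invariant themselves.
    assert_eq!(unsafe { unchecked_get(Some(7)) }, 7);
}
```

Note that in the B-tree iterator code the surrounding `*_unchecked` functions stay `pub unsafe fn` either way; the change only affects how the internal `Option`/`Result` is consumed.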
2 parents: 0deb536 + 6a5b645

1 file changed (+10, -24 lines)


library/alloc/src/collections/btree/navigate.rs (+10, -24)
@@ -47,14 +47,12 @@ impl<'a, K, V> LeafRange<marker::Immut<'a>, K, V> {

     #[inline]
     pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
-        debug_assert!(self.front.is_some());
-        unsafe { self.front.as_mut().unwrap_unchecked().next_unchecked() }
+        unsafe { self.front.as_mut().unwrap().next_unchecked() }
     }

     #[inline]
     pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
-        debug_assert!(self.back.is_some());
-        unsafe { self.back.as_mut().unwrap_unchecked().next_back_unchecked() }
+        unsafe { self.back.as_mut().unwrap().next_back_unchecked() }
     }
 }

@@ -71,14 +69,12 @@ impl<'a, K, V> LeafRange<marker::ValMut<'a>, K, V> {

     #[inline]
     pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
-        debug_assert!(self.front.is_some());
-        unsafe { self.front.as_mut().unwrap_unchecked().next_unchecked() }
+        unsafe { self.front.as_mut().unwrap().next_unchecked() }
     }

     #[inline]
     pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
-        debug_assert!(self.back.is_some());
-        unsafe { self.back.as_mut().unwrap_unchecked().next_back_unchecked() }
+        unsafe { self.back.as_mut().unwrap().next_back_unchecked() }
     }
 }

@@ -400,9 +396,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
     /// There must be another KV in the direction travelled.
     unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
         super::mem::replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            debug_assert!(kv.is_ok());
-            let kv = unsafe { kv.ok().unwrap_unchecked() };
+            let kv = leaf_edge.next_kv().ok().unwrap();
             (kv.next_leaf_edge(), kv.into_kv())
         })
     }
@@ -414,9 +408,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
     /// There must be another KV in the direction travelled.
     unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
         super::mem::replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            debug_assert!(kv.is_ok());
-            let kv = unsafe { kv.ok().unwrap_unchecked() };
+            let kv = leaf_edge.next_back_kv().ok().unwrap();
             (kv.next_back_leaf_edge(), kv.into_kv())
         })
     }
@@ -430,9 +422,7 @@ impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::E
     /// There must be another KV in the direction travelled.
     unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
         let kv = super::mem::replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            debug_assert!(kv.is_ok());
-            let kv = unsafe { kv.ok().unwrap_unchecked() };
+            let kv = leaf_edge.next_kv().ok().unwrap();
             (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
         });
         // Doing this last is faster, according to benchmarks.
@@ -446,9 +436,7 @@ impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::E
     /// There must be another KV in the direction travelled.
     unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
         let kv = super::mem::replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            debug_assert!(kv.is_ok());
-            let kv = unsafe { kv.ok().unwrap_unchecked() };
+            let kv = leaf_edge.next_back_kv().ok().unwrap();
             (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
         });
         // Doing this last is faster, according to benchmarks.
@@ -472,9 +460,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     pub unsafe fn deallocating_next_unchecked(
         &mut self,
     ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
-        super::mem::replace(self, |leaf_edge| unsafe {
-            leaf_edge.deallocating_next().unwrap_unchecked()
-        })
+        super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next().unwrap() })
     }

     /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
@@ -493,7 +479,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
         &mut self,
     ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
         super::mem::replace(self, |leaf_edge| unsafe {
-            leaf_edge.deallocating_next_back().unwrap_unchecked()
+            leaf_edge.deallocating_next_back().unwrap()
         })
     }
 }
