diff --git a/src/libcore/iter/adapters/chain.rs b/src/libcore/iter/adapters/chain.rs
index c9612596b1ba0..016fa411bb360 100644
--- a/src/libcore/iter/adapters/chain.rs
+++ b/src/libcore/iter/adapters/chain.rs
@@ -54,7 +54,6 @@ impl<A, B> Iterator for Chain<A, B> where
{
type Item = A::Item;
- #[inline]
fn next(&mut self) -> Option<A::Item> {
match self.state {
ChainState::Both => match self.a.next() {
@@ -117,7 +116,6 @@ impl<A, B> Iterator for Chain<A, B> where
accum
}
- #[inline]
fn nth(&mut self, mut n: usize) -> Option<A::Item> {
match self.state {
ChainState::Both | ChainState::Front => {
@@ -157,7 +155,6 @@ impl<A, B> Iterator for Chain<A, B> where
}
}
- #[inline]
fn last(self) -> Option<A::Item> {
match self.state {
ChainState::Both => {
@@ -198,7 +195,6 @@ impl<A, B> DoubleEndedIterator for Chain<A, B> where
A: DoubleEndedIterator,
B: DoubleEndedIterator<Item=A::Item>,
{
- #[inline]
fn next_back(&mut self) -> Option<A::Item> {
match self.state {
ChainState::Both => match self.b.next_back() {
@@ -213,7 +209,6 @@ impl<A, B> DoubleEndedIterator for Chain<A, B> where
}
}
- #[inline]
fn nth_back(&mut self, mut n: usize) -> Option<A::Item> {
match self.state {
ChainState::Both | ChainState::Back => {
diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs
index 7353ca9285ddb..f9440866e4925 100644
--- a/src/librustc_mir/build/block.rs
+++ b/src/librustc_mir/build/block.rs
@@ -1,18 +1,22 @@
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::build::ForGuard::OutsideGuard;
use crate::build::matches::ArmHasGuard;
+use crate::build::scope::DropKind;
use crate::hair::*;
+use rustc::middle::region;
use rustc::mir::*;
use rustc::hir;
use syntax_pos::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> {
- pub fn ast_block(&mut self,
- destination: &Place<'tcx>,
- block: BasicBlock,
- ast_block: &'tcx hir::Block,
- source_info: SourceInfo)
- -> BlockAnd<()> {
+ pub fn ast_block(
+ &mut self,
+ destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
+ block: BasicBlock,
+ ast_block: &'tcx hir::Block,
+ source_info: SourceInfo,
+ ) -> BlockAnd<()> {
let Block {
region_scope,
opt_destruction_scope,
@@ -21,37 +25,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr,
targeted_by_break,
safety_mode
- } =
- self.hir.mirror(ast_block);
+ } = self.hir.mirror(ast_block);
self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| {
this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
if targeted_by_break {
// This is a `break`-able block
let exit_block = this.cfg.start_new_block();
+ if let Some(scope) = scope {
+ // Breakable blocks assign to their destination on each
+ // `break`, as well as when they exit normally. So we
+ // can't schedule the drop in the last expression like
+ // normal blocks do.
+ let local = destination.as_local()
+ .expect("cannot schedule drop of non-Local place");
+ this.schedule_drop(span, scope, local, DropKind::Value);
+ }
let block_exit = this.in_breakable_scope(
None, exit_block, destination.clone(), |this| {
- this.ast_block_stmts(destination, block, span, stmts, expr,
- safety_mode)
+ this.ast_block_stmts(
+ destination,
+ None,
+ block,
+ span,
+ stmts,
+ expr,
+ safety_mode,
+ )
});
this.cfg.terminate(unpack!(block_exit), source_info,
TerminatorKind::Goto { target: exit_block });
exit_block.unit()
} else {
- this.ast_block_stmts(destination, block, span, stmts, expr,
- safety_mode)
+ this.ast_block_stmts(
+ destination,
+ scope,
+ block,
+ span,
+ stmts,
+ expr,
+ safety_mode,
+ )
}
})
})
}
- fn ast_block_stmts(&mut self,
- destination: &Place<'tcx>,
- mut block: BasicBlock,
- span: Span,
- stmts: Vec<StmtRef<'tcx>>,
- expr: Option<ExprRef<'tcx>>,
- safety_mode: BlockSafety)
- -> BlockAnd<()> {
+ fn ast_block_stmts(
+ &mut self,
+ destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
+ mut block: BasicBlock,
+ span: Span,
+ stmts: Vec<StmtRef<'tcx>>,
+ expr: Option<ExprRef<'tcx>>,
+ safety_mode: BlockSafety,
+ ) -> BlockAnd<()> {
let this = self;
// This convoluted structure is to avoid using recursion as we walk down a list
@@ -177,7 +205,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.block_context.currently_ignores_tail_results();
this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored });
- unpack!(block = this.into(destination, block, expr));
+ unpack!(block = this.into(destination, scope, block, expr));
let popped = this.block_context.pop();
assert!(popped.map_or(false, |bf|bf.is_tail_expr()));
diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs
index 4f1ac8e51dc20..a0df626911601 100644
--- a/src/librustc_mir/build/expr/as_rvalue.rs
+++ b/src/librustc_mir/build/expr/as_rvalue.rs
@@ -136,11 +136,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.cfg
.push_assign(block, source_info, &Place::from(result), box_);
- // initialize the box contents:
+ // Initialize the box contents. No scope is needed since the
+ // `Box` is already scheduled to be dropped.
unpack!(
block = this.into(
&this.hir.tcx().mk_place_deref(Place::from(result)),
- block, value
+ None,
+ block,
+ value,
)
);
block.and(Rvalue::Use(Operand::Move(Place::from(result))))
diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs
index 18332ed68f8bd..bd20f27c945c1 100644
--- a/src/librustc_mir/build/expr/as_temp.rs
+++ b/src/librustc_mir/build/expr/as_temp.rs
@@ -109,16 +109,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
- unpack!(block = this.into(temp_place, block, expr));
-
- if let Some(temp_lifetime) = temp_lifetime {
- this.schedule_drop(
- expr_span,
- temp_lifetime,
- temp,
- DropKind::Value,
- );
- }
+ unpack!(block = this.into(temp_place, temp_lifetime, block, expr));
block.and(temp)
}
diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs
index e7388b920548b..014c1876f48e8 100644
--- a/src/librustc_mir/build/expr/into.rs
+++ b/src/librustc_mir/build/expr/into.rs
@@ -2,7 +2,9 @@
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
+use crate::build::scope::DropKind;
use crate::hair::*;
+use rustc::middle::region;
use rustc::mir::*;
use rustc::ty;
@@ -11,15 +13,18 @@ use rustc_target::spec::abi::Abi;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized.
+ /// If a `drop_scope` is provided, `destination` is scheduled to be dropped
+ /// in `scope` once it has been initialized.
pub fn into_expr(
&mut self,
destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
mut block: BasicBlock,
expr: Expr<'tcx>,
) -> BlockAnd<()> {
debug!(
- "into_expr(destination={:?}, block={:?}, expr={:?})",
- destination, block, expr
+ "into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})",
+ destination, scope, block, expr
);
// since we frequently have to reference `self` from within a
@@ -35,6 +40,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
_ => false,
};
+ let schedule_drop = move |this: &mut Self| {
+ if let Some(drop_scope) = scope {
+ let local = destination.as_local()
+ .expect("cannot schedule drop of non-Local place");
+ this.schedule_drop(expr_span, drop_scope, local, DropKind::Value);
+ }
+ };
+
if !expr_is_block_or_scope {
this.block_context.push(BlockFrame::SubExpr);
}
@@ -47,14 +60,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => {
let region_scope = (region_scope, source_info);
this.in_scope(region_scope, lint_level, |this| {
- this.into(destination, block, value)
+ this.into(destination, scope, block, value)
})
}
ExprKind::Block { body: ast_block } => {
- this.ast_block(destination, block, ast_block, source_info)
+ this.ast_block(destination, scope, block, ast_block, source_info)
}
ExprKind::Match { scrutinee, arms } => {
- this.match_expr(destination, expr_span, block, scrutinee, arms)
+ this.match_expr(destination, scope, expr_span, block, scrutinee, arms)
}
ExprKind::NeverToAny { source } => {
let source = this.hir.mirror(source);
@@ -67,6 +80,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// This is an optimization. If the expression was a call then we already have an
// unreachable block. Don't bother to terminate it and create a new one.
+ schedule_drop(this);
if is_call {
block.unit()
} else {
@@ -164,6 +178,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TerminatorKind::Goto { target: loop_block },
);
+ // Loops assign to their destination on each `break`. Since we
+ // can't easily unschedule drops, we schedule the drop now.
+ schedule_drop(this);
this.in_breakable_scope(
Some(loop_block),
exit_block,
@@ -185,7 +202,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// introduce a unit temporary as the destination for the loop body.
let tmp = this.get_unit_temp();
// Execute the body, branching back to the test.
- let body_block_end = unpack!(this.into(&tmp, body_block, body));
+ // No scope is provided, since we've scheduled the drop above.
+ let body_block_end = unpack!(this.into(&tmp, None, body_block, body));
this.cfg.terminate(
body_block_end,
source_info,
@@ -234,8 +252,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
is_block_tail: None,
});
let ptr_temp = Place::from(ptr_temp);
- let block = unpack!(this.into(&ptr_temp, block, ptr));
- this.into(&this.hir.tcx().mk_place_deref(ptr_temp), block, val)
+ // No need for a scope, ptr_temp doesn't need drop
+ let block = unpack!(this.into(&ptr_temp, None, block, ptr));
+ // Maybe we should provide a scope here so that
+ // `move_val_init` wouldn't leak on panic even with an
+ // arbitrary `val` expression, but `schedule_drop`,
+ // borrowck and drop elaboration all prevent us from
+ // dropping `ptr_temp.deref()`.
+ this.into(&this.hir.tcx().mk_place_deref(ptr_temp), None, block, val)
} else {
let args: Vec<_> = args
.into_iter()
@@ -265,11 +289,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
from_hir_call,
},
);
+ schedule_drop(this);
success.unit()
}
}
ExprKind::Use { source } => {
- this.into(destination, block, source)
+ this.into(destination, scope, block, source)
}
// These cases don't actually need a destination
@@ -296,6 +321,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg
.push_assign(block, source_info, destination, rvalue);
+ schedule_drop(this);
block.unit()
}
ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => {
@@ -315,6 +341,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg
.push_assign(block, source_info, destination, rvalue);
+ schedule_drop(this);
block.unit()
}
@@ -346,6 +373,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = unpack!(block = this.as_local_rvalue(block, expr));
this.cfg.push_assign(block, source_info, destination, rvalue);
+ schedule_drop(this);
block.unit()
}
};
diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs
index 077840c9ccf17..e57f10f0b14e9 100644
--- a/src/librustc_mir/build/into.rs
+++ b/src/librustc_mir/build/into.rs
@@ -6,6 +6,7 @@
use crate::build::{BlockAnd, Builder};
use crate::hair::*;
+use rustc::middle::region;
use rustc::mir::*;
pub(in crate::build) trait EvalInto<'tcx> {
@@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()>;
}
impl<'a, 'tcx> Builder<'a, 'tcx> {
- pub fn into(&mut self,
- destination: &Place<'tcx>,
- block: BasicBlock,
- expr: E)
- -> BlockAnd<()>
- where E: EvalInto<'tcx>
+ pub fn into(
+ &mut self,
+ destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
+ block: BasicBlock,
+ expr: E,
+ ) -> BlockAnd<()>
+ where
+ E: EvalInto<'tcx>,
{
- expr.eval_into(self, destination, block)
+ expr.eval_into(self, destination, scope, block)
}
}
@@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()> {
let expr = builder.hir.mirror(self);
- builder.into_expr(destination, block, expr)
+ builder.into_expr(destination, scope, block, expr)
}
}
@@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
+ scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()> {
- builder.into_expr(destination, block, self)
+ builder.into_expr(destination, scope, block, self)
}
}
diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs
index 667b37bbd80c8..518bb7603acbd 100644
--- a/src/librustc_mir/build/matches/mod.rs
+++ b/src/librustc_mir/build/matches/mod.rs
@@ -102,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn match_expr(
&mut self,
destination: &Place<'tcx>,
+ destination_scope: Option<region::Scope>,
span: Span,
mut block: BasicBlock,
scrutinee: ExprRef<'tcx>,
@@ -228,6 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
};
// Step 5. Create everything else: the guards and the arms.
+ if let Some(scope) = destination_scope {
+ // `match` assigns to its destination in each arm. Since we can't
+ // easily unschedule drops, we schedule the drop now.
+ let local = destination.as_local()
+ .expect("cannot schedule drop of non-Local place");
+ self.schedule_drop(span, scope, local, DropKind::Value);
+ }
+
let match_scope = self.scopes.topmost();
let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| {
@@ -275,7 +284,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.source_scope = source_scope;
}
- this.into(destination, arm_block, body)
+ // No scope is provided, since we've scheduled the drop above.
+ this.into(destination, None, arm_block, body)
})
}).collect();
@@ -311,8 +321,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => {
let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
- unpack!(block = self.into(&place, block, initializer));
+ let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
+ unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`.
let source_info = self.source_info(irrefutable_pat.span);
@@ -324,7 +335,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
},
);
- self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
}
@@ -352,9 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
user_ty_span,
},
} => {
+ let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
- unpack!(block = self.into(&place, block, initializer));
+ unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`.
let pattern_source_info = self.source_info(irrefutable_pat.span);
@@ -400,7 +411,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
},
);
- self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
}
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index ffb70180bbb4b..d65f49480bca5 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -613,6 +613,7 @@ where
let source_info = builder.source_info(span);
let call_site_s = (call_site_scope, source_info);
unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
+ builder.schedule_drop(span, call_site_scope, RETURN_PLACE, DropKind::Value);
if should_abort_on_panic(tcx, fn_def_id, abi) {
builder.schedule_abort();
}
@@ -643,6 +644,7 @@ where
builder.cfg.terminate(unreachable_block, source_info,
TerminatorKind::Unreachable);
}
+ builder.unschedule_return_place_drop();
return_block.unit()
}));
assert_eq!(block, builder.return_block());
@@ -684,7 +686,9 @@ fn construct_const<'a, 'tcx>(
let mut block = START_BLOCK;
let ast_expr = &tcx.hir().body(body_id).value;
let expr = builder.hir.mirror(ast_expr);
- unpack!(block = builder.into_expr(&Place::return_place(), block, expr));
+ // We don't provide a scope because we can't unwind in constants, so won't
+ // need to drop the return place.
+ unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr));
let source_info = builder.source_info(span);
builder.cfg.terminate(block, source_info, TerminatorKind::Return);
@@ -885,7 +889,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
let body = self.hir.mirror(ast_body);
- self.into(&Place::return_place(), block, body)
+ // No scope is provided, since we've scheduled the drop of the return
+ // place.
+ self.into(&Place::return_place(), None, block, body)
}
fn set_correct_source_scope_for_arg(
diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs
index 1b3d8641f204e..f5cb9b8d0cd63 100644
--- a/src/librustc_mir/build/scope.rs
+++ b/src/librustc_mir/build/scope.rs
@@ -513,7 +513,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
if let Some(value) = value {
debug!("stmt_expr Break val block_context.push(SubExpr)");
self.block_context.push(BlockFrame::SubExpr);
- unpack!(block = self.into(&destination, block, value));
+ unpack!(block = self.into(&destination, None, block, value));
self.block_context.pop();
} else {
self.cfg.push_assign_unit(block, source_info, &destination)
@@ -1067,6 +1067,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
success_block
}
+ /// Unschedules the drop of the return place.
+ ///
+ /// If the return type of a function requires drop, then we schedule it
+ /// in the outermost scope so that it's dropped if there's a panic while
+ /// we drop any local variables. But we don't want to drop it if we
+ /// return normally.
+ crate fn unschedule_return_place_drop(&mut self) {
+ assert_eq!(self.scopes.len(), 1);
+ assert!(self.scopes.scopes[0].drops.len() <= 1);
+ self.scopes.scopes[0].drops.clear();
+ }
+
// `match` arm scopes
// ==================
/// Unschedules any drops in the top scope.
diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs
index 52016d4c9363a..906776ed64259 100644
--- a/src/librustc_mir/dataflow/move_paths/builder.rs
+++ b/src/librustc_mir/dataflow/move_paths/builder.rs
@@ -348,6 +348,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
fn gather_terminator(&mut self, term: &Terminator<'tcx>) {
match term.kind {
TerminatorKind::Goto { target: _ }
+ | TerminatorKind::Return
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::GeneratorDrop
@@ -355,10 +356,6 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Unreachable => {}
- TerminatorKind::Return => {
- self.gather_move(&Place::return_place());
- }
-
TerminatorKind::Assert { ref cond, .. } => {
self.gather_operand(cond);
}
diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs
index a1846a1fb5eaf..3091f78403dc2 100644
--- a/src/librustc_mir/util/elaborate_drops.rs
+++ b/src/librustc_mir/util/elaborate_drops.rs
@@ -163,8 +163,6 @@ where
});
}
DropStyle::Static => {
- let loc = self.terminator_loc(bb);
- self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
location: self.place.clone(),
target: self.succ,
@@ -172,9 +170,7 @@ where
});
}
DropStyle::Conditional => {
- let unwind = self.unwind; // FIXME(#43234)
- let succ = self.succ;
- let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
+ let drop_bb = self.complete_drop(self.succ, self.unwind);
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
target: drop_bb
});
@@ -236,7 +232,7 @@ where
// Using `self.path` here to condition the drop on
// our own drop flag.
path: self.path
- }.complete_drop(None, succ, unwind)
+ }.complete_drop(succ, unwind)
}
}
@@ -265,13 +261,7 @@ where
// Clear the "master" drop flag at the end. This is needed
// because the "master" drop protects the ADT's discriminant,
// which is invalidated after the ADT is dropped.
- let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
- (
- self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
- unwind.map(|unwind| {
- self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
- })
- )
+ (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
}
/// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
@@ -827,9 +817,7 @@ where
}
}
ty::Dynamic(..) => {
- let unwind = self.unwind; // FIXME(#43234)
- let succ = self.succ;
- self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
+ self.complete_drop(self.succ, self.unwind)
}
ty::Array(ety, size) => {
let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
@@ -850,18 +838,12 @@ where
/// drop(self.place)
fn complete_drop(
&mut self,
- drop_mode: Option<DropFlagMode>,
succ: BasicBlock,
unwind: Unwind,
) -> BasicBlock {
- debug!("complete_drop({:?},{:?})", self, drop_mode);
+ debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
let drop_block = self.drop_block(succ, unwind);
- let drop_block = if let Some(mode) = drop_mode {
- self.drop_flag_reset_block(mode, drop_block, unwind)
- } else {
- drop_block
- };
self.drop_flag_test_block(drop_block, succ, unwind)
}
@@ -873,6 +855,11 @@ where
{
debug!("drop_flag_reset_block({:?},{:?})", self, mode);
+ if unwind.is_cleanup() {
+ // The drop flag isn't read again on the unwind path, so don't
+ // bother setting it.
+ return succ;
+ }
let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
let block_start = Location { block: block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, mode);
@@ -976,11 +963,6 @@ where
self.elaborator.patch().new_temp(ty, self.source_info.span)
}
- fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
- let body = self.elaborator.body();
- self.elaborator.patch().terminator_loc(body, bb)
- }
-
fn constant_usize(&self, val: u16) -> Operand<'tcx> {
Operand::Constant(box Constant {
span: self.source_info.span,
diff --git a/src/test/mir-opt/box_expr.rs b/src/test/mir-opt/box_expr.rs
index 8dc6b73edf6d4..76098731947fe 100644
--- a/src/test/mir-opt/box_expr.rs
+++ b/src/test/mir-opt/box_expr.rs
@@ -41,33 +41,36 @@ impl Drop for S {
//
// bb2: {
// _1 = move _2;
-// drop(_2) -> bb4;
+// drop(_2) -> [return: bb5, unwind: bb4];
// }
//
// bb3 (cleanup): {
// drop(_2) -> bb1;
// }
//
-// bb4: {
+// bb4 (cleanup): {
+// drop(_1) -> bb1;
+// }
+//
+// bb5: {
// StorageDead(_2);
// StorageLive(_3);
// StorageLive(_4);
// _4 = move _1;
-// _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb5, unwind: bb7];
+// _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb6, unwind: bb7];
// }
//
-// bb5: {
+// bb6: {
// StorageDead(_4);
// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb8;
// }
-// bb6 (cleanup): {
-// drop(_1) -> bb1;
-// }
+//
// bb7 (cleanup): {
-// drop(_4) -> bb6;
+// drop(_4) -> bb4;
// }
+//
// bb8: {
// StorageDead(_1);
// return;
diff --git a/src/test/mir-opt/issue-62289.rs b/src/test/mir-opt/issue-62289.rs
index a3b517e9bca87..e8dd56cbbae22 100644
--- a/src/test/mir-opt/issue-62289.rs
+++ b/src/test/mir-opt/issue-62289.rs
@@ -24,7 +24,7 @@ fn main() {
// StorageLive(_3);
// StorageLive(_4);
// _4 = std::option::Option::<u32>::None;
-// _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3];
+// _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb4];
// }
// bb1 (cleanup): {
// resume;
@@ -32,60 +32,63 @@ fn main() {
// bb2: {
// StorageDead(_4);
// _5 = discriminant(_3);
-// switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4];
+// switchInt(move _5) -> [0isize: bb11, 1isize: bb6, otherwise: bb5];
// }
// bb3 (cleanup): {
-// drop(_2) -> bb1;
+// drop(_0) -> bb1;
// }
-// bb4: {
-// unreachable;
+// bb4 (cleanup): {
+// drop(_2) -> bb3;
// }
// bb5: {
+// unreachable;
+// }
+// bb6: {
// StorageLive(_6);
// _6 = ((_3 as Err).0: std::option::NoneError);
// StorageLive(_8);
// StorageLive(_9);
// _9 = _6;
-// _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb7, unwind: bb3];
+// _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb8, unwind: bb4];
// }
-// bb6: {
+// bb7: {
// return;
// }
-// bb7: {
+// bb8: {
// StorageDead(_9);
-// _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3];
+// _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb4];
// }
-// bb8: {
+// bb9: {
// StorageDead(_8);
// StorageDead(_6);
-// drop(_2) -> bb9;
+// drop(_2) -> [return: bb10, unwind: bb3];
// }
-// bb9: {
+// bb10: {
// StorageDead(_2);
// StorageDead(_1);
// StorageDead(_3);
-// goto -> bb6;
+// goto -> bb7;
// }
-// bb10: {
+// bb11: {
// StorageLive(_10);
// _10 = ((_3 as Ok).0: u32);
// (*_2) = _10;
// StorageDead(_10);
// _1 = move _2;
-// drop(_2) -> [return: bb12, unwind: bb11];
+// drop(_2) -> [return: bb13, unwind: bb12];
// }
-// bb11 (cleanup): {
-// drop(_1) -> bb1;
+// bb12 (cleanup): {
+// drop(_1) -> bb3;
// }
-// bb12: {
+// bb13: {
// StorageDead(_2);
// _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
-// drop(_1) -> bb13;
+// drop(_1) -> [return: bb14, unwind: bb3];
// }
-// bb13: {
+// bb14: {
// StorageDead(_1);
// StorageDead(_3);
-// goto -> bb6;
+// goto -> bb7;
// }
// }
// END rustc.test.ElaborateDrops.before.mir
diff --git a/src/test/mir-opt/unusual-item-types.rs b/src/test/mir-opt/unusual-item-types.rs
index f4d848dfc7ad1..8d29c21835976 100644
--- a/src/test/mir-opt/unusual-item-types.rs
+++ b/src/test/mir-opt/unusual-item-types.rs
@@ -46,8 +46,8 @@ fn main() {
// END rustc.E-V-{{constant}}.mir_map.0.mir
// START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
-// bb0: {
-// goto -> bb7;
+// bb0: {
+// goto -> bb6;
// }
// bb1: {
// return;
@@ -59,17 +59,14 @@ fn main() {
// goto -> bb1;
// }
// bb4 (cleanup): {
-// goto -> bb2;
+// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb2;
// }
-// bb5 (cleanup): {
-// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb4;
+// bb5: {
+// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb2];
// }
// bb6: {
-// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb4];
-// }
-// bb7: {
// _2 = &mut (*_1);
-// _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb6, unwind: bb5];
+// _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb5, unwind: bb4];
// }
// END rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
diff --git a/src/test/ui/async-await/async-fn-size-uninit-locals.rs b/src/test/ui/async-await/async-fn-size-uninit-locals.rs
index 0558084f4f8a3..44b14e27332e0 100644
--- a/src/test/ui/async-await/async-fn-size-uninit-locals.rs
+++ b/src/test/ui/async-await/async-fn-size-uninit-locals.rs
@@ -99,5 +99,5 @@ fn main() {
assert_eq!(12, std::mem::size_of_val(&single_with_noop()));
assert_eq!(3084, std::mem::size_of_val(&joined()));
assert_eq!(3084, std::mem::size_of_val(&joined_with_noop()));
- assert_eq!(3080, std::mem::size_of_val(&join_retval()));
+ assert_eq!(3084, std::mem::size_of_val(&join_retval()));
}
diff --git a/src/test/ui/drop/dynamic-drop-async.rs b/src/test/ui/drop/dynamic-drop-async.rs
index 91063edf0f6c4..398bcb7ec0e82 100644
--- a/src/test/ui/drop/dynamic-drop-async.rs
+++ b/src/test/ui/drop/dynamic-drop-async.rs
@@ -7,7 +7,7 @@
// edition:2018
// ignore-wasm32-bare compiled with panic=abort by default
-#![feature(slice_patterns)]
+#![feature(slice_patterns, arbitrary_self_types)]
#![allow(unused)]
use std::{
@@ -45,6 +45,7 @@ impl Future for Defer {
/// The `failing_op`-th operation will panic.
struct Allocator {
data: RefCell<Vec<bool>>,
+ name: &'static str,
failing_op: usize,
cur_ops: Cell<usize>,
}
@@ -56,23 +57,28 @@ impl Drop for Allocator {
fn drop(&mut self) {
let data = self.data.borrow();
if data.iter().any(|d| *d) {
- panic!("missing free: {:?}", data);
+ panic!("missing free in {:?}: {:?}", self.name, data);
}
}
}
impl Allocator {
- fn new(failing_op: usize) -> Self {
- Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
+ fn new(failing_op: usize, name: &'static str) -> Self {
+ Allocator {
+ failing_op,
+ name,
+ cur_ops: Cell::new(0),
+ data: RefCell::new(vec![]),
+ }
}
- fn alloc(&self) -> impl Future