From af01df7d11a5151fca7f8d4cce6bccbda753394e Mon Sep 17 00:00:00 2001
From: bjorn3
Date: Tue, 10 Jul 2018 12:18:54 +0200
Subject: [PATCH 1/2] Move LocalAnalyzer to rustc_mir::ssa_analyze

---
 src/librustc_codegen_llvm/mir/analyze.rs | 263 +++-------------------
 src/librustc_mir/lib.rs                  |   1 +
 src/librustc_mir/ssa_analyze.rs          | 268 +++++++++++++++++++++++
 3 files changed, 297 insertions(+), 235 deletions(-)
 create mode 100644 src/librustc_mir/ssa_analyze.rs

diff --git a/src/librustc_codegen_llvm/mir/analyze.rs b/src/librustc_codegen_llvm/mir/analyze.rs
index 9e5298eb736a3..5d8bc4885f531 100644
--- a/src/librustc_codegen_llvm/mir/analyze.rs
+++ b/src/librustc_codegen_llvm/mir/analyze.rs
@@ -1,249 +1,42 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! An analysis to determine which locals require allocas and
-//! which do not.
-
 use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::control_flow_graph::dominators::Dominators;
-use rustc_data_structures::indexed_vec::{Idx, IndexVec};
-use rustc::mir::{self, Location, TerminatorKind};
-use rustc::mir::visit::{Visitor, PlaceContext};
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc::mir::{self, TerminatorKind};
 use rustc::mir::traversal;
-use rustc::ty;
+use rustc::ty::Ty;
+use rustc_mir::ssa_analyze;
 use rustc::ty::layout::LayoutOf;
+
 use type_of::LayoutLlvmExt;
 use super::FunctionCx;
 
-pub fn non_ssa_locals<'a, 'tcx>(fx: &FunctionCx<'a, 'tcx>) -> BitVector {
-    let mir = fx.mir;
-    let mut analyzer = LocalAnalyzer::new(fx);
-
-    analyzer.visit_mir(mir);
-
-    for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
-        let ty = fx.monomorphize(&ty);
-        debug!("local {} has type {:?}", index, ty);
-        let layout = fx.cx.layout_of(ty);
-        if layout.is_llvm_immediate() {
-            // These sorts of types are immediates that we can store
-            // in an ValueRef without an alloca.
-        } else if layout.is_llvm_scalar_pair() {
-            // We allow pairs and uses of any of their 2 fields.
-        } else {
-            // These sorts of types require an alloca. Note that
-            // is_llvm_immediate() may *still* be true, particularly
-            // for newtypes, but we currently force some types
-            // (e.g. structs) into an alloca unconditionally, just so
-            // that we don't have to deal with having two pathways
-            // (gep vs extractvalue etc).
-            analyzer.not_ssa(mir::Local::new(index));
-        }
-    }
-
-    analyzer.non_ssa_locals
-}
-
-struct LocalAnalyzer<'mir, 'a: 'mir, 'tcx: 'a> {
-    fx: &'mir FunctionCx<'a, 'tcx>,
-    dominators: Dominators<mir::BasicBlock>,
-    non_ssa_locals: BitVector,
-    // The location of the first visited direct assignment to each
-    // local, or an invalid location (out of bounds `block` index).
-    first_assignment: IndexVec<mir::Local, Location>
-}
-
-impl<'mir, 'a, 'tcx> LocalAnalyzer<'mir, 'a, 'tcx> {
-    fn new(fx: &'mir FunctionCx<'a, 'tcx>) -> LocalAnalyzer<'mir, 'a, 'tcx> {
-        let invalid_location =
-            mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
-        let mut analyzer = LocalAnalyzer {
-            fx,
-            dominators: fx.mir.dominators(),
-            non_ssa_locals: BitVector::new(fx.mir.local_decls.len()),
-            first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls)
-        };
-
-        // Arguments get assigned to by means of the function being called
-        for arg in fx.mir.args_iter() {
-            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
-        }
-
-        analyzer
-    }
-
-    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
-        let location = self.first_assignment[local];
-        if location.block.index() < self.fx.mir.basic_blocks().len() {
-            Some(location)
-        } else {
-            None
-        }
-    }
-
-    fn not_ssa(&mut self, local: mir::Local) {
-        debug!("marking {:?} as non-SSA", local);
-        self.non_ssa_locals.insert(local.index());
-    }
-
-    fn assign(&mut self, local: mir::Local, location: Location) {
-        if self.first_assignment(local).is_some() {
-            self.not_ssa(local);
-        } else {
-            self.first_assignment[local] = location;
-        }
-    }
-}
-
-impl<'mir, 'a, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'tcx> {
-    fn visit_assign(&mut self,
-                    block: mir::BasicBlock,
-                    place: &mir::Place<'tcx>,
-                    rvalue: &mir::Rvalue<'tcx>,
-                    location: Location) {
-        debug!("visit_assign(block={:?}, place={:?}, rvalue={:?})", block, place, rvalue);
-
-        if let mir::Place::Local(index) = *place {
-            self.assign(index, location);
-            if !self.fx.rvalue_creates_operand(rvalue) {
-                self.not_ssa(index);
+pub fn non_ssa_locals<'a, 'tcx: 'a>(fx: &FunctionCx<'a, 'tcx>) -> BitVector {
+    impl<'a, 'tcx: 'a> ssa_analyze::LocalAnalyzerCallbacks<'tcx> for &'a FunctionCx<'a, 'tcx>{
+        fn ty_ssa_allowed(&self, ty: Ty<'tcx>) -> bool {
+            let layout = self.cx.layout_of(ty);
+            if layout.is_llvm_immediate() {
+                // These sorts of types are immediates that we can store
+                // in an ValueRef without an alloca.
+                true
+            } else if layout.is_llvm_scalar_pair() {
+                // We allow pairs and uses of any of their 2 fields.
+                true
+            } else {
+                // These sorts of types require an alloca. Note that
+                // is_llvm_immediate() may *still* be true, particularly
+                // for newtypes, but we currently force some types
+                // (e.g. structs) into an alloca unconditionally, just so
+                // that we don't have to deal with having two pathways
+                // (gep vs extractvalue etc).
+                false
             }
-        } else {
-            self.visit_place(place, PlaceContext::Store, location);
         }
-
-        self.visit_rvalue(rvalue, location);
-    }
-
-    fn visit_terminator_kind(&mut self,
-                             block: mir::BasicBlock,
-                             kind: &mir::TerminatorKind<'tcx>,
-                             location: Location) {
-        let check = match *kind {
-            mir::TerminatorKind::Call {
-                func: mir::Operand::Constant(ref c),
-                ref args, ..
-            } => match c.ty.sty {
-                ty::TyFnDef(did, _) => Some((did, args)),
-                _ => None,
-            },
-            _ => None,
-        };
-        if let Some((def_id, args)) = check {
-            if Some(def_id) == self.fx.cx.tcx.lang_items().box_free_fn() {
-                // box_free(x) shares with `drop x` the property that it
-                // is not guaranteed to be statically dominated by the
-                // definition of x, so x must always be in an alloca.
-                if let mir::Operand::Move(ref place) = args[0] {
-                    self.visit_place(place, PlaceContext::Drop, location);
-                }
-            }
-        }
-
-        self.super_terminator_kind(block, kind, location);
-    }
-
-    fn visit_place(&mut self,
-                   place: &mir::Place<'tcx>,
-                   context: PlaceContext<'tcx>,
-                   location: Location) {
-        debug!("visit_place(place={:?}, context={:?})", place, context);
-        let cx = self.fx.cx;
-
-        if let mir::Place::Projection(ref proj) = *place {
-            // Allow uses of projections that are ZSTs or from scalar fields.
-            let is_consume = match context {
-                PlaceContext::Copy | PlaceContext::Move => true,
-                _ => false
-            };
-            if is_consume {
-                let base_ty = proj.base.ty(self.fx.mir, cx.tcx);
-                let base_ty = self.fx.monomorphize(&base_ty);
-
-                // ZSTs don't require any actual memory access.
-                let elem_ty = base_ty.projection_ty(cx.tcx, &proj.elem).to_ty(cx.tcx);
-                let elem_ty = self.fx.monomorphize(&elem_ty);
-                if cx.layout_of(elem_ty).is_zst() {
-                    return;
-                }
-
-                if let mir::ProjectionElem::Field(..) = proj.elem {
-                    let layout = cx.layout_of(base_ty.to_ty(cx.tcx));
-                    if layout.is_llvm_immediate() || layout.is_llvm_scalar_pair() {
-                        // Recurse with the same context, instead of `Projection`,
-                        // potentially stopping at non-operand projections,
-                        // which would trigger `not_ssa` on locals.
-                        self.visit_place(&proj.base, context, location);
-                        return;
-                    }
-                }
-            }
-
-            // A deref projection only reads the pointer, never needs the place.
-            if let mir::ProjectionElem::Deref = proj.elem {
-                return self.visit_place(&proj.base, PlaceContext::Copy, location);
-            }
+
+        fn does_rvalue_create_operand(&self, rvalue: &mir::Rvalue<'tcx>) -> bool {
+            self.rvalue_creates_operand(rvalue)
         }
-
-        self.super_place(place, context, location);
     }
 
-    fn visit_local(&mut self,
-                   &local: &mir::Local,
-                   context: PlaceContext<'tcx>,
-                   location: Location) {
-        match context {
-            PlaceContext::Call => {
-                self.assign(local, location);
-            }
-
-            PlaceContext::StorageLive |
-            PlaceContext::StorageDead |
-            PlaceContext::Validate => {}
-
-            PlaceContext::Copy |
-            PlaceContext::Move => {
-                // Reads from uninitialized variables (e.g. in dead code, after
-                // optimizations) require locals to be in (uninitialized) memory.
-                // NB: there can be uninitialized reads of a local visited after
-                // an assignment to that local, if they happen on disjoint paths.
-                let ssa_read = match self.first_assignment(local) {
-                    Some(assignment_location) => {
-                        assignment_location.dominates(location, &self.dominators)
-                    }
-                    None => false
-                };
-                if !ssa_read {
-                    self.not_ssa(local);
-                }
-            }
-
-            PlaceContext::Inspect |
-            PlaceContext::Store |
-            PlaceContext::AsmOutput |
-            PlaceContext::Borrow { .. } |
-            PlaceContext::Projection(..) => {
-                self.not_ssa(local);
-            }
-
-            PlaceContext::Drop => {
-                let ty = mir::Place::Local(local).ty(self.fx.mir, self.fx.cx.tcx);
-                let ty = self.fx.monomorphize(&ty.to_ty(self.fx.cx.tcx));
-
-                // Only need the place if we're actually dropping it.
-                if self.fx.cx.type_needs_drop(ty) {
-                    self.not_ssa(local);
-                }
-            }
-        }
-    }
+    ssa_analyze::non_ssa_locals(fx.cx.tcx, fx.mir, fx.param_substs, fx)
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -358,4 +151,4 @@ pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec
diff --git a/src/librustc_mir/ssa_analyze.rs b/src/librustc_mir/ssa_analyze.rs
new file mode 100644
--- /dev/null
+++ b/src/librustc_mir/ssa_analyze.rs
@@ -0,0 +1,268 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! An analysis to determine which locals require allocas and
+//! which do not.
+
+use rustc_data_structures::bitvec::BitVector;
+use rustc_data_structures::control_flow_graph::dominators::Dominators;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc::mir::{self, Mir, Location};
+use rustc::mir::visit::{Visitor, PlaceContext};
+use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
+use rustc::ty::subst::Substs;
+use rustc::ty::layout::{TyLayout, LayoutError};
+
+pub trait LocalAnalyzerCallbacks<'tcx> {
+    fn ty_ssa_allowed(&self, ty: Ty<'tcx>) -> bool;
+    fn does_rvalue_create_operand(&self, rval: &mir::Rvalue<'tcx>) -> bool;
+}
+
+pub fn non_ssa_locals<'a, 'tcx: 'a, C: LocalAnalyzerCallbacks<'tcx>>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    mir: &'a Mir<'tcx>,
+    param_substs: &'tcx Substs<'tcx>,
+    callbacks: C,
+) -> BitVector {
+    let mut analyzer = LocalAnalyzer::new(tcx, mir, param_substs, callbacks);
+
+    analyzer.visit_mir(mir);
+
+    for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
+        let ty = analyzer.monomorphize(&ty);
+        debug!("local {} has type {:?}", index, ty);
+        if !analyzer.callbacks.ty_ssa_allowed(ty) {
+            analyzer.not_ssa(mir::Local::new(index));
+        }
+    }
+
+    analyzer.non_ssa_locals
+}
+
+struct LocalAnalyzer<'a, 'tcx: 'a, C: LocalAnalyzerCallbacks<'tcx>> {
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    mir: &'a Mir<'tcx>,
+    param_substs: &'tcx Substs<'tcx>,
+    callbacks: C,
+    dominators: Dominators<mir::BasicBlock>,
+    non_ssa_locals: BitVector,
+    // The location of the first visited direct assignment to each
+    // local, or an invalid location (out of bounds `block` index).
+    first_assignment: IndexVec<mir::Local, Location>
+}
+
+impl<'a, 'tcx: 'a, C: LocalAnalyzerCallbacks<'tcx>> LocalAnalyzer<'a, 'tcx, C> {
+    fn new(
+        tcx: TyCtxt<'a, 'tcx, 'tcx>,
+        mir: &'a Mir<'tcx>,
+        param_substs: &'tcx Substs<'tcx>,
+        callbacks: C,
+    ) -> LocalAnalyzer<'a, 'tcx, C> {
+        let invalid_location =
+            mir::BasicBlock::new(mir.basic_blocks().len()).start_location();
+        let mut analyzer = LocalAnalyzer {
+            tcx,
+            mir,
+            param_substs,
+            callbacks,
+            dominators: mir.dominators(),
+            non_ssa_locals: BitVector::new(mir.local_decls.len()),
+            first_assignment: IndexVec::from_elem(invalid_location, &mir.local_decls)
+        };
+
+        // Arguments get assigned to by means of the function being called
+        for arg in mir.args_iter() {
+            analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
+        }
+
+        analyzer
+    }
+
+    fn first_assignment(&self, local: mir::Local) -> Option<Location> {
+        let location = self.first_assignment[local];
+        if location.block.index() < self.mir.basic_blocks().len() {
+            Some(location)
+        } else {
+            None
+        }
+    }
+
+    fn not_ssa(&mut self, local: mir::Local) {
+        debug!("marking {:?} as non-SSA", local);
+        self.non_ssa_locals.insert(local.index());
+    }
+
+    fn assign(&mut self, local: mir::Local, location: Location) {
+        if self.first_assignment(local).is_some() {
+            self.not_ssa(local);
+        } else {
+            self.first_assignment[local] = location;
+        }
+    }
+
+    fn layout_of(&self, ty: Ty<'tcx>) -> TyLayout<'tcx> {
+        self.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty))
+            .unwrap_or_else(|e| match e {
+                LayoutError::SizeOverflow(_) => self.tcx.sess.fatal(&e.to_string()),
+                _ => bug!("failed to get layout for `{}`: {}", ty, e)
+            })
+    }
+
+    fn monomorphize<T>(&self, value: &T) -> T
+        where T: TypeFoldable<'tcx>
+    {
+        self.tcx.subst_and_normalize_erasing_regions(
+            self.param_substs,
+            ty::ParamEnv::reveal_all(),
+            value,
+        )
+    }
+}
+
+impl<'a, 'tcx, C: LocalAnalyzerCallbacks<'tcx>> Visitor<'tcx> for LocalAnalyzer<'a, 'tcx, C> {
+    fn visit_assign(&mut self,
+                    block: mir::BasicBlock,
+                    place: &mir::Place<'tcx>,
+                    rvalue: &mir::Rvalue<'tcx>,
+                    location: Location) {
+        debug!("visit_assign(block={:?}, place={:?}, rvalue={:?})", block, place, rvalue);
+
+        if let mir::Place::Local(index) = *place {
+            self.assign(index, location);
+            if !self.callbacks.does_rvalue_create_operand(rvalue) {
+                self.not_ssa(index);
+            }
+        } else {
+            self.visit_place(place, PlaceContext::Store, location);
+        }
+
+        self.visit_rvalue(rvalue, location);
+    }
+
+    fn visit_terminator_kind(&mut self,
+                             block: mir::BasicBlock,
+                             kind: &mir::TerminatorKind<'tcx>,
+                             location: Location) {
+        let check = match *kind {
+            mir::TerminatorKind::Call {
+                func: mir::Operand::Constant(ref c),
+                ref args, ..
+            } => match c.ty.sty {
+                ty::TyFnDef(did, _) => Some((did, args)),
+                _ => None,
+            },
+            _ => None,
+        };
+        if let Some((def_id, args)) = check {
+            if Some(def_id) == self.tcx.lang_items().box_free_fn() {
+                // box_free(x) shares with `drop x` the property that it
+                // is not guaranteed to be statically dominated by the
+                // definition of x, so x must always be in an alloca.
+                if let mir::Operand::Move(ref place) = args[0] {
+                    self.visit_place(place, PlaceContext::Drop, location);
+                }
+            }
+        }
+
+        self.super_terminator_kind(block, kind, location);
+    }
+
+    fn visit_place(&mut self,
+                   place: &mir::Place<'tcx>,
+                   context: PlaceContext<'tcx>,
+                   location: Location) {
+        debug!("visit_place(place={:?}, context={:?})", place, context);
+        if let mir::Place::Projection(ref proj) = *place {
+            // Allow uses of projections that are ZSTs or from scalar fields.
+            let is_consume = match context {
+                PlaceContext::Copy | PlaceContext::Move => true,
+                _ => false
+            };
+            if is_consume {
+                let base_ty = proj.base.ty(self.mir, self.tcx);
+                let base_ty = self.monomorphize(&base_ty);
+
+                // ZSTs don't require any actual memory access.
+                let elem_ty = base_ty.projection_ty(self.tcx, &proj.elem).to_ty(self.tcx);
+                let elem_ty = self.monomorphize(&elem_ty);
+                if self.layout_of(elem_ty).is_zst() {
+                    return;
+                }
+
+                if let mir::ProjectionElem::Field(..) = proj.elem {
+                    if self.callbacks.ty_ssa_allowed(base_ty.to_ty(self.tcx)) {
+                        // Recurse with the same context, instead of `Projection`,
+                        // potentially stopping at non-operand projections,
+                        // which would trigger `not_ssa` on locals.
+                        self.visit_place(&proj.base, context, location);
+                        return;
+                    }
+                }
+            }
+
+            // A deref projection only reads the pointer, never needs the place.
+            if let mir::ProjectionElem::Deref = proj.elem {
+                return self.visit_place(&proj.base, PlaceContext::Copy, location);
+            }
+        }
+
+        self.super_place(place, context, location);
+    }
+
+    fn visit_local(&mut self,
+                   &local: &mir::Local,
+                   context: PlaceContext<'tcx>,
+                   location: Location) {
+        match context {
+            PlaceContext::Call => {
+                self.assign(local, location);
+            }
+
+            PlaceContext::StorageLive |
+            PlaceContext::StorageDead |
+            PlaceContext::Validate => {}
+
+            PlaceContext::Copy |
+            PlaceContext::Move => {
+                // Reads from uninitialized variables (e.g. in dead code, after
+                // optimizations) require locals to be in (uninitialized) memory.
+                // NB: there can be uninitialized reads of a local visited after
+                // an assignment to that local, if they happen on disjoint paths.
+                let ssa_read = match self.first_assignment(local) {
+                    Some(assignment_location) => {
+                        assignment_location.dominates(location, &self.dominators)
+                    }
+                    None => false
+                };
+                if !ssa_read {
+                    self.not_ssa(local);
+                }
+            }
+
+            PlaceContext::Inspect |
+            PlaceContext::Store |
+            PlaceContext::AsmOutput |
+            PlaceContext::Borrow { .. } |
+            PlaceContext::Projection(..) => {
+                self.not_ssa(local);
+            }
+
+            PlaceContext::Drop => {
+                let ty = mir::Place::Local(local).ty(self.mir, self.tcx);
+                let ty = self.monomorphize(&ty.to_ty(self.tcx));
+
+                // Only need the place if we're actually dropping it.
+                if ty.needs_drop(self.tcx, ty::ParamEnv::reveal_all()) {
+                    self.not_ssa(local);
+                }
+            }
+        }
+    }
+}

From 96d1515073f453de75d9185889ccc62f5188c08b Mon Sep 17 00:00:00 2001
From: bjorn3
Date: Tue, 10 Jul 2018 14:14:17 +0200
Subject: [PATCH 2/2] Fix tidy errors

---
 src/librustc_codegen_llvm/mir/analyze.rs | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/librustc_codegen_llvm/mir/analyze.rs b/src/librustc_codegen_llvm/mir/analyze.rs
index 5d8bc4885f531..0c37971fecd42 100644
--- a/src/librustc_codegen_llvm/mir/analyze.rs
+++ b/src/librustc_codegen_llvm/mir/analyze.rs
@@ -1,3 +1,13 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
 use rustc_data_structures::bitvec::BitVector;
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc::mir::{self, TerminatorKind};
 use rustc::mir::traversal;
@@ -151,4 +161,4 @@ pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec
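Illustrative usage sketch (not part of the patches above): a minimal example of how a hypothetical non-LLVM backend could implement the `LocalAnalyzerCallbacks` trait and call `ssa_analyze::non_ssa_locals` as introduced in ssa_analyze.rs. `ExampleFnCx`, `analyze_fn` and `example_ty_is_immediate` are made-up names for this sketch, not existing rustc APIs; only the trait and function signatures from the patch are assumed.

use rustc::mir::{self, Mir};
use rustc::ty::{Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc_data_structures::bitvec::BitVector;
use rustc_mir::ssa_analyze;

// Per-function state of the hypothetical backend.
struct ExampleFnCx<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    mir: &'a Mir<'tcx>,
    param_substs: &'tcx Substs<'tcx>,
}

// Mirrors the `impl ... for &'a FunctionCx` pattern used in analyze.rs above.
impl<'a, 'tcx: 'a> ssa_analyze::LocalAnalyzerCallbacks<'tcx> for &'a ExampleFnCx<'a, 'tcx> {
    fn ty_ssa_allowed(&self, ty: Ty<'tcx>) -> bool {
        // Backend-specific policy: which types may live as register-like values.
        example_ty_is_immediate(self.tcx, ty)
    }

    fn does_rvalue_create_operand(&self, _rval: &mir::Rvalue<'tcx>) -> bool {
        // A conservative backend can answer `false`, forcing an alloca for
        // every directly assigned local.
        false
    }
}

fn analyze_fn<'a, 'tcx>(fx: &'a ExampleFnCx<'a, 'tcx>) -> BitVector {
    // Locals whose bit is set need a stack slot; the rest can stay SSA values.
    ssa_analyze::non_ssa_locals(fx.tcx, fx.mir, fx.param_substs, fx)
}

fn example_ty_is_immediate<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
    // Placeholder policy for the sketch: only scalar primitives stay SSA.
    ty.is_scalar()
}

Because the moved analysis now only needs a `TyCtxt`, the MIR body, the substitutions and these two callbacks, the dominance-based SSA check itself no longer depends on LLVM-specific layout queries.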