diff --git a/src/librustc_mir/dataflow/impls/borrowed_locals.rs b/src/librustc_mir/dataflow/impls/borrowed_locals.rs
index f929b2ddde0ba..b61dc56407eb9 100644
--- a/src/librustc_mir/dataflow/impls/borrowed_locals.rs
+++ b/src/librustc_mir/dataflow/impls/borrowed_locals.rs
@@ -99,6 +99,9 @@ impl<K> GenKillAnalysis<'tcx> for MaybeBorrowedLocals<K>
 where
     K: BorrowAnalysisKind<'tcx>,
 {
+    // The generator transform relies on the fact that this analysis does **not** use "before"
+    // effects.
+
     fn statement_effect(
         &self,
         trans: &mut impl GenKill<Self::Idx>,
diff --git a/src/librustc_mir/dataflow/impls/init_locals.rs b/src/librustc_mir/dataflow/impls/init_locals.rs
new file mode 100644
index 0000000000000..726330b1f035e
--- /dev/null
+++ b/src/librustc_mir/dataflow/impls/init_locals.rs
@@ -0,0 +1,118 @@
+//! A less precise version of `MaybeInitializedPlaces` whose domain is entire locals.
+//!
+//! A local will be maybe initialized if *any* projection of that local might be initialized.
+
+use crate::dataflow::{self, BottomValue, GenKill};
+
+use rustc_index::bit_set::BitSet;
+use rustc_middle::mir::visit::{PlaceContext, Visitor};
+use rustc_middle::mir::{self, BasicBlock, Local, Location};
+
+pub struct MaybeInitializedLocals;
+
+impl BottomValue for MaybeInitializedLocals {
+    /// bottom = uninit
+    const BOTTOM_VALUE: bool = false;
+}
+
+impl dataflow::AnalysisDomain<'tcx> for MaybeInitializedLocals {
+    type Idx = Local;
+
+    const NAME: &'static str = "maybe_init_locals";
+
+    fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
+    fn initialize_start_block(&self, body: &mir::Body<'tcx>, entry_set: &mut BitSet<Self::Idx>) {
+        // Function arguments are initialized to begin with.
+        for arg in body.args_iter() {
+            entry_set.insert(arg);
+        }
+    }
+}
+
+impl dataflow::GenKillAnalysis<'tcx> for MaybeInitializedLocals {
+    // The generator transform relies on the fact that this analysis does **not** use "before"
+    // effects.
+
+    fn statement_effect(
+        &self,
+        trans: &mut impl GenKill<Self::Idx>,
+        statement: &mir::Statement<'tcx>,
+        loc: Location,
+    ) {
+        TransferFunction { trans }.visit_statement(statement, loc)
+    }
+
+    fn terminator_effect(
+        &self,
+        trans: &mut impl GenKill<Self::Idx>,
+        terminator: &mir::Terminator<'tcx>,
+        loc: Location,
+    ) {
+        TransferFunction { trans }.visit_terminator(terminator, loc)
+    }
+
+    fn call_return_effect(
+        &self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _block: BasicBlock,
+        _func: &mir::Operand<'tcx>,
+        _args: &[mir::Operand<'tcx>],
+        return_place: mir::Place<'tcx>,
+    ) {
+        trans.gen(return_place.local)
+    }
+
+    /// See `Analysis::apply_yield_resume_effect`.
+    fn yield_resume_effect(
+        &self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _resume_block: BasicBlock,
+        resume_place: mir::Place<'tcx>,
+    ) {
+        trans.gen(resume_place.local)
+    }
+}
+
+struct TransferFunction<'a, T> {
+    trans: &'a mut T,
+}
+
+impl<T> Visitor<'tcx> for TransferFunction<'a, T>
+where
+    T: GenKill<Local>,
+{
+    fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) {
+        use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, NonUseContext};
+        match context {
+            // These are handled specially in `call_return_effect` and `yield_resume_effect`.
+            PlaceContext::MutatingUse(MutatingUseContext::Call | MutatingUseContext::Yield) => {}
+
+            // Otherwise, when a place is mutated, we must consider it possibly initialized.
+            PlaceContext::MutatingUse(_) => self.trans.gen(local),
+
+            // If the local is moved out of, or if it gets marked `StorageDead`, consider it no
+            // longer initialized.
+ PlaceContext::NonUse(NonUseContext::StorageDead) + | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => self.trans.kill(local), + + // All other uses do not affect this analysis. + PlaceContext::NonUse( + NonUseContext::StorageLive + | NonUseContext::AscribeUserTy + | NonUseContext::VarDebugInfo, + ) + | PlaceContext::NonMutatingUse( + NonMutatingUseContext::Inspect + | NonMutatingUseContext::Copy + | NonMutatingUseContext::SharedBorrow + | NonMutatingUseContext::ShallowBorrow + | NonMutatingUseContext::UniqueBorrow + | NonMutatingUseContext::AddressOf + | NonMutatingUseContext::Projection, + ) => {} + } + } +} diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index e199a174efbc3..ed01d6b01ea43 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -22,13 +22,15 @@ use crate::dataflow::drop_flag_effects; mod borrowed_locals; pub(super) mod borrows; +mod init_locals; mod liveness; mod storage_liveness; pub use self::borrowed_locals::{MaybeBorrowedLocals, MaybeMutBorrowedLocals}; pub use self::borrows::Borrows; +pub use self::init_locals::MaybeInitializedLocals; pub use self::liveness::MaybeLiveLocals; -pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive}; +pub use self::storage_liveness::MaybeStorageLive; /// `MaybeInitializedPlaces` tracks all places that might be /// initialized upon reaching a particular point in the control flow diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index bbc4942030ef7..2a2be069b1ed8 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -1,11 +1,9 @@ pub use super::*; use crate::dataflow::BottomValue; -use crate::dataflow::{self, GenKill, Results, ResultsRefCursor}; +use crate::dataflow::{self, GenKill}; use crate::util::storage::AlwaysLiveLocals; -use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; -use std::cell::RefCell; #[derive(Clone)] pub struct MaybeStorageLive { @@ -78,233 +76,3 @@ impl BottomValue for MaybeStorageLive { /// bottom = dead const BOTTOM_VALUE: bool = false; } - -type BorrowedLocalsResults<'a, 'tcx> = ResultsRefCursor<'a, 'a, 'tcx, MaybeBorrowedLocals>; - -/// Dataflow analysis that determines whether each local requires storage at a -/// given location; i.e. whether its storage can go away without being observed. 
-pub struct MaybeRequiresStorage<'mir, 'tcx> { - body: &'mir Body<'tcx>, - borrowed_locals: RefCell>, -} - -impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> { - pub fn new( - body: &'mir Body<'tcx>, - borrowed_locals: &'mir Results<'tcx, MaybeBorrowedLocals>, - ) -> Self { - MaybeRequiresStorage { - body, - borrowed_locals: RefCell::new(ResultsRefCursor::new(&body, borrowed_locals)), - } - } -} - -impl<'mir, 'tcx> dataflow::AnalysisDomain<'tcx> for MaybeRequiresStorage<'mir, 'tcx> { - type Idx = Local; - - const NAME: &'static str = "requires_storage"; - - fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize { - body.local_decls.len() - } - - fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut BitSet) { - // The resume argument is live on function entry (we don't care about - // the `self` argument) - for arg in body.args_iter().skip(1) { - on_entry.insert(arg); - } - } -} - -impl<'mir, 'tcx> dataflow::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tcx> { - fn before_statement_effect( - &self, - trans: &mut impl GenKill, - stmt: &mir::Statement<'tcx>, - loc: Location, - ) { - // If a place is borrowed in a statement, it needs storage for that statement. - self.borrowed_locals.borrow().analysis().statement_effect(trans, stmt, loc); - - match &stmt.kind { - StatementKind::StorageDead(l) => trans.kill(*l), - - // If a place is assigned to in a statement, it needs storage for that statement. - StatementKind::Assign(box (place, _)) - | StatementKind::SetDiscriminant { box place, .. } => { - trans.gen(place.local); - } - StatementKind::LlvmInlineAsm(asm) => { - for place in &*asm.outputs { - trans.gen(place.local); - } - } - - // Nothing to do for these. Match exhaustively so this fails to compile when new - // variants are added. - StatementKind::AscribeUserType(..) - | StatementKind::FakeRead(..) - | StatementKind::Nop - | StatementKind::Retag(..) - | StatementKind::StorageLive(..) => {} - } - } - - fn statement_effect( - &self, - trans: &mut impl GenKill, - _: &mir::Statement<'tcx>, - loc: Location, - ) { - // If we move from a place then only stops needing storage *after* - // that statement. - self.check_for_move(trans, loc); - } - - fn before_terminator_effect( - &self, - trans: &mut impl GenKill, - terminator: &mir::Terminator<'tcx>, - loc: Location, - ) { - // If a place is borrowed in a terminator, it needs storage for that terminator. - self.borrowed_locals.borrow().analysis().terminator_effect(trans, terminator, loc); - - match &terminator.kind { - TerminatorKind::Call { destination: Some((place, _)), .. } => { - trans.gen(place.local); - } - - // Note that we do *not* gen the `resume_arg` of `Yield` terminators. The reason for - // that is that a `yield` will return from the function, and `resume_arg` is written - // only when the generator is later resumed. Unlike `Call`, this doesn't require the - // place to have storage *before* the yield, only after. - TerminatorKind::Yield { .. } => {} - - TerminatorKind::InlineAsm { operands, .. } => { - for op in operands { - match op { - InlineAsmOperand::Out { place, .. } - | InlineAsmOperand::InOut { out_place: place, .. } => { - if let Some(place) = place { - trans.gen(place.local); - } - } - InlineAsmOperand::In { .. } - | InlineAsmOperand::Const { .. } - | InlineAsmOperand::SymFn { .. } - | InlineAsmOperand::SymStatic { .. } => {} - } - } - } - - // Nothing to do for these. Match exhaustively so this fails to compile when new - // variants are added. - TerminatorKind::Call { destination: None, .. 
} - | TerminatorKind::Abort - | TerminatorKind::Assert { .. } - | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } - | TerminatorKind::FalseEdges { .. } - | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop - | TerminatorKind::Goto { .. } - | TerminatorKind::Resume - | TerminatorKind::Return - | TerminatorKind::SwitchInt { .. } - | TerminatorKind::Unreachable => {} - } - } - - fn terminator_effect( - &self, - trans: &mut impl GenKill, - terminator: &mir::Terminator<'tcx>, - loc: Location, - ) { - match &terminator.kind { - // For call terminators the destination requires storage for the call - // and after the call returns successfully, but not after a panic. - // Since `propagate_call_unwind` doesn't exist, we have to kill the - // destination here, and then gen it again in `call_return_effect`. - TerminatorKind::Call { destination: Some((place, _)), .. } => { - trans.kill(place.local); - } - - // Nothing to do for these. Match exhaustively so this fails to compile when new - // variants are added. - TerminatorKind::Call { destination: None, .. } - | TerminatorKind::Yield { .. } - | TerminatorKind::Abort - | TerminatorKind::Assert { .. } - | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } - | TerminatorKind::FalseEdges { .. } - | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::GeneratorDrop - | TerminatorKind::Goto { .. } - | TerminatorKind::InlineAsm { .. } - | TerminatorKind::Resume - | TerminatorKind::Return - | TerminatorKind::SwitchInt { .. } - | TerminatorKind::Unreachable => {} - } - - self.check_for_move(trans, loc); - } - - fn call_return_effect( - &self, - trans: &mut impl GenKill, - _block: BasicBlock, - _func: &mir::Operand<'tcx>, - _args: &[mir::Operand<'tcx>], - return_place: mir::Place<'tcx>, - ) { - trans.gen(return_place.local); - } - - fn yield_resume_effect( - &self, - trans: &mut impl GenKill, - _resume_block: BasicBlock, - resume_place: mir::Place<'tcx>, - ) { - trans.gen(resume_place.local); - } -} - -impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> { - /// Kill locals that are fully moved and have not been borrowed. - fn check_for_move(&self, trans: &mut impl GenKill, loc: Location) { - let mut visitor = MoveVisitor { trans, borrowed_locals: &self.borrowed_locals }; - visitor.visit_location(&self.body, loc); - } -} - -impl<'mir, 'tcx> BottomValue for MaybeRequiresStorage<'mir, 'tcx> { - /// bottom = dead - const BOTTOM_VALUE: bool = false; -} - -struct MoveVisitor<'a, 'mir, 'tcx, T> { - borrowed_locals: &'a RefCell>, - trans: &'a mut T, -} - -impl<'a, 'mir, 'tcx, T> Visitor<'tcx> for MoveVisitor<'a, 'mir, 'tcx, T> -where - T: GenKill, -{ - fn visit_local(&mut self, local: &Local, context: PlaceContext, loc: Location) { - if PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) == context { - let mut borrowed_locals = self.borrowed_locals.borrow_mut(); - borrowed_locals.seek_before_primary_effect(loc); - if !borrowed_locals.contains(*local) { - self.trans.kill(*local); - } - } - } -} diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs index 4bf2adcd450c0..5f8104e7934eb 100644 --- a/src/librustc_mir/transform/generator.rs +++ b/src/librustc_mir/transform/generator.rs @@ -50,7 +50,7 @@ //! Otherwise it drops all the values in scope at the last suspension point. 
use crate::dataflow::impls::{ - MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive, + MaybeBorrowedLocals, MaybeInitializedLocals, MaybeLiveLocals, MaybeStorageLive, }; use crate::dataflow::{self, Analysis}; use crate::transform::no_landing_pads::no_landing_pads; @@ -444,86 +444,80 @@ fn locals_live_across_suspend_points( movable: bool, ) -> LivenessInfo { let def_id = source.def_id(); - let body_ref: &Body<'_> = &body; // Calculate when MIR locals have live storage. This gives us an upper bound of their // lifetimes. let mut storage_live = MaybeStorageLive::new(always_live_locals.clone()) - .into_engine(tcx, body_ref, def_id) + .into_engine(tcx, body, def_id) .iterate_to_fixpoint() - .into_results_cursor(body_ref); - - // Calculate the MIR locals which have been previously - // borrowed (even if they are still active). - let borrowed_locals_results = - MaybeBorrowedLocals::all_borrows().into_engine(tcx, body_ref, def_id).iterate_to_fixpoint(); - - let mut borrowed_locals_cursor = - dataflow::ResultsCursor::new(body_ref, &borrowed_locals_results); - - // Calculate the MIR locals that we actually need to keep storage around - // for. - let requires_storage_results = MaybeRequiresStorage::new(body, &borrowed_locals_results) - .into_engine(tcx, body_ref, def_id) - .iterate_to_fixpoint(); - let mut requires_storage_cursor = - dataflow::ResultsCursor::new(body_ref, &requires_storage_results); - - // Calculate the liveness of MIR locals ignoring borrows. - let mut liveness = MaybeLiveLocals - .into_engine(tcx, body_ref, def_id) + .into_results_cursor(body); + + let mut init = MaybeInitializedLocals + .into_engine(tcx, body, def_id) + .iterate_to_fixpoint() + .into_results_cursor(body); + + let mut live = MaybeLiveLocals + .into_engine(tcx, body, def_id) + .iterate_to_fixpoint() + .into_results_cursor(body); + + let mut borrowed = MaybeBorrowedLocals::all_borrows() + .into_engine(tcx, body, def_id) .iterate_to_fixpoint() - .into_results_cursor(body_ref); + .into_results_cursor(body); + + // Liveness across yield points is determined by the following boolean equation, where `live`, + // `init` and `borrowed` come from dataflow and `movable` is a property of the generator. + // Movable generators do not allow borrows to live across yield points, so they don't need to + // store a local simply because it is borrowed. + // + // live_across_yield := (live & init) | (!movable & borrowed) + // + let mut locals_live_across_yield_point = |block| { + live.seek_to_block_end(block); + let mut live_locals = live.get().clone(); + + init.seek_to_block_end(block); + live_locals.intersect(init.get()); + + if !movable { + borrowed.seek_to_block_end(block); + live_locals.union(borrowed.get()); + } + + live_locals + }; let mut storage_liveness_map = IndexVec::from_elem(None, body.basic_blocks()); let mut live_locals_at_suspension_points = Vec::new(); let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len()); for (block, data) in body.basic_blocks().iter_enumerated() { - if let TerminatorKind::Yield { .. } = data.terminator().kind { - let loc = Location { block, statement_index: data.statements.len() }; - - liveness.seek_to_block_end(block); - let mut live_locals = liveness.get().clone(); - - if !movable { - // The `liveness` variable contains the liveness of MIR locals ignoring borrows. - // This is correct for movable generators since borrows cannot live across - // suspension points. 
However for immovable generators we need to account for
-                // borrows, so we conseratively assume that all borrowed locals are live until
-                // we find a StorageDead statement referencing the locals.
-                // To do this we just union our `liveness` result with `borrowed_locals`, which
-                // contains all the locals which has been borrowed before this suspension point.
-                // If a borrow is converted to a raw reference, we must also assume that it lives
-                // forever. Note that the final liveness is still bounded by the storage liveness
-                // of the local, which happens using the `intersect` operation below.
-                borrowed_locals_cursor.seek_before_primary_effect(loc);
-                live_locals.union(borrowed_locals_cursor.get());
-            }
-
-            // Store the storage liveness for later use so we can restore the state
-            // after a suspension point
-            storage_live.seek_before_primary_effect(loc);
-            storage_liveness_map[block] = Some(storage_live.get().clone());
-
-            // Locals live are live at this point only if they are used across
-            // suspension points (the `liveness` variable)
-            // and their storage is required (the `storage_required` variable)
-            requires_storage_cursor.seek_before_primary_effect(loc);
-            live_locals.intersect(requires_storage_cursor.get());
+        if !matches!(data.terminator().kind, TerminatorKind::Yield { .. }) {
+            continue;
+        }
-            // The generator argument is ignored.
-            live_locals.remove(SELF_ARG);
+        // Store the storage liveness for later use so we can restore the state
+        // after a suspension point
+        storage_live.seek_to_block_end(block);
+        storage_liveness_map[block] = Some(storage_live.get().clone());
-            debug!("loc = {:?}, live_locals = {:?}", loc, live_locals);
+        let mut live_locals = locals_live_across_yield_point(block);
-            // Add the locals live at this suspension point to the set of locals which live across
-            // any suspension points
-            live_locals_at_any_suspension_point.union(&live_locals);
+        // The combination of `MaybeInitializedLocals` and `MaybeBorrowedLocals` should be strictly
+        // more precise than `MaybeStorageLive` because they handle `StorageDead` themselves. This
+        // assumes that the MIR forbids locals from being initialized/borrowed before reaching
+        // `StorageLive`.
+        debug_assert!(storage_live.get().superset(&live_locals));
-            live_locals_at_suspension_points.push(live_locals);
-        }
+        // Ignore the generator's `self` argument since it is handled separately.
+        live_locals.remove(SELF_ARG);
+        debug!("block = {:?}, live_locals = {:?}", block, live_locals);
+        live_locals_at_any_suspension_point.union(&live_locals);
+        live_locals_at_suspension_points.push(live_locals);
     }
+    debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point);

     // Renumber our liveness_map bitsets to include only the locals we are
@@ -534,10 +528,11 @@ fn locals_live_across_suspend_points(
         .collect();

     let storage_conflicts = compute_storage_conflicts(
-        body_ref,
+        body,
         &live_locals_at_any_suspension_point,
         always_live_locals.clone(),
-        requires_storage_results,
+        init,
+        borrowed,
     );

     LivenessInfo {
@@ -569,6 +564,37 @@ fn renumber_bitset(
     out
 }

+/// Record conflicts between locals at the current dataflow cursor positions.
+///
+/// You need to seek the cursors before calling this function.
+fn record_conflicts_at_curr_loc(
+    local_conflicts: &mut BitMatrix<Local, Local>,
+    init: &dataflow::ResultsCursor<'mir, 'tcx, MaybeInitializedLocals>,
+    borrowed: &dataflow::ResultsCursor<'mir, 'tcx, MaybeBorrowedLocals>,
+) {
+    // A local requires storage if it is initialized or borrowed. For now, a local
+    // becomes uninitialized if it is moved from, but is still considered "borrowed".
+    //
+    // requires_storage := init | borrowed
+    //
+    // Just like when determining what locals are live at yield points, there is no need
+    // to look at storage liveness here, since `init | borrowed` is strictly more precise.
+    //
+    // FIXME: This function is called in a loop, so it might be better to pass in a temporary
+    // bitset rather than cloning here.
+    let mut requires_storage = init.get().clone();
+    requires_storage.union(borrowed.get());
+
+    for local in requires_storage.iter() {
+        local_conflicts.union_row_with(&requires_storage, local);
+    }
+
+    // `>1` because the `self` argument always requires storage.
+    if requires_storage.count() > 1 {
+        trace!("requires_storage={:?}", requires_storage);
+    }
+}
+
 /// For every saved local, looks for which locals are StorageLive at the same
 /// time. Generates a bitset for every local of all the other locals that may be
 /// StorageLive simultaneously with that local. This is used in the layout
@@ -577,30 +603,45 @@ fn compute_storage_conflicts(
     body: &'mir Body<'tcx>,
     stored_locals: &BitSet<Local>,
     always_live_locals: storage::AlwaysLiveLocals,
-    requires_storage: dataflow::Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>,
+    mut init: dataflow::ResultsCursor<'mir, 'tcx, MaybeInitializedLocals>,
+    mut borrowed: dataflow::ResultsCursor<'mir, 'tcx, MaybeBorrowedLocals>,
 ) -> BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal> {
-    assert_eq!(body.local_decls.len(), stored_locals.domain_size());
-    debug!("compute_storage_conflicts({:?})", body.span);
-    debug!("always_live = {:?}", always_live_locals);
-
-    // Locals that are always live or ones that need to be stored across
-    // suspension points are not eligible for overlap.
-    let mut ineligible_locals = always_live_locals.into_inner();
-    ineligible_locals.intersect(stored_locals);
+    assert_eq!(body.local_decls.len(), stored_locals.domain_size());

-    // Compute the storage conflicts for all eligible locals.
-    let mut visitor = StorageConflictVisitor {
-        body,
-        stored_locals: &stored_locals,
-        local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
-    };
+    // Locals that are always live conflict with all other locals.
+    //
+    // FIXME: Why do we need to handle locals without `Storage{Live,Dead}` specially here?
+    // Shouldn't it be enough to know whether they are initialized?
+    let always_live_locals = always_live_locals.into_inner();
+    let mut local_conflicts = BitMatrix::from_row_n(&always_live_locals, body.local_decls.len());
+
+    // Visit every reachable statement and terminator. The exact order does not matter. When two
+    // locals are live at the same point in time, add an entry in the conflict matrix.
+    for (block, data) in traversal::preorder(body) {
+        // Ignore unreachable blocks.
+        if data.terminator().kind == TerminatorKind::Unreachable {
+            continue;
+        }

-    // Visit only reachable basic blocks. The exact order is not important.
-    let reachable_blocks = traversal::preorder(body).map(|(bb, _)| bb);
-    requires_storage.visit_with(body, reachable_blocks, &mut visitor);
+        // Observe the dataflow state *before* all possible locations (statement or terminator) in
+        // each basic block...
+ for statement_index in 0..=data.statements.len() { + let loc = Location { block, statement_index }; + trace!("record conflicts at {:?}", loc); + init.seek_before_primary_effect(loc); + borrowed.seek_before_primary_effect(loc); + record_conflicts_at_curr_loc(&mut local_conflicts, &init, &borrowed); + } - let local_conflicts = visitor.local_conflicts; + // ...and then observe the state *after* the terminator effect is applied. As long as + // neither `init` nor `borrowed` has a "before" effect, we will observe all possible + // dataflow states here or in the loop above. + trace!("record conflicts at end of {:?}", block); + init.seek_to_block_end(block); + borrowed.seek_to_block_end(block); + record_conflicts_at_curr_loc(&mut local_conflicts, &init, &borrowed); + } // Compress the matrix using only stored locals (Local -> GeneratorSavedLocal). // @@ -612,7 +653,7 @@ fn compute_storage_conflicts( let mut storage_conflicts = BitMatrix::new(stored_locals.count(), stored_locals.count()); for (idx_a, local_a) in stored_locals.iter().enumerate() { let saved_local_a = GeneratorSavedLocal::new(idx_a); - if ineligible_locals.contains(local_a) { + if always_live_locals.contains(local_a) { // Conflicts with everything. storage_conflicts.insert_all_into_row(saved_local_a); } else { @@ -628,56 +669,6 @@ fn compute_storage_conflicts( storage_conflicts } -struct StorageConflictVisitor<'mir, 'tcx, 's> { - body: &'mir Body<'tcx>, - stored_locals: &'s BitSet, - // FIXME(tmandry): Consider using sparse bitsets here once we have good - // benchmarks for generators. - local_conflicts: BitMatrix, -} - -impl dataflow::ResultsVisitor<'mir, 'tcx> for StorageConflictVisitor<'mir, 'tcx, '_> { - type FlowState = BitSet; - - fn visit_statement_before_primary_effect( - &mut self, - state: &Self::FlowState, - _statement: &'mir Statement<'tcx>, - loc: Location, - ) { - self.apply_state(state, loc); - } - - fn visit_terminator_before_primary_effect( - &mut self, - state: &Self::FlowState, - _terminator: &'mir Terminator<'tcx>, - loc: Location, - ) { - self.apply_state(state, loc); - } -} - -impl<'body, 'tcx, 's> StorageConflictVisitor<'body, 'tcx, 's> { - fn apply_state(&mut self, flow_state: &BitSet, loc: Location) { - // Ignore unreachable blocks. 
- if self.body.basic_blocks()[loc.block].terminator().kind == TerminatorKind::Unreachable { - return; - } - - let mut eligible_storage_live = flow_state.clone(); - eligible_storage_live.intersect(&self.stored_locals); - - for local in eligible_storage_live.iter() { - self.local_conflicts.union_row_with(&eligible_storage_live, local); - } - - if eligible_storage_live.count() > 1 { - trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live); - } - } -} - fn compute_layout<'tcx>( tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, diff --git a/src/test/ui/async-await/async-fn-size-moved-locals.rs b/src/test/ui/async-await/async-fn-size-moved-locals.rs index 636fafc2bc44a..000acf14a3fbc 100644 --- a/src/test/ui/async-await/async-fn-size-moved-locals.rs +++ b/src/test/ui/async-await/async-fn-size-moved-locals.rs @@ -114,5 +114,5 @@ fn main() { assert_eq!(1026, std::mem::size_of_val(&single_with_noop())); assert_eq!(3078, std::mem::size_of_val(&joined())); assert_eq!(3079, std::mem::size_of_val(&joined_with_noop())); - assert_eq!(7181, std::mem::size_of_val(&mixed_sizes())); + assert_eq!(6157, std::mem::size_of_val(&mixed_sizes())); } diff --git a/src/test/ui/generator/size-moved-locals.rs b/src/test/ui/generator/size-moved-locals.rs index 74c60d98154dd..a5786c2999eb4 100644 --- a/src/test/ui/generator/size-moved-locals.rs +++ b/src/test/ui/generator/size-moved-locals.rs @@ -72,6 +72,6 @@ fn overlap_x_and_y() -> impl Generator { fn main() { assert_eq!(1025, std::mem::size_of_val(&move_before_yield())); assert_eq!(1026, std::mem::size_of_val(&move_before_yield_with_noop())); - assert_eq!(2051, std::mem::size_of_val(&overlap_move_points())); + assert_eq!(1027, std::mem::size_of_val(&overlap_move_points())); assert_eq!(1026, std::mem::size_of_val(&overlap_x_and_y())); }
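As a quick aid for following the new logic, here is a minimal standalone sketch (illustrative only, not part of the patch; the `Local` alias and plain `HashSet`s are stand-ins for rustc's local indices and dataflow bitsets) of the two boolean rules the transform now uses:

```rust
use std::collections::HashSet;

/// Hypothetical stand-in for rustc's `Local` index type.
type Local = u32;

/// live_across_yield := (live & init) | (!movable & borrowed)
fn live_across_yield(
    live: &HashSet<Local>,
    init: &HashSet<Local>,
    borrowed: &HashSet<Local>,
    movable: bool,
) -> HashSet<Local> {
    // A local is saved in the generator state if it is live *and* initialized...
    let mut saved: HashSet<Local> = live.intersection(init).copied().collect();
    // ...or, for immovable generators only, if it has ever been borrowed, since the
    // borrow may still be observed after the yield.
    if !movable {
        saved.extend(borrowed.iter().copied());
    }
    saved
}

/// requires_storage := init | borrowed
fn requires_storage(init: &HashSet<Local>, borrowed: &HashSet<Local>) -> HashSet<Local> {
    init.union(borrowed).copied().collect()
}

fn main() {
    let live: HashSet<Local> = [1, 2].into_iter().collect();
    let init: HashSet<Local> = [2, 3].into_iter().collect();
    let borrowed: HashSet<Local> = [4].into_iter().collect();

    // A movable generator saves only local 2 (live and initialized); an immovable one
    // additionally saves the borrowed local 4.
    let movable_saved: HashSet<Local> = [2].into_iter().collect();
    let immovable_saved: HashSet<Local> = [2, 4].into_iter().collect();
    assert_eq!(live_across_yield(&live, &init, &borrowed, true), movable_saved);
    assert_eq!(live_across_yield(&live, &init, &borrowed, false), immovable_saved);

    // Conflict recording treats a local as needing storage while it is initialized or borrowed.
    let needs_storage: HashSet<Local> = [2, 3, 4].into_iter().collect();
    assert_eq!(requires_storage(&init, &borrowed), needs_storage);
}
```

In the actual transform, `live`, `init` and `borrowed` are read off the dataflow cursors at each yield point or location; the sketch only models how the resulting sets are combined.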