From 042b8c62df9e64be0526d2f1e670b78b4424f06d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 26 Aug 2025 12:13:30 +0000 Subject: [PATCH 1/5] wip --- .../src/type_check/liveness/trace.rs | 570 +++++++++++++++++- .../rustc_mir_dataflow/src/framework/fmt.rs | 14 + .../src/impls/initialized.rs | 8 +- .../src/impls/initialized2.rs | 402 ++++++++++++ compiler/rustc_mir_dataflow/src/impls/mod.rs | 5 + .../rustc_mir_dataflow/src/move_paths/mod.rs | 11 +- compiler/rustc_mir_dataflow/src/rustc_peek.rs | 20 +- 7 files changed, 1008 insertions(+), 22 deletions(-) create mode 100644 compiler/rustc_mir_dataflow/src/impls/initialized2.rs diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index b704d8f0a7692..b035771beb1dd 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -7,7 +7,7 @@ use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, HasLocalDecls, Loc use rustc_middle::traits::query::DropckOutlivesResult; use rustc_middle::ty::relate::Relate; use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; -use rustc_mir_dataflow::impls::MaybeInitializedPlaces; +use rustc_mir_dataflow::impls::MaybeInitializedPlaces2; use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex}; use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex}; use rustc_mir_dataflow::{Analysis, ResultsCursor}; @@ -45,6 +45,34 @@ pub(super) fn trace<'tcx>( boring_locals: Vec, ) { let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, typeck.body); + + // let mut locals_with_drop_points = 0; + + // let timer = std::time::Instant::now(); + + let mut dense_mpis = smallvec::SmallVec::new(); + + for &local in relevant_live_locals.iter() { + let Some(mpi) = move_data.rev_lookup.find_local(local) else { unreachable!() }; + + // We only compute initializedness in drop-liveness on locals with drop points. 
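// [Editor's sketch] The code below collects, for each relevant local that actually has drop
// points, its move path plus all descendant move paths (its fields, their fields, ...); that
// subset is what the filtered dataflow analysis will track. A self-contained model of the
// descendant walk over a first-child/next-sibling tree, using illustrative types rather than
// rustc's `MovePath`:

struct PathNode {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

fn collect_with_descendants(paths: &[PathNode], root: usize, out: &mut Vec<usize>) {
    out.push(root);
    let mut todo = match paths[root].first_child {
        Some(child) => vec![child],
        None => return,
    };
    while let Some(p) = todo.pop() {
        out.push(p);
        if let Some(child) = paths[p].first_child {
            todo.push(child);
        }
        // Siblings of a strict descendant share its parent, so this never escapes `root`'s subtree.
        if let Some(sibling) = paths[p].next_sibling {
            todo.push(sibling);
        }
    }
}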
+ if local_use_map.drops(local).next().is_none() { + continue; + } + + // locals_with_drop_points += 1; + + dense_mpis.push(mpi); + + let move_paths = &move_data.move_paths; + let _ = move_paths[mpi].find_descendant(move_paths, |mpi| { + dense_mpis.push(mpi); + false + }); + } + + // let elapsed = timer.elapsed(); + let cx = LivenessContext { typeck, flow_inits: None, @@ -52,8 +80,34 @@ pub(super) fn trace<'tcx>( local_use_map, move_data, drop_data: FxIndexMap::default(), + dense_mpis, }; + // if locals_with_drop_points > 0 && std::env::var("LETSGO").is_ok() { + // eprintln!( + // "body has {} relevant locals, {} with drop points -> {} MPIs of interest (local + descendants, in {} ns) out of {}, {:?}", + // relevant_live_locals.len(), + // locals_with_drop_points, + // cx.dense_mpis.len(), + // elapsed.as_nanos(), + // cx.move_data.move_paths.len(), + // cx.typeck.body.span, + // ); + // // for (idx, &local) in relevant_live_locals.iter().enumerate() { + // // let Some(mpi) = cx.move_data.rev_lookup.find_local(local) else { continue }; + + // // let drop_points = cx.local_use_map.drops(local).count(); + // // if drop_points > 0 { + // // eprintln!( + // // "relevant local {idx:<4}: local {} (mpi: {:?}) has {} drop points", + // // local.as_u32(), + // // mpi, + // // drop_points, + // // ); + // // } + // // } + // } + let mut results = LivenessResults::new(cx); results.add_extra_drop_facts(&relevant_live_locals); @@ -81,11 +135,13 @@ struct LivenessContext<'a, 'typeck, 'tcx> { /// Results of dataflow tracking which variables (and paths) have been /// initialized. Computed lazily when needed by drop-liveness. - flow_inits: Option>>, + flow_inits: Option>>, /// Index indicating where each variable is assigned, used, or /// dropped. local_use_map: &'a LocalUseMap, + + dense_mpis: smallvec::SmallVec<[MovePathIndex; 1]>, } struct DropData<'tcx> { @@ -458,6 +514,12 @@ impl<'a, 'typeck, 'tcx> LivenessResults<'a, 'typeck, 'tcx> { } } +// type IndexMapper = SparseIndexMapper; +// type IndexMapper = rustc_mir_dataflow::impls::DefaultMovePathIndexMapper; +type IndexMapper = rustc_mir_dataflow::impls::FilteringMovePathIndexMapper; +// type IndexMapper = Sparse32; +// type IndexMapper = Sparse; + impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { /// Computes the `MaybeInitializedPlaces` dataflow analysis if it hasn't been done already. /// @@ -468,7 +530,9 @@ impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { /// /// This happens as part of the drop-liveness computation: it's the only place checking for /// maybe-initializedness of `MovePathIndex`es. - fn flow_inits(&mut self) -> &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>> { + fn flow_inits( + &mut self, + ) -> &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces2<'a, 'tcx, IndexMapper>> { self.flow_inits.get_or_insert_with(|| { let tcx = self.typeck.tcx(); let body = self.typeck.body; @@ -484,14 +548,495 @@ impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { // a much, much smaller domain: in our benchmarks, when it's not zero (the most likely // case), there are a few dozens compared to e.g. thousands or tens of thousands of // locals and move paths. - let flow_inits = MaybeInitializedPlaces::new(tcx, body, self.move_data) + // eprintln!( + // "computing flow_inits: {} MPIs total, {} relevant MPIs", + // self.move_data.move_paths.len(), + // self.dense_mpis.len() + // ); + + // FIXME: use the sparse vec + bitset metadata trick instead of a map! 
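// [Editor's sketch] The FIXME just above refers to replacing the dense-to-sparse hash map with
// a bitset plus per-word popcount prefixes: membership is a bit test, and the sparse index is
// the rank of the bit (the number of tracked keys below it). The commented-out `Sparse`
// prototype further down implements this; here is a minimal, self-contained version over plain
// `usize` keys (all names are illustrative, not rustc APIs). It assumes keys are inserted in
// ascending order and `build_prefixes` is called before any lookup.

struct RankIndexMap {
    words: Vec<u64>,    // one bit per possible dense key
    prefixes: Vec<u32>, // prefixes[i] = number of set bits in words[..=i]
}

impl RankIndexMap {
    fn new(dense_domain: usize) -> Self {
        Self { words: vec![0; dense_domain.div_ceil(64)], prefixes: Vec::new() }
    }

    fn insert(&mut self, key: usize) {
        self.words[key / 64] |= 1 << (key % 64);
    }

    fn build_prefixes(&mut self) {
        let mut ones = 0;
        self.prefixes = self.words.iter().map(|w| { ones += w.count_ones(); ones }).collect();
    }

    /// Maps a dense key to its sparse index (its rank), or `None` if it isn't tracked.
    fn map_index(&self, key: usize) -> Option<u32> {
        let (word_idx, bit) = (key / 64, 1u64 << (key % 64));
        let word = self.words[word_idx];
        if word & bit == 0 {
            return None;
        }
        // Set bits below `key` in this word, plus all set bits in earlier words.
        let below_in_word = (word & (bit - 1)).count_ones();
        let below_in_prev = if word_idx > 0 { self.prefixes[word_idx - 1] } else { 0 };
        Some(below_in_prev + below_in_word)
    }
}

// For example, after inserting {3, 10, 70} and building prefixes:
// map_index(3) == Some(0), map_index(10) == Some(1), map_index(70) == Some(2), map_index(4) == None.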
+ + // let map_timer = std::time::Instant::now(); + // let mut sparse_map = FxIndexMap::default(); + // for (idx, &dense_idx) in self.dense_mpis.iter().enumerate() { + // let sparse_idx = SparseMovePathIndex::from_usize(idx); + // sparse_map.insert(dense_idx, sparse_idx); + // } + // // for local in body.args_iter() { + // // let Some(mpi) = self.move_data.rev_lookup.find_local(local) else { unreachable!() }; + // // self.dense_mpis.push(mpi); + // // map.insert(mpi, self.dense_mpis.len()); + // // } + + // let map_len = sparse_map.len(); + // let mapper = FilteringMovePathIndexMapper { sparse_map }; + // let map_elapsed = map_timer.elapsed(); + + // // let mapper = rustc_mir_dataflow::impls::NoOpMapper; + + // let sparse_bitset_timer = std::time::Instant::now(); + + // // let mut sparse_bitset = Sparse32::new(self.dense_mpis.len()); + // // // FIXME: do this in asc order to keep idxes stable and not shuffle the vec inside + // // for &dense_idx in self.dense_mpis.iter() { + // // sparse_bitset.insert(dense_idx); + // // } + + // let sparse_bitset = Sparse32::new(&self.dense_mpis); + + // // let sparse_bitset_elapsed = sparse_bitset_timer.elapsed(); + // // let sparse_bitset_len = sparse_bitset.dense.len(); + + // // let sparse_bitset_timer = std::time::Instant::now(); + + // let mut sparse_bitset = Sparse::new( + // 1 + self.dense_mpis.iter().max().unwrap().as_usize(), + // self.dense_mpis.len(), + // ); + // // FIXME: do this in asc order to keep idxes stable and not shuffle the vec inside + // for &dense_idx in self.dense_mpis.iter() { + // sparse_bitset.insert(dense_idx); + // } + // // also: move this into the sparse ctor, so that prefixes can be computed there after inserting stuff + + // sparse_bitset.compute_prefixes(); + + // // let sparse_bitset_elapsed = sparse_bitset_timer.elapsed(); + // // let sparse_bitset_len = sparse_bitset.sparse.len(); + + // let mapper = sparse_bitset; + + // let timer = std::time::Instant::now(); + let flow_inits = MaybeInitializedPlaces2::new(tcx, body, self.move_data) + .filter_move_paths(&self.dense_mpis) + // .with_mapper(mapper) .iterate_to_fixpoint(tcx, body, Some("borrowck")) .into_results_cursor(body); + // let elapsed = timer.elapsed(); + + // use std::sync::OnceLock; + // static PROFILE: OnceLock = OnceLock::new(); + // if *PROFILE.get_or_init(|| std::env::var("LETSGO1").is_ok()) { + // eprintln!( + // "flow_inits took {:?} ns, map of {} took: {} ns, sparse bitset of {} took {} ns, {:?}", + // elapsed.as_nanos(), + // map_len, + // map_elapsed.as_nanos(), + // sparse_bitset_len, + // sparse_bitset_elapsed.as_nanos(), + // body.span, + // ); + // } flow_inits }) } } +// use boomphf::*; + +// struct Sparse32 { +// // bitmap: u64, +// // dense: [MovePathIndex; 64], +// dense: Vec, +// domain: usize, +// phf: Mphf, +// seen: DenseBitSet, +// } + +// impl Sparse32 { +// // pub(crate) fn new(sparse_domain_size: usize) -> Self { +// // Self { bitmap: 0, dense: [MovePathIndex::from_u32(0); _], domain: sparse_domain_size } +// // } + +// pub(crate) fn new(sparse_domain: &[MovePathIndex]) -> Self { +// // assert!(sparse_domain.len() < 640); + +// let dense_domain = 1 + sparse_domain.iter().max().unwrap().as_usize(); + +// let phf = Mphf::new(1.7, sparse_domain); +// let mut map = Self { +// // bitmap: 0, +// // dense: [MovePathIndex::from_u32(0); _], +// dense: vec![MovePathIndex::from_u32(0); sparse_domain.len()], +// domain: sparse_domain.len(), +// phf, +// seen: DenseBitSet::new_empty(dense_domain), +// }; + +// // if map.seen.domain_size() == 28 { 
+// // eprintln!( +// // "sparse domain of len {}, domain size: {}: {:?}", +// // sparse_domain.len(), +// // map.seen.domain_size(), +// // sparse_domain +// // ); +// // } + +// for &dense_idx in sparse_domain { +// map.insert(dense_idx); +// // if dense_idx.as_u32() == 28 && map.seen.domain_size() == 28 { +// // eprintln!( +// // "inserting {:?} in sparse domain of len {}, domain size: {} (dense_domain: {}), max in domain: {}, {:?}", +// // dense_idx, +// // sparse_domain.len(), +// // map.seen.domain_size(), +// // dense_domain, +// // sparse_domain.iter().max().unwrap().as_usize(), +// // sparse_domain, +// // ); +// // // todo!("yo, "); +// // } + +// // let r = catch_unwind(AssertUnwindSafe(|| { +// // map.seen.insert(dense_idx); +// // })); +// // if r.is_err() { +// // eprintln!( +// // "inserting {:?} in sparse domain of len {}, domain size: {}: {:?}", +// // dense_idx, +// // sparse_domain.len(), +// // map.seen.domain_size(), +// // sparse_domain +// // ); +// // panic!("oh: {r:?}"); +// // } +// } + +// map +// } + +// // #[inline(always)] +// // pub(crate) fn contains(&self, dense_idx: MovePathIndex) -> bool { +// // let bit = 1u64 << dense_idx.as_u32(); +// // (self.bitmap & bit) != 0 +// // } + +// // #[inline(always)] +// // pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> usize { +// // self.index_and_bit(dense_idx).0 +// // } + +// // #[inline(always)] +// // fn index_and_bit(&self, dense_idx: MovePathIndex) -> (usize, u64) { +// // let bit = 1u64 << dense_idx.as_u32(); +// // let mask = bit.wrapping_sub(1); +// // let below = self.bitmap & mask; +// // let sparse_idx = below.count_ones() as usize; +// // (sparse_idx, bit) +// // } + +// #[inline] +// pub(crate) fn insert(&mut self, dense_idx: MovePathIndex) { +// // tmp: +// // debug_assert!(dense_idx.as_u32() < 64); +// // debug_assert!( +// // !self.contains(dense_idx), +// // "the dense index {:?} is already present in the dense array!", +// // dense_idx +// // ); + +// // let (sparse_idx, bit) = self.index_and_bit(dense_idx); +// // // self.dense[sparse_idx] = dense_idx; +// // unsafe { +// // *self.dense.get_unchecked_mut(sparse_idx) = dense_idx; +// // } +// // self.bitmap |= bit; + +// // let sparse_idx = self.phf.hash(&dense_idx) as usize; +// let sparse_idx = self.phf.hash(&dense_idx) as usize; + +// // let bit = 1u64 << sparse_idx; +// unsafe { +// *self.dense.get_unchecked_mut(sparse_idx) = dense_idx; +// } +// // assert!(sparse_idx < self.dense.len()); +// // self.dense[sparse_idx] = dense_idx; + +// // self.dense.insert(sparse_idx, dense_idx); +// // self.bitmap |= bit; +// self.seen.insert(dense_idx); +// } +// } + +// impl rustc_mir_dataflow::impls::MovePathIndexMapper for Sparse32 { +// type TargetIndex = u32; + +// #[inline(always)] +// fn domain_size(&self, _dense_domain_size: usize) -> usize { +// // self.dense.len() +// self.domain +// } + +// #[inline(always)] +// fn map_index(&self, dense_idx: MovePathIndex) -> Option { +// if dense_idx.as_usize() < self.seen.domain_size() && self.seen.contains(dense_idx) { +// let sparse_idx = self.phf.hash(&dense_idx); +// Some(sparse_idx as u32) +// } else { +// None +// } + +// // let bit = 1u64 << dense_idx.as_u32(); +// // if self.bitmap & bit == 0 { +// // return None; +// // } +// // // let mask = bit.wrapping_sub(1); +// // // let below = self.bitmap & mask; +// // // let sparse_idx = below.count_ones() as u32; +// // // Some(sparse_idx) + +// // // let sparse_idx = self.phf.try_hash(&dense_idx).expect("Couldn't insert item?!"); +// // let 
sparse_idx = self.phf.try_hash(&dense_idx).unwrap_or_else(|| { +// // panic!( +// // "Couldn't find hash of dense idx {:?}, bitmap is: {:#034b}, bit is: {:#034b}, combo is {:#034b}, and values are {:?}", +// // dense_idx, self.bitmap, bit, self.bitmap & bit, self.dense +// // ); +// // }); +// // // let sparse_idx = self.phf.hash(&dense_idx) as u32; +// // Some(sparse_idx as u32) +// } +// } + +// // --- +// use std::fmt; +// use std::marker::PhantomData; + +// use rustc_index::Idx; + +// type Word = u64; +// const WORD_BYTES: usize = size_of::(); +// const WORD_BITS: usize = WORD_BYTES * 8; + +// #[inline] +// fn num_words(domain_size: T) -> usize { +// domain_size.index().div_ceil(WORD_BITS) +// } + +// #[inline] +// fn word_index_and_mask(elem: T) -> (usize, Word) { +// let elem = elem.index(); +// let word_index = elem / WORD_BITS; +// let mask = 1 << (elem % WORD_BITS); +// (word_index, mask) +// } + +// struct Bitmap { +// domain_size: usize, +// words: smallvec::SmallVec<[Word; 64]>, +// marker: PhantomData, +// } + +// impl fmt::Debug for Bitmap { +// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +// let mut words = Vec::new(); + +// for (_idx, word) in self.words.iter().copied().enumerate().rev() { +// // words.push(format!("{:08b}", word)); +// // words.push(format!("{:08b} ({} 1s)", word, word.count_ones())); +// let word = format!("{:0width$b}", word, width = WORD_BITS); +// // words.push(format!("{}: {}", idx, word)); +// words.push(format!("{}", word)); +// } + +// write!(f, "[{}]", words.join(", ")) +// } +// } + +// // impl Bitmap { +// // /// Gets the domain size. +// // pub(crate) fn domain_size(&self) -> usize { +// // self.domain_size +// // } +// // } + +// impl Bitmap { +// /// Creates a new, empty bitset with a given `domain_size`. +// #[inline] +// pub(crate) fn new_empty(domain_size: usize) -> Bitmap { +// let num_words = num_words(domain_size); +// Bitmap { domain_size, words: smallvec::smallvec![0; num_words], marker: PhantomData } +// } + +// /// Returns `true` if `self` contains `elem`. +// #[inline] +// pub(crate) fn contains(&self, elem: T) -> bool { +// if elem.index() >= self.domain_size { +// false +// } else { +// let (word_index, mask) = word_index_and_mask(elem); +// (unsafe { *self.words.get_unchecked(word_index) } & mask) != 0 +// } +// } + +// // /// Insert `elem`. 
+// // #[inline] +// // pub(crate) fn insert(&mut self, elem: T) { +// // // assert!( +// // // elem.index() < self.domain_size, +// // // "inserting element at index {} but domain size is {}", +// // // elem.index(), +// // // self.domain_size, +// // // ); +// // let (word_index, mask) = word_index_and_mask(elem); +// // let word = unsafe { *self.words.get_unchecked(word_index) }; +// // // let word_ref = &mut self.words[word_index]; +// // // let word = *word_ref; +// // let new_word = word | mask; +// // // *word_ref = new_word; +// // unsafe { *self.words.get_unchecked_mut(word_index) = new_word }; +// // } +// } + +// // --- +// pub(crate) struct Sparse { +// bitmap: Bitmap, +// sparse: smallvec::SmallVec<[MovePathIndex; 64]>, +// prefixes: Vec, +// } + +// impl Sparse { +// pub(crate) fn new(dense_domain: usize, sparse_domain: usize) -> Self { +// Self { +// bitmap: Bitmap::new_empty(dense_domain), +// sparse: smallvec::SmallVec::with_capacity(sparse_domain), +// prefixes: Vec::with_capacity(num_words(dense_domain)), +// } +// } + +// #[inline(always)] +// pub(crate) fn contains(&self, dense_idx: MovePathIndex) -> bool { +// self.bitmap.contains(dense_idx) +// } + +// fn compute_prefixes(&mut self) { +// let mut ones = 0; +// for word in self.bitmap.words.iter().copied() { +// ones += word.count_ones(); +// self.prefixes.push(ones); +// } +// } + +// // #[inline(always)] +// // pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> u32 { +// // let dense_idx = dense_idx.index(); +// // // Index within the word list +// // let word_idx = dense_idx / WORD_BITS; +// // // Index within the word at the word_idx +// // let idx = dense_idx % WORD_BITS; + +// // let bit: Word = 1 << idx; +// // let mask = bit.wrapping_sub(1); + +// // let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; + +// // // Mask of key that are smaller than the dense_idx within word_idx +// // let below_word = word & mask; + +// // // Number of keys that are smaller than the dense_idx within word_idx +// // // FIXME: fenwick tree ici!! 
+// // let sparse_idx_word = below_word.count_ones(); +// // let sparse_idx_rest: u32 = +// // self.bitmap.words[0..word_idx].iter().map(|word| word.count_ones()).sum(); + +// // let sparse_idx = sparse_idx_word + sparse_idx_rest; +// // sparse_idx +// // } + +// #[inline(always)] +// pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> u32 { +// let dense_idx = dense_idx.index(); +// // Index within the word list +// let word_idx = dense_idx / WORD_BITS; +// // Index within the word at the word_idx +// let idx = dense_idx % WORD_BITS; + +// let bit: Word = 1 << idx; +// let mask = bit.wrapping_sub(1); + +// let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; + +// // Mask of key that are smaller than the dense_idx within word_idx +// let below_word = word & mask; + +// // Number of keys that are smaller than the dense_idx within word_idx +// let sparse_idx_word = below_word.count_ones(); + +// let mut sparse_idx = sparse_idx_word; +// if word_idx > 0 { +// let sparse_idx_rest: u32 = self.prefixes[word_idx - 1]; +// sparse_idx += sparse_idx_rest; +// } + +// sparse_idx +// } + +// pub(crate) fn insert(&mut self, dense_idx: MovePathIndex) { +// let dense = dense_idx.index(); +// // Index within the word list +// let word_idx = dense / WORD_BITS; +// // Index within the word at the word_idx +// let idx = dense % WORD_BITS; + +// let bit: Word = 1 << idx; +// let mask = bit.wrapping_sub(1); + +// let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; + +// // Mask of key that are smaller than the dense_idx within word_idx +// let below_word = word & mask; + +// // Number of keys that are smaller than the dense_idx within word_idx +// let sparse_idx_word = below_word.count_ones() as usize; + +// // The full sparse_idx is the sparse_idx_word + the sum of count_ones of all words [0..word_idx] +// // segment-tree / fenwick tree ici? +// let sparse_idx_rest: u32 = +// self.bitmap.words[0..word_idx].iter().map(|word| word.count_ones()).sum(); + +// let sparse_idx = sparse_idx_word + sparse_idx_rest as usize; + +// // If we're adding dense indices in an ascending order, we'll always be at the last position +// // of the vector. +// if sparse_idx == self.sparse.len() { +// self.sparse.push(dense_idx); +// } else { +// // Otherwise, we have a smaller index to add to the list. +// self.sparse.insert(sparse_idx, dense_idx); +// } + +// // eprintln!( +// // "inserting dense idx {:>2}, word_idx: {}, idx: {}, sparse index found: {} (word: {}, rest: {}), bitmap: {:?}, sparse values: {:?}", +// // dense_idx, +// // word_idx, +// // idx, +// // sparse_idx, +// // sparse_idx_word, +// // sparse_idx_rest, +// // self.bitmap, +// // self.sparse, +// // ); + +// let new_word = word | bit; +// unsafe { *self.bitmap.words.get_unchecked_mut(word_idx) = new_word }; +// } +// } + +// impl rustc_mir_dataflow::impls::MovePathIndexMapper for Sparse { +// type TargetIndex = u32; + +// #[inline(always)] +// fn mapped_domain_size<'tcx>(&self, _analysis: &impl HasMoveData<'tcx>) -> usize { +// self.sparse.len() +// } + +// #[inline(always)] +// fn map_index(&self, dense_idx: MovePathIndex) -> Option { +// if self.contains(dense_idx) { Some(self.map_index(dense_idx)) } else { None } +// } +// } + +// // --- + impl<'tcx> LivenessContext<'_, '_, 'tcx> { fn body(&self) -> &Body<'tcx> { self.typeck.body @@ -502,13 +1047,24 @@ impl<'tcx> LivenessContext<'_, '_, 'tcx> { /// the cursor to the desired location. 
fn initialized_at_curr_loc(&mut self, mpi: MovePathIndex) -> bool { let flow_inits = self.flow_inits(); + let analysis = flow_inits.analysis(); + let idx = analysis + .map_index(mpi) + .unwrap_or_else(|| unreachable!("dataflow is somehow missing MPI {mpi:?}")); let state = flow_inits.get(); - if state.contains(mpi) { + if state.contains(idx) { return true; } - let move_paths = &flow_inits.analysis().move_data().move_paths; - move_paths[mpi].find_descendant(move_paths, |mpi| state.contains(mpi)).is_some() + let move_paths = &analysis.move_data().move_paths; + move_paths[mpi] + .find_descendant(move_paths, |mpi| { + let idx = analysis + .map_index(mpi) + .unwrap_or_else(|| unreachable!("dataflow is somehow missing MPI {mpi:?}")); + state.contains(idx) + }) + .is_some() } /// Returns `true` if the local variable (or some part of it) is initialized in diff --git a/compiler/rustc_mir_dataflow/src/framework/fmt.rs b/compiler/rustc_mir_dataflow/src/framework/fmt.rs index 38599cd094933..cb728b3d05b46 100644 --- a/compiler/rustc_mir_dataflow/src/framework/fmt.rs +++ b/compiler/rustc_mir_dataflow/src/framework/fmt.rs @@ -250,3 +250,17 @@ where write!(f, "{}", ctxt.move_data().move_paths[*self]) } } + +impl DebugWithContext for crate::move_paths::SparseMovePathIndex {} +impl DebugWithContext for u32 {} + +// impl<'tcx, C> DebugWithContext for crate::move_paths::SparseMovePathIndex +// where +// C: crate::move_paths::HasMoveData<'tcx> + +// C: crate::impls::MovePathIndexMapper +// { +// fn fmt_with(&self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result { +// let +// write!(f, "{}", ctxt.move_data().move_paths[*self]) +// } +// } diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index 5937d68f389e4..44ea1f86a8e09 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -21,14 +21,14 @@ use crate::{ // Used by both `MaybeInitializedPlaces` and `MaybeUninitializedPlaces`. pub struct MaybePlacesSwitchIntData<'tcx> { - enum_place: mir::Place<'tcx>, + pub(super) enum_place: mir::Place<'tcx>, discriminants: Vec<(VariantIdx, Discr<'tcx>)>, index: usize, } impl<'tcx> MaybePlacesSwitchIntData<'tcx> { /// Creates a `SmallVec` mapping each target in `targets` to its `VariantIdx`. - fn variants(&mut self, targets: &mir::SwitchTargets) -> SmallVec<[VariantIdx; 4]> { + pub(super) fn variants(&mut self, targets: &mir::SwitchTargets) -> SmallVec<[VariantIdx; 4]> { self.index = 0; targets.all_values().iter().map(|value| self.next_discr(value.get())).collect() } @@ -36,7 +36,7 @@ impl<'tcx> MaybePlacesSwitchIntData<'tcx> { // The discriminant order in the `SwitchInt` targets should match the order yielded by // `AdtDef::discriminants`. We rely on this to match each discriminant in the targets to its // corresponding variant in linear time. - fn next_discr(&mut self, value: u128) -> VariantIdx { + pub(super) fn next_discr(&mut self, value: u128) -> VariantIdx { // An out-of-bounds abort will occur if the discriminant ordering isn't as described above. 
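// [Editor's sketch] As the comments above explain, `next_discr` relies on the `SwitchInt`
// target values appearing in the same order as `AdtDef::discriminants`, so the `loop` below
// only ever advances a shared cursor and matching all targets stays linear in the number of
// variants. A standalone model of that cursor-based matching, with illustrative types rather
// than rustc's:

fn match_targets(discriminants: &[(u32 /* variant */, u128 /* discr */)], targets: &[u128]) -> Vec<u32> {
    let mut index = 0;
    targets
        .iter()
        .map(|&value| loop {
            // Indexing panics if the ordering assumption is violated, mirroring the abort noted above.
            let (variant, discr) = discriminants[index];
            index += 1;
            if discr == value {
                break variant;
            }
        })
        .collect()
}

// match_targets(&[(0, 0), (1, 1), (2, 5)], &[0, 5]) == vec![0, 2]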
loop { let (variant, discr) = self.discriminants[self.index]; @@ -49,7 +49,7 @@ impl<'tcx> MaybePlacesSwitchIntData<'tcx> { } impl<'tcx> MaybePlacesSwitchIntData<'tcx> { - fn new( + pub(super) fn new( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, block: mir::BasicBlock, diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs new file mode 100644 index 0000000000000..96fb2df84d761 --- /dev/null +++ b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs @@ -0,0 +1,402 @@ +use rustc_data_structures::fx::FxIndexMap; +use rustc_index::Idx; +use rustc_index::bit_set::MixedBitSet; +use rustc_middle::mir::{ + self, Body, CallReturnPlaces, Location, SwitchTargetValue, TerminatorEdges, +}; +use rustc_middle::ty::TyCtxt; + +use crate::drop_flag_effects::{DropFlagState, InactiveVariants}; +use crate::impls::initialized::MaybePlacesSwitchIntData; +use crate::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex, SparseMovePathIndex}; +use crate::{ + Analysis, GenKill, MaybeReachable, drop_flag_effects, drop_flag_effects_for_function_entry, + drop_flag_effects_for_location, on_all_children_bits, on_lookup_result_bits, +}; + +/// `MaybeInitializedPlaces` tracks all places that might be +/// initialized upon reaching a particular point in the control flow +/// for a function. +/// +/// For example, in code like the following, we have corresponding +/// dataflow information shown in the right-hand comments. +/// +/// ```rust +/// struct S; +/// #[rustfmt::skip] +/// fn foo(pred: bool) { // maybe-init: +/// // {} +/// let a = S; let mut b = S; let c; let d; // {a, b} +/// +/// if pred { +/// drop(a); // { b} +/// b = S; // { b} +/// +/// } else { +/// drop(b); // {a} +/// d = S; // {a, d} +/// +/// } // {a, b, d} +/// +/// c = S; // {a, b, c, d} +/// } +/// ``` +/// +/// To determine whether a place is *definitely* initialized at a +/// particular control-flow point, one can take the set-complement +/// of the data from `MaybeUninitializedPlaces` at the corresponding +/// control-flow point. +/// +/// Similarly, at a given `drop` statement, the set-intersection +/// between this data and `MaybeUninitializedPlaces` yields the set of +/// places that would require a dynamic drop-flag at that statement. +/// +/// When needed for efficiency purposes, a [`MovePathIndexMapper`] can +/// be used to only track a subset of places +/// ([`FilteringMovePathIndexMapper`]). The default, most common, case +/// is to compute maybe-initializedness for all places (via the +/// [`DefaultMovePathIndexMapper`] index mapper). +pub struct MaybeInitializedPlaces2<'a, 'tcx, M = DefaultMovePathIndexMapper> { + tcx: TyCtxt<'tcx>, + body: &'a Body<'tcx>, + move_data: &'a MoveData<'tcx>, + exclude_inactive_in_otherwise: bool, + skip_unreachable_unwind: bool, + index_mapper: M, +} + +/// Maps between source `MovePathIndex`es and target `Idx`. The two common cases are +/// - filtering uninteresting move paths, described below. +/// - an identity mapping, using `MovePathIndex`es as-is. +/// +/// The dataflow engine, and MIR visitor infrastructure will still refer to elements +/// using `MovePathIndex`, as will code visiting the dataflow results, and only the analysis will +/// internally use the target indices. +/// +/// When filtering move paths, the source `MovePathIndex`es are considered the dense domain, and the +/// subset of "interesting" move paths is the sparse domain. 
The mapper accepts dense indices from the
+/// dataflow engine and result-visiting code, and returns the associated sparse index. Note that
+/// this mapping is optional: the point is that there are more uninteresting dense values than
+/// interesting sparse values, and we want to use the smaller, more efficient domain in the dataflow
+/// analysis.
+pub trait MovePathIndexMapper {
+    type TargetIndex: Idx;
+
+    /// Returns the size of the mapped domain:
+    /// - either the original, dense, domain size: when not filtering any move paths,
+    /// - or, the sparse domain size: the subset of interesting move paths we want to track.
+    fn mapped_domain_size<'tcx>(&self, analysis: &impl HasMoveData<'tcx>) -> usize;
+    fn map_index(&self, source_idx: MovePathIndex) -> Option<Self::TargetIndex>;
+}
+
+/// This `MovePathIndex` mapper is the common case:
+/// - it doesn't filter any paths, there's only the dense domain, and this mapping is an identity.
+/// - we always use all `MovePathIndex`es outside and inside the dataflow analysis.
+pub struct DefaultMovePathIndexMapper;
+impl MovePathIndexMapper for DefaultMovePathIndexMapper {
+    type TargetIndex = MovePathIndex;
+
+    #[inline(always)]
+    fn mapped_domain_size<'tcx>(&self, analysis: &impl HasMoveData<'tcx>) -> usize {
+        analysis.move_data().move_paths.len()
+    }
+
+    #[inline(always)]
+    fn map_index(&self, source_idx: MovePathIndex) -> Option<Self::TargetIndex> {
+        Some(source_idx)
+    }
+}
+
+/// This `MovePathIndex` mapper filters move paths and maps the source dense indices to the sparse indices
+/// of interest.
+pub struct FilteringMovePathIndexMapper {
+    /// Map of dense move paths to their associated sparse index.
+    sparse_map: FxIndexMap<MovePathIndex, SparseMovePathIndex>,
+}
+
+impl MovePathIndexMapper for FilteringMovePathIndexMapper {
+    type TargetIndex = SparseMovePathIndex;
+
+    #[inline(always)]
+    fn mapped_domain_size<'tcx>(&self, _analysis: &impl HasMoveData<'tcx>) -> usize {
+        self.sparse_map.len()
+    }
+
+    #[inline(always)]
+    fn map_index(&self, source_idx: MovePathIndex) -> Option<Self::TargetIndex> {
+        self.sparse_map.get(&source_idx).copied()
+    }
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> {
+    pub fn filter_move_paths(
+        self,
+        interesting_move_paths: &[MovePathIndex],
+    ) -> MaybeInitializedPlaces2<'a, 'tcx, FilteringMovePathIndexMapper> {
+        let mut sparse_map = FxIndexMap::default();
+        for (idx, &dense_idx) in interesting_move_paths.iter().enumerate() {
+            // SAFETY: `MovePathIndex`es are u32s.
+ // let sparse_idx = unsafe { + // SparseMovePathIndex::from_u32_unchecked(idx.try_into().unwrap()) + // }; + let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); + sparse_map.insert(dense_idx, sparse_idx); + } + let index_mapper = FilteringMovePathIndexMapper { sparse_map }; + MaybeInitializedPlaces2 { + tcx: self.tcx, + body: self.body, + move_data: self.move_data, + exclude_inactive_in_otherwise: self.exclude_inactive_in_otherwise, + skip_unreachable_unwind: self.skip_unreachable_unwind, + index_mapper, + } + } + + pub fn with_mapper( + self, + index_mapper: N, + ) -> MaybeInitializedPlaces2<'a, 'tcx, N> { + MaybeInitializedPlaces2 { + tcx: self.tcx, + body: self.body, + move_data: self.move_data, + exclude_inactive_in_otherwise: self.exclude_inactive_in_otherwise, + skip_unreachable_unwind: self.skip_unreachable_unwind, + index_mapper, + } + } +} + +impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> { + pub fn new( + tcx: TyCtxt<'tcx>, + body: &'a Body<'tcx>, + move_data: &'a MoveData<'tcx>, + ) -> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> { + MaybeInitializedPlaces2 { + tcx, + body, + move_data, + exclude_inactive_in_otherwise: false, + skip_unreachable_unwind: false, + index_mapper: DefaultMovePathIndexMapper, + } + } +} + +impl<'a, 'tcx, M: MovePathIndexMapper> MaybeInitializedPlaces2<'a, 'tcx, M> { + /// Ensures definitely inactive variants are excluded from the set of initialized places for + /// blocks reached through an `otherwise` edge. + pub fn exclude_inactive_in_otherwise(mut self) -> Self { + self.exclude_inactive_in_otherwise = true; + self + } + + pub fn skipping_unreachable_unwind(mut self) -> Self { + self.skip_unreachable_unwind = true; + self + } + + pub fn is_unwind_dead( + &self, + place: mir::Place<'tcx>, + state: &>::Domain, + ) -> bool { + if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) { + let mut maybe_live = false; + // FIXME: use traversal that can early return instead of iterating through all children, + // (similar to `MovePath::find_descendant`). + on_all_children_bits(&self.move_data, path, |child| { + if let Some(idx) = self.index_mapper.map_index(child) { + maybe_live |= state.contains(idx); + } + }); + !maybe_live + } else { + false + } + } + + // FIXME: put this only in filtered mapper? + /// Map the source `MovePathIndex` to the target index tracked in the analysis: + /// - either the `MovePathIndex` itself, + /// - or its image in the sparse domain: its "interesting move path" index. + #[inline(always)] + pub fn map_index(&self, source_idx: MovePathIndex) -> Option { + self.index_mapper.map_index(source_idx) + } +} + +impl<'a, 'tcx, M> HasMoveData<'tcx> for MaybeInitializedPlaces2<'a, 'tcx, M> { + fn move_data(&self) -> &MoveData<'tcx> { + self.move_data + } +} + +impl<'a, 'tcx, M: MovePathIndexMapper> MaybeInitializedPlaces2<'a, 'tcx, M> { + fn update_bits( + state: &mut >::Domain, + path: M::TargetIndex, + dfstate: DropFlagState, + ) { + match dfstate { + DropFlagState::Absent => state.kill(path), + DropFlagState::Present => state.gen_(path), + } + } +} + +impl<'tcx, M: MovePathIndexMapper> Analysis<'tcx> for MaybeInitializedPlaces2<'_, 'tcx, M> { + /// There can be many more `MovePathIndex` than there are locals in a MIR body. + /// We use a mixed bitset to avoid paying too high a memory footprint. 
+ type Domain = MaybeReachable>; + + type SwitchIntData = MaybePlacesSwitchIntData<'tcx>; + + const NAME: &'static str = "maybe_init"; + + fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain { + // bottom = uninitialized + MaybeReachable::Unreachable + } + + fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) { + // The mapper controls the domain mapping and thus the number of move paths we track here. + let domain_size = self.index_mapper.mapped_domain_size(self); + *state = MaybeReachable::Reachable(MixedBitSet::new_empty(domain_size)); + + drop_flag_effects_for_function_entry(self.body, self.move_data, |path, s| { + assert!(s == DropFlagState::Present); + if let Some(idx) = self.index_mapper.map_index(path) { + state.gen_(idx); + } + }); + } + + fn apply_primary_statement_effect( + &mut self, + state: &mut Self::Domain, + statement: &mir::Statement<'tcx>, + location: Location, + ) { + drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| { + if let Some(idx) = self.index_mapper.map_index(path) { + Self::update_bits(state, idx, s); + } + }); + + // Mark all places as "maybe init" if they are mutably borrowed. See #90752. + if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration + && let Some((_, rvalue)) = statement.kind.as_assign() + && let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place) + // FIXME: Does `&raw const foo` allow mutation? See #90413. + | mir::Rvalue::RawPtr(_, place) = rvalue + && let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) + { + // We only check this MPI's children if it itself is interesting. + if self.index_mapper.map_index(mpi).is_none() { + return; + } + + on_all_children_bits(self.move_data(), mpi, |child| { + if let Some(idx) = self.index_mapper.map_index(child) { + state.gen_(idx); + } + }) + } + } + + fn apply_primary_terminator_effect<'mir>( + &mut self, + state: &mut Self::Domain, + terminator: &'mir mir::Terminator<'tcx>, + location: Location, + ) -> TerminatorEdges<'mir, 'tcx> { + // Note: `edges` must be computed first because `drop_flag_effects_for_location` can change + // the result of `is_unwind_dead`. + let mut edges = terminator.edges(); + if self.skip_unreachable_unwind + && let mir::TerminatorKind::Drop { + target, + unwind, + place, + replace: _, + drop: _, + async_fut: _, + } = terminator.kind + && matches!(unwind, mir::UnwindAction::Cleanup(_)) + && self.is_unwind_dead(place, state) + { + edges = TerminatorEdges::Single(target); + } + drop_flag_effects_for_location(self.body, self.move_data, location, |path, s| { + if let Some(idx) = self.index_mapper.map_index(path) { + Self::update_bits(state, idx, s); + } + }); + edges + } + + fn apply_call_return_effect( + &mut self, + state: &mut Self::Domain, + _block: mir::BasicBlock, + return_places: CallReturnPlaces<'_, 'tcx>, + ) { + return_places.for_each(|place| { + // when a call returns successfully, that means we need to set + // the bits for that dest_place to 1 (initialized). 
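// [Editor's sketch] The `on_lookup_result_bits` call just below, like every other transfer
// function in this impl, pushes each dense `MovePathIndex` through `index_mapper.map_index`
// and simply skips paths the mapper does not track, so the analysis never allocates bits for
// them. A minimal standalone model of that filtering gen/kill pattern; the names
// (`IndexMapper`, `gen_bit`, ...) are illustrative, not rustc APIs:

use std::collections::HashMap;

trait IndexMapper {
    fn mapped_domain_size(&self, dense_domain_size: usize) -> usize;
    fn map_index(&self, dense_idx: usize) -> Option<usize>;
}

/// Identity mapping: track every dense index (the `DefaultMovePathIndexMapper` case).
struct IdentityMapper;
impl IndexMapper for IdentityMapper {
    fn mapped_domain_size(&self, dense_domain_size: usize) -> usize { dense_domain_size }
    fn map_index(&self, dense_idx: usize) -> Option<usize> { Some(dense_idx) }
}

/// Filtering mapping: only an interesting subset is tracked, renumbered densely.
struct FilteringMapper { sparse: HashMap<usize, usize> }
impl IndexMapper for FilteringMapper {
    fn mapped_domain_size(&self, _dense_domain_size: usize) -> usize { self.sparse.len() }
    fn map_index(&self, dense_idx: usize) -> Option<usize> { self.sparse.get(&dense_idx).copied() }
}

/// A "gen" update as used by the transfer functions: untracked indices fall through.
fn gen_bit<M: IndexMapper>(state: &mut [bool], mapper: &M, dense_idx: usize) {
    if let Some(idx) = mapper.map_index(dense_idx) {
        state[idx] = true;
    }
}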
+ on_lookup_result_bits( + self.move_data(), + self.move_data().rev_lookup.find(place.as_ref()), + |child| { + if let Some(idx) = self.index_mapper.map_index(child) { + state.gen_(idx); + } + }, + ); + }); + } + + fn get_switch_int_data( + &mut self, + block: mir::BasicBlock, + discr: &mir::Operand<'tcx>, + ) -> Option { + if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration { + return None; + } + + MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr) + } + + fn apply_switch_int_edge_effect( + &mut self, + data: &mut Self::SwitchIntData, + state: &mut Self::Domain, + value: SwitchTargetValue, + targets: &mir::SwitchTargets, + ) { + let inactive_variants = match value { + SwitchTargetValue::Normal(value) => InactiveVariants::Active(data.next_discr(value)), + SwitchTargetValue::Otherwise if self.exclude_inactive_in_otherwise => { + InactiveVariants::Inactives(data.variants(targets)) + } + _ => return, + }; + + // Kill all move paths that correspond to variants we know to be inactive along this + // particular outgoing edge of a `SwitchInt`. + drop_flag_effects::on_all_inactive_variants( + self.move_data, + data.enum_place, + &inactive_variants, + |path| { + if let Some(idx) = self.index_mapper.map_index(path) { + state.kill(idx) + } + }, + ); + } +} diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs index 3f29b819a6d18..a0959b10b8883 100644 --- a/compiler/rustc_mir_dataflow/src/impls/mod.rs +++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs @@ -1,5 +1,6 @@ mod borrowed_locals; mod initialized; +mod initialized2; mod liveness; mod storage_liveness; @@ -8,6 +9,10 @@ pub use self::initialized::{ EverInitializedPlaces, EverInitializedPlacesDomain, MaybeInitializedPlaces, MaybeUninitializedPlaces, MaybeUninitializedPlacesDomain, }; +pub use self::initialized2::{ + DefaultMovePathIndexMapper, FilteringMovePathIndexMapper, MaybeInitializedPlaces2, + MovePathIndexMapper, +}; pub use self::liveness::{ MaybeLiveLocals, MaybeTransitiveLiveLocals, TransferFunction as LivenessTransferFunction, }; diff --git a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs index 466416d63f53e..9422bc1284b19 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs @@ -34,6 +34,15 @@ impl polonius_engine::Atom for MovePathIndex { } } +rustc_index::newtype_index! { + /// A subset of `MovePathIndex`es, to track the interesting move paths. Used in the + /// `MaybeInitializedPlaces` analysis. + #[orderable] + #[debug_format = "smp{}"] + pub struct SparseMovePathIndex {} +} + + rustc_index::newtype_index! 
{ #[orderable] #[debug_format = "mo{}"] @@ -107,7 +116,7 @@ impl<'tcx> MovePath<'tcx> { pub fn find_descendant( &self, move_paths: &IndexSlice>, - f: impl Fn(MovePathIndex) -> bool, + mut f: impl FnMut(MovePathIndex) -> bool, ) -> Option { let mut todo = if let Some(child) = self.first_child { vec![child] diff --git a/compiler/rustc_mir_dataflow/src/rustc_peek.rs b/compiler/rustc_mir_dataflow/src/rustc_peek.rs index a899ec1fa8846..1eaad805c13f3 100644 --- a/compiler/rustc_mir_dataflow/src/rustc_peek.rs +++ b/compiler/rustc_mir_dataflow/src/rustc_peek.rs @@ -1,4 +1,3 @@ -use rustc_ast::MetaItem; use rustc_middle::mir::{self, Body, Local, Location}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::def_id::DefId; @@ -14,17 +13,18 @@ use crate::impls::{MaybeInitializedPlaces, MaybeLiveLocals, MaybeUninitializedPl use crate::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex}; use crate::{Analysis, JoinSemiLattice, ResultsCursor}; -fn has_rustc_mir_with(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> Option { +fn has_rustc_mir_with(tcx: TyCtxt<'_>, def_id: DefId, name: Symbol) -> bool { for attr in tcx.get_attrs(def_id, sym::rustc_mir) { let items = attr.meta_item_list(); for item in items.iter().flat_map(|l| l.iter()) { - match item.meta_item() { - Some(mi) if mi.has_name(name) => return Some(mi.clone()), - _ => continue, + if let Some(mi) = item.meta_item() + && mi.has_name(name) + { + return true; } } } - None + false } pub fn sanity_check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { @@ -38,27 +38,27 @@ pub fn sanity_check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { let move_data = MoveData::gather_moves(body, tcx, |_| true); - if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_init).is_some() { + if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_init) { let flow_inits = MaybeInitializedPlaces::new(tcx, body, &move_data) .iterate_to_fixpoint(tcx, body, None) .into_results_cursor(body); sanity_check_via_rustc_peek(tcx, flow_inits); } - if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_uninit).is_some() { + if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_maybe_uninit) { let flow_uninits = MaybeUninitializedPlaces::new(tcx, body, &move_data) .iterate_to_fixpoint(tcx, body, None) .into_results_cursor(body); sanity_check_via_rustc_peek(tcx, flow_uninits); } - if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_liveness).is_some() { + if has_rustc_mir_with(tcx, def_id, sym::rustc_peek_liveness) { let flow_liveness = MaybeLiveLocals.iterate_to_fixpoint(tcx, body, None).into_results_cursor(body); sanity_check_via_rustc_peek(tcx, flow_liveness); } - if has_rustc_mir_with(tcx, def_id, sym::stop_after_dataflow).is_some() { + if has_rustc_mir_with(tcx, def_id, sym::stop_after_dataflow) { tcx.dcx().emit_fatal(StopAfterDataFlowEndedCompilation); } } From cd106c5e2de4c44d3687fb6a03c047d07c0f333c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 26 Aug 2025 12:26:12 +0000 Subject: [PATCH 2/5] wip --- compiler/rustc_mir_dataflow/src/impls/initialized2.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs index 96fb2df84d761..b380bda33b9e5 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs @@ -136,10 +136,10 @@ impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> { let mut sparse_map = FxIndexMap::default(); for 
(idx, &dense_idx) in interesting_move_paths.iter().enumerate() { // SAFETY: `MovePathIndex`es are u32s. - // let sparse_idx = unsafe { - // SparseMovePathIndex::from_u32_unchecked(idx.try_into().unwrap()) - // }; - let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); + let sparse_idx = unsafe { + SparseMovePathIndex::from_u32_unchecked(idx.try_into().unwrap()) + }; + // let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); sparse_map.insert(dense_idx, sparse_idx); } let index_mapper = FilteringMovePathIndexMapper { sparse_map }; From 8febc064b26379ea76cfe23f5ee7376c420d435f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 26 Aug 2025 12:37:03 +0000 Subject: [PATCH 3/5] wip --- compiler/rustc_mir_dataflow/src/impls/initialized2.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs index b380bda33b9e5..c035a7d85e64e 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs @@ -136,9 +136,7 @@ impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> { let mut sparse_map = FxIndexMap::default(); for (idx, &dense_idx) in interesting_move_paths.iter().enumerate() { // SAFETY: `MovePathIndex`es are u32s. - let sparse_idx = unsafe { - SparseMovePathIndex::from_u32_unchecked(idx.try_into().unwrap()) - }; + let sparse_idx = unsafe { SparseMovePathIndex::from_u32_unchecked(idx as u32) }; // let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); sparse_map.insert(dense_idx, sparse_idx); } From d51c7c35b2c7cf408fd405724642ec7edb4cecb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 26 Aug 2025 12:45:37 +0000 Subject: [PATCH 4/5] wip --- compiler/rustc_mir_dataflow/src/impls/initialized2.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs index c035a7d85e64e..dec9a7ed9bef1 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs @@ -136,8 +136,8 @@ impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> { let mut sparse_map = FxIndexMap::default(); for (idx, &dense_idx) in interesting_move_paths.iter().enumerate() { // SAFETY: `MovePathIndex`es are u32s. 
- let sparse_idx = unsafe { SparseMovePathIndex::from_u32_unchecked(idx as u32) }; - // let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); + // let sparse_idx = unsafe { SparseMovePathIndex::from_u32_unchecked(idx as u32) }; + let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap()); sparse_map.insert(dense_idx, sparse_idx); } let index_mapper = FilteringMovePathIndexMapper { sparse_map }; From b407ec1681d4dce12ae859c1fd35bed48a75df08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A9my=20Rakic?= Date: Tue, 26 Aug 2025 16:54:41 +0000 Subject: [PATCH 5/5] wip --- .../src/type_check/liveness/trace.rs | 464 +----------------- .../rustc_mir_dataflow/src/framework/fmt.rs | 4 +- .../src/impls/initialized2.rs | 2 - .../rustc_mir_dataflow/src/move_paths/mod.rs | 3 +- 4 files changed, 12 insertions(+), 461 deletions(-) diff --git a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs index b035771beb1dd..25fa25574f0de 100644 --- a/compiler/rustc_borrowck/src/type_check/liveness/trace.rs +++ b/compiler/rustc_borrowck/src/type_check/liveness/trace.rs @@ -7,10 +7,10 @@ use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, HasLocalDecls, Loc use rustc_middle::traits::query::DropckOutlivesResult; use rustc_middle::ty::relate::Relate; use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt}; -use rustc_mir_dataflow::impls::MaybeInitializedPlaces2; +use rustc_mir_dataflow::impls::{FilteringMovePathIndexMapper, MaybeInitializedPlaces2}; use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex}; use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex}; -use rustc_mir_dataflow::{Analysis, ResultsCursor}; +use rustc_mir_dataflow::{Analysis, ResultsCursor, on_all_children_bits}; use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span}; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::traits::ObligationCtxt; @@ -46,12 +46,7 @@ pub(super) fn trace<'tcx>( ) { let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, typeck.body); - // let mut locals_with_drop_points = 0; - - // let timer = std::time::Instant::now(); - let mut dense_mpis = smallvec::SmallVec::new(); - for &local in relevant_live_locals.iter() { let Some(mpi) = move_data.rev_lookup.find_local(local) else { unreachable!() }; @@ -60,19 +55,11 @@ pub(super) fn trace<'tcx>( continue; } - // locals_with_drop_points += 1; - - dense_mpis.push(mpi); - - let move_paths = &move_data.move_paths; - let _ = move_paths[mpi].find_descendant(move_paths, |mpi| { - dense_mpis.push(mpi); - false + on_all_children_bits(move_data, mpi, |child| { + dense_mpis.push(child); }); } - // let elapsed = timer.elapsed(); - let cx = LivenessContext { typeck, flow_inits: None, @@ -83,31 +70,6 @@ pub(super) fn trace<'tcx>( dense_mpis, }; - // if locals_with_drop_points > 0 && std::env::var("LETSGO").is_ok() { - // eprintln!( - // "body has {} relevant locals, {} with drop points -> {} MPIs of interest (local + descendants, in {} ns) out of {}, {:?}", - // relevant_live_locals.len(), - // locals_with_drop_points, - // cx.dense_mpis.len(), - // elapsed.as_nanos(), - // cx.move_data.move_paths.len(), - // cx.typeck.body.span, - // ); - // // for (idx, &local) in relevant_live_locals.iter().enumerate() { - // // let Some(mpi) = cx.move_data.rev_lookup.find_local(local) else { continue }; - - // // let drop_points = cx.local_use_map.drops(local).count(); - // // if drop_points > 0 { - // // 
eprintln!( - // // "relevant local {idx:<4}: local {} (mpi: {:?}) has {} drop points", - // // local.as_u32(), - // // mpi, - // // drop_points, - // // ); - // // } - // // } - // } - let mut results = LivenessResults::new(cx); results.add_extra_drop_facts(&relevant_live_locals); @@ -135,7 +97,9 @@ struct LivenessContext<'a, 'typeck, 'tcx> { /// Results of dataflow tracking which variables (and paths) have been /// initialized. Computed lazily when needed by drop-liveness. - flow_inits: Option>>, + flow_inits: Option< + ResultsCursor<'a, 'tcx, MaybeInitializedPlaces2<'a, 'tcx, FilteringMovePathIndexMapper>>, + >, /// Index indicating where each variable is assigned, used, or /// dropped. @@ -514,12 +478,6 @@ impl<'a, 'typeck, 'tcx> LivenessResults<'a, 'typeck, 'tcx> { } } -// type IndexMapper = SparseIndexMapper; -// type IndexMapper = rustc_mir_dataflow::impls::DefaultMovePathIndexMapper; -type IndexMapper = rustc_mir_dataflow::impls::FilteringMovePathIndexMapper; -// type IndexMapper = Sparse32; -// type IndexMapper = Sparse; - impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { /// Computes the `MaybeInitializedPlaces` dataflow analysis if it hasn't been done already. /// @@ -532,7 +490,8 @@ impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { /// maybe-initializedness of `MovePathIndex`es. fn flow_inits( &mut self, - ) -> &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces2<'a, 'tcx, IndexMapper>> { + ) -> &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces2<'a, 'tcx, FilteringMovePathIndexMapper>> + { self.flow_inits.get_or_insert_with(|| { let tcx = self.typeck.tcx(); let body = self.typeck.body; @@ -632,411 +591,6 @@ impl<'a, 'typeck, 'tcx> LivenessContext<'a, 'typeck, 'tcx> { } } -// use boomphf::*; - -// struct Sparse32 { -// // bitmap: u64, -// // dense: [MovePathIndex; 64], -// dense: Vec, -// domain: usize, -// phf: Mphf, -// seen: DenseBitSet, -// } - -// impl Sparse32 { -// // pub(crate) fn new(sparse_domain_size: usize) -> Self { -// // Self { bitmap: 0, dense: [MovePathIndex::from_u32(0); _], domain: sparse_domain_size } -// // } - -// pub(crate) fn new(sparse_domain: &[MovePathIndex]) -> Self { -// // assert!(sparse_domain.len() < 640); - -// let dense_domain = 1 + sparse_domain.iter().max().unwrap().as_usize(); - -// let phf = Mphf::new(1.7, sparse_domain); -// let mut map = Self { -// // bitmap: 0, -// // dense: [MovePathIndex::from_u32(0); _], -// dense: vec![MovePathIndex::from_u32(0); sparse_domain.len()], -// domain: sparse_domain.len(), -// phf, -// seen: DenseBitSet::new_empty(dense_domain), -// }; - -// // if map.seen.domain_size() == 28 { -// // eprintln!( -// // "sparse domain of len {}, domain size: {}: {:?}", -// // sparse_domain.len(), -// // map.seen.domain_size(), -// // sparse_domain -// // ); -// // } - -// for &dense_idx in sparse_domain { -// map.insert(dense_idx); -// // if dense_idx.as_u32() == 28 && map.seen.domain_size() == 28 { -// // eprintln!( -// // "inserting {:?} in sparse domain of len {}, domain size: {} (dense_domain: {}), max in domain: {}, {:?}", -// // dense_idx, -// // sparse_domain.len(), -// // map.seen.domain_size(), -// // dense_domain, -// // sparse_domain.iter().max().unwrap().as_usize(), -// // sparse_domain, -// // ); -// // // todo!("yo, "); -// // } - -// // let r = catch_unwind(AssertUnwindSafe(|| { -// // map.seen.insert(dense_idx); -// // })); -// // if r.is_err() { -// // eprintln!( -// // "inserting {:?} in sparse domain of len {}, domain size: {}: {:?}", -// // dense_idx, -// // sparse_domain.len(), -// 
// map.seen.domain_size(), -// // sparse_domain -// // ); -// // panic!("oh: {r:?}"); -// // } -// } - -// map -// } - -// // #[inline(always)] -// // pub(crate) fn contains(&self, dense_idx: MovePathIndex) -> bool { -// // let bit = 1u64 << dense_idx.as_u32(); -// // (self.bitmap & bit) != 0 -// // } - -// // #[inline(always)] -// // pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> usize { -// // self.index_and_bit(dense_idx).0 -// // } - -// // #[inline(always)] -// // fn index_and_bit(&self, dense_idx: MovePathIndex) -> (usize, u64) { -// // let bit = 1u64 << dense_idx.as_u32(); -// // let mask = bit.wrapping_sub(1); -// // let below = self.bitmap & mask; -// // let sparse_idx = below.count_ones() as usize; -// // (sparse_idx, bit) -// // } - -// #[inline] -// pub(crate) fn insert(&mut self, dense_idx: MovePathIndex) { -// // tmp: -// // debug_assert!(dense_idx.as_u32() < 64); -// // debug_assert!( -// // !self.contains(dense_idx), -// // "the dense index {:?} is already present in the dense array!", -// // dense_idx -// // ); - -// // let (sparse_idx, bit) = self.index_and_bit(dense_idx); -// // // self.dense[sparse_idx] = dense_idx; -// // unsafe { -// // *self.dense.get_unchecked_mut(sparse_idx) = dense_idx; -// // } -// // self.bitmap |= bit; - -// // let sparse_idx = self.phf.hash(&dense_idx) as usize; -// let sparse_idx = self.phf.hash(&dense_idx) as usize; - -// // let bit = 1u64 << sparse_idx; -// unsafe { -// *self.dense.get_unchecked_mut(sparse_idx) = dense_idx; -// } -// // assert!(sparse_idx < self.dense.len()); -// // self.dense[sparse_idx] = dense_idx; - -// // self.dense.insert(sparse_idx, dense_idx); -// // self.bitmap |= bit; -// self.seen.insert(dense_idx); -// } -// } - -// impl rustc_mir_dataflow::impls::MovePathIndexMapper for Sparse32 { -// type TargetIndex = u32; - -// #[inline(always)] -// fn domain_size(&self, _dense_domain_size: usize) -> usize { -// // self.dense.len() -// self.domain -// } - -// #[inline(always)] -// fn map_index(&self, dense_idx: MovePathIndex) -> Option { -// if dense_idx.as_usize() < self.seen.domain_size() && self.seen.contains(dense_idx) { -// let sparse_idx = self.phf.hash(&dense_idx); -// Some(sparse_idx as u32) -// } else { -// None -// } - -// // let bit = 1u64 << dense_idx.as_u32(); -// // if self.bitmap & bit == 0 { -// // return None; -// // } -// // // let mask = bit.wrapping_sub(1); -// // // let below = self.bitmap & mask; -// // // let sparse_idx = below.count_ones() as u32; -// // // Some(sparse_idx) - -// // // let sparse_idx = self.phf.try_hash(&dense_idx).expect("Couldn't insert item?!"); -// // let sparse_idx = self.phf.try_hash(&dense_idx).unwrap_or_else(|| { -// // panic!( -// // "Couldn't find hash of dense idx {:?}, bitmap is: {:#034b}, bit is: {:#034b}, combo is {:#034b}, and values are {:?}", -// // dense_idx, self.bitmap, bit, self.bitmap & bit, self.dense -// // ); -// // }); -// // // let sparse_idx = self.phf.hash(&dense_idx) as u32; -// // Some(sparse_idx as u32) -// } -// } - -// // --- -// use std::fmt; -// use std::marker::PhantomData; - -// use rustc_index::Idx; - -// type Word = u64; -// const WORD_BYTES: usize = size_of::(); -// const WORD_BITS: usize = WORD_BYTES * 8; - -// #[inline] -// fn num_words(domain_size: T) -> usize { -// domain_size.index().div_ceil(WORD_BITS) -// } - -// #[inline] -// fn word_index_and_mask(elem: T) -> (usize, Word) { -// let elem = elem.index(); -// let word_index = elem / WORD_BITS; -// let mask = 1 << (elem % WORD_BITS); -// (word_index, mask) -// } - -// struct 
Bitmap { -// domain_size: usize, -// words: smallvec::SmallVec<[Word; 64]>, -// marker: PhantomData, -// } - -// impl fmt::Debug for Bitmap { -// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { -// let mut words = Vec::new(); - -// for (_idx, word) in self.words.iter().copied().enumerate().rev() { -// // words.push(format!("{:08b}", word)); -// // words.push(format!("{:08b} ({} 1s)", word, word.count_ones())); -// let word = format!("{:0width$b}", word, width = WORD_BITS); -// // words.push(format!("{}: {}", idx, word)); -// words.push(format!("{}", word)); -// } - -// write!(f, "[{}]", words.join(", ")) -// } -// } - -// // impl Bitmap { -// // /// Gets the domain size. -// // pub(crate) fn domain_size(&self) -> usize { -// // self.domain_size -// // } -// // } - -// impl Bitmap { -// /// Creates a new, empty bitset with a given `domain_size`. -// #[inline] -// pub(crate) fn new_empty(domain_size: usize) -> Bitmap { -// let num_words = num_words(domain_size); -// Bitmap { domain_size, words: smallvec::smallvec![0; num_words], marker: PhantomData } -// } - -// /// Returns `true` if `self` contains `elem`. -// #[inline] -// pub(crate) fn contains(&self, elem: T) -> bool { -// if elem.index() >= self.domain_size { -// false -// } else { -// let (word_index, mask) = word_index_and_mask(elem); -// (unsafe { *self.words.get_unchecked(word_index) } & mask) != 0 -// } -// } - -// // /// Insert `elem`. -// // #[inline] -// // pub(crate) fn insert(&mut self, elem: T) { -// // // assert!( -// // // elem.index() < self.domain_size, -// // // "inserting element at index {} but domain size is {}", -// // // elem.index(), -// // // self.domain_size, -// // // ); -// // let (word_index, mask) = word_index_and_mask(elem); -// // let word = unsafe { *self.words.get_unchecked(word_index) }; -// // // let word_ref = &mut self.words[word_index]; -// // // let word = *word_ref; -// // let new_word = word | mask; -// // // *word_ref = new_word; -// // unsafe { *self.words.get_unchecked_mut(word_index) = new_word }; -// // } -// } - -// // --- -// pub(crate) struct Sparse { -// bitmap: Bitmap, -// sparse: smallvec::SmallVec<[MovePathIndex; 64]>, -// prefixes: Vec, -// } - -// impl Sparse { -// pub(crate) fn new(dense_domain: usize, sparse_domain: usize) -> Self { -// Self { -// bitmap: Bitmap::new_empty(dense_domain), -// sparse: smallvec::SmallVec::with_capacity(sparse_domain), -// prefixes: Vec::with_capacity(num_words(dense_domain)), -// } -// } - -// #[inline(always)] -// pub(crate) fn contains(&self, dense_idx: MovePathIndex) -> bool { -// self.bitmap.contains(dense_idx) -// } - -// fn compute_prefixes(&mut self) { -// let mut ones = 0; -// for word in self.bitmap.words.iter().copied() { -// ones += word.count_ones(); -// self.prefixes.push(ones); -// } -// } - -// // #[inline(always)] -// // pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> u32 { -// // let dense_idx = dense_idx.index(); -// // // Index within the word list -// // let word_idx = dense_idx / WORD_BITS; -// // // Index within the word at the word_idx -// // let idx = dense_idx % WORD_BITS; - -// // let bit: Word = 1 << idx; -// // let mask = bit.wrapping_sub(1); - -// // let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; - -// // // Mask of key that are smaller than the dense_idx within word_idx -// // let below_word = word & mask; - -// // // Number of keys that are smaller than the dense_idx within word_idx -// // // FIXME: fenwick tree ici!! 
-// // let sparse_idx_word = below_word.count_ones(); -// // let sparse_idx_rest: u32 = -// // self.bitmap.words[0..word_idx].iter().map(|word| word.count_ones()).sum(); - -// // let sparse_idx = sparse_idx_word + sparse_idx_rest; -// // sparse_idx -// // } - -// #[inline(always)] -// pub(crate) fn map_index(&self, dense_idx: MovePathIndex) -> u32 { -// let dense_idx = dense_idx.index(); -// // Index within the word list -// let word_idx = dense_idx / WORD_BITS; -// // Index within the word at the word_idx -// let idx = dense_idx % WORD_BITS; - -// let bit: Word = 1 << idx; -// let mask = bit.wrapping_sub(1); - -// let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; - -// // Mask of key that are smaller than the dense_idx within word_idx -// let below_word = word & mask; - -// // Number of keys that are smaller than the dense_idx within word_idx -// let sparse_idx_word = below_word.count_ones(); - -// let mut sparse_idx = sparse_idx_word; -// if word_idx > 0 { -// let sparse_idx_rest: u32 = self.prefixes[word_idx - 1]; -// sparse_idx += sparse_idx_rest; -// } - -// sparse_idx -// } - -// pub(crate) fn insert(&mut self, dense_idx: MovePathIndex) { -// let dense = dense_idx.index(); -// // Index within the word list -// let word_idx = dense / WORD_BITS; -// // Index within the word at the word_idx -// let idx = dense % WORD_BITS; - -// let bit: Word = 1 << idx; -// let mask = bit.wrapping_sub(1); - -// let word = unsafe { *self.bitmap.words.get_unchecked(word_idx) }; - -// // Mask of key that are smaller than the dense_idx within word_idx -// let below_word = word & mask; - -// // Number of keys that are smaller than the dense_idx within word_idx -// let sparse_idx_word = below_word.count_ones() as usize; - -// // The full sparse_idx is the sparse_idx_word + the sum of count_ones of all words [0..word_idx] -// // segment-tree / fenwick tree ici? -// let sparse_idx_rest: u32 = -// self.bitmap.words[0..word_idx].iter().map(|word| word.count_ones()).sum(); - -// let sparse_idx = sparse_idx_word + sparse_idx_rest as usize; - -// // If we're adding dense indices in an ascending order, we'll always be at the last position -// // of the vector. -// if sparse_idx == self.sparse.len() { -// self.sparse.push(dense_idx); -// } else { -// // Otherwise, we have a smaller index to add to the list. 
-//             self.sparse.insert(sparse_idx, dense_idx);
-//         }
-
-//         // eprintln!(
-//         //     "inserting dense idx {:>2}, word_idx: {}, idx: {}, sparse index found: {} (word: {}, rest: {}), bitmap: {:?}, sparse values: {:?}",
-//         //     dense_idx,
-//         //     word_idx,
-//         //     idx,
-//         //     sparse_idx,
-//         //     sparse_idx_word,
-//         //     sparse_idx_rest,
-//         //     self.bitmap,
-//         //     self.sparse,
-//         // );
-
-//         let new_word = word | bit;
-//         unsafe { *self.bitmap.words.get_unchecked_mut(word_idx) = new_word };
-//     }
-// }
-
-// impl rustc_mir_dataflow::impls::MovePathIndexMapper for Sparse {
-//     type TargetIndex = u32;
-
-//     #[inline(always)]
-//     fn mapped_domain_size<'tcx>(&self, _analysis: &impl HasMoveData<'tcx>) -> usize {
-//         self.sparse.len()
-//     }
-
-//     #[inline(always)]
-//     fn map_index(&self, dense_idx: MovePathIndex) -> Option<Self::TargetIndex> {
-//         if self.contains(dense_idx) { Some(self.map_index(dense_idx)) } else { None }
-//     }
-// }
-
-// // ---
-
 impl<'tcx> LivenessContext<'_, '_, 'tcx> {
     fn body(&self) -> &Body<'tcx> {
         self.typeck.body
diff --git a/compiler/rustc_mir_dataflow/src/framework/fmt.rs b/compiler/rustc_mir_dataflow/src/framework/fmt.rs
index cb728b3d05b46..3976b27b5a302 100644
--- a/compiler/rustc_mir_dataflow/src/framework/fmt.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/fmt.rs
@@ -256,11 +256,11 @@ impl<C> DebugWithContext<C> for u32 {}
 
 // impl<'tcx, C> DebugWithContext<C> for crate::move_paths::SparseMovePathIndex
 // where
-//     C: crate::move_paths::HasMoveData<'tcx> + 
+//     C: crate::move_paths::HasMoveData<'tcx> +
 //     C: crate::impls::MovePathIndexMapper
 // {
 //     fn fmt_with(&self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-//         let 
+//         let
 //         write!(f, "{}", ctxt.move_data().move_paths[*self])
 //     }
 // }
diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs
index dec9a7ed9bef1..5feee9c03e8e0 100644
--- a/compiler/rustc_mir_dataflow/src/impls/initialized2.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/initialized2.rs
@@ -135,8 +135,6 @@ impl<'a, 'tcx> MaybeInitializedPlaces2<'a, 'tcx, DefaultMovePathIndexMapper> {
     ) -> MaybeInitializedPlaces2<'a, 'tcx, FilteringMovePathIndexMapper> {
         let mut sparse_map = FxIndexMap::default();
         for (idx, &dense_idx) in interesting_move_paths.iter().enumerate() {
-            // SAFETY: `MovePathIndex`es are u32s.
-            // let sparse_idx = unsafe { SparseMovePathIndex::from_u32_unchecked(idx as u32) };
             let sparse_idx = SparseMovePathIndex::from_u32(idx.try_into().unwrap());
             sparse_map.insert(dense_idx, sparse_idx);
         }
diff --git a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs
index 9422bc1284b19..eefcdf277a61f 100644
--- a/compiler/rustc_mir_dataflow/src/move_paths/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/move_paths/mod.rs
@@ -42,7 +42,6 @@ rustc_index::newtype_index! {
     pub struct SparseMovePathIndex {}
 }
 
-
 rustc_index::newtype_index! {
     #[orderable]
     #[debug_format = "mo{}"]
@@ -116,7 +115,7 @@ impl<'tcx> MovePath<'tcx> {
     pub fn find_descendant(
         &self,
         move_paths: &IndexSlice<MovePathIndex, MovePath<'_>>,
-        mut f: impl FnMut(MovePathIndex) -> bool,
+        f: impl Fn(MovePathIndex) -> bool,
     ) -> Option<MovePathIndex> {
         let mut todo = if let Some(child) = self.first_child {
             vec![child]