diff --git a/clippy_data_structures/Cargo.toml b/clippy_data_structures/Cargo.toml new file mode 100644 index 000000000000..1826e9ce31c7 --- /dev/null +++ b/clippy_data_structures/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "clippy_data_structures" +version = "0.0.1" +edition = "2021" + +[dependencies] +arrayvec = { version = "0.7", default-features = false} +smallvec = { version = "1.8.1", features = ["union", "may_dangle", "const_new"] } + +[package.metadata.rust-analyzer] +# This package uses #[feature(rustc_private)] +rustc_private = true diff --git a/clippy_data_structures/src/bit_set_2d.rs b/clippy_data_structures/src/bit_set_2d.rs new file mode 100644 index 000000000000..4227331539bf --- /dev/null +++ b/clippy_data_structures/src/bit_set_2d.rs @@ -0,0 +1,428 @@ +use crate::BitSlice; +use crate::bit_slice::{Word, final_mask_for_size, word_count_from_bits}; +use crate::traits::{ + IntoRangeWithStride, LimitExplicitRangeBounds, RangeLen, SplitRangeAt, SubtractRangeItemsFromEdge, +}; +use core::iter; +use core::marker::PhantomData; +use rustc_arena::DroplessArena; +use rustc_index::{Idx, IntoSliceIdx}; + +/// A reference to a two-dimensional bit set. +/// +/// This is represented as a dense array of words stored in row major order with each row aligned to +/// the start of a word. +pub struct BitSlice2d<'a, R, C> { + words: &'a mut [Word], + rows: u32, + columns: u32, + row_stride: u32, + phantom: PhantomData<(R, C)>, +} +impl<'a, R, C> BitSlice2d<'a, R, C> { + /// Treats `words` as a two-dimensional bit set. + /// + /// The length of the given slice must match the number of words required to store a bit set + /// with the given dimensions. 
+ #[inline] + pub fn from_mut_words(words: &'a mut [Word], rows: u32, columns: u32) -> Self { + let row_stride = word_count_from_bits(columns as usize); + debug_assert_eq!(Some(words.len()), row_stride.checked_mul(rows as usize)); + Self { + words, + rows, + columns, + row_stride: row_stride as u32, + phantom: PhantomData, + } + } + + /// Allocates a new zero-initialized, two-dimensional bit set of the given size. + #[inline] + pub fn empty_arena(arena: &'a DroplessArena, rows: u32, columns: u32) -> Self { + let row_stride = word_count_from_bits(columns as usize); + Self { + words: arena.alloc_from_iter(iter::repeat_n(0usize, row_stride.checked_mul(rows as usize).unwrap())), + rows, + columns, + row_stride: row_stride as u32, + phantom: PhantomData, + } + } + + /// Gets the number of rows in the bit set. + #[inline] + pub const fn row_len(&self) -> u32 { + self.rows + } + + /// Gets the number of columns in the bit set. + #[inline] + pub const fn column_len(&self) -> u32 { + self.columns + } + + /// Gets a reference to the words backing this bit set. + #[inline] + pub const fn words(&self) -> &[Word] { + self.words + } + + /// Gets a mutable reference to the words backing this bit set. 
+ #[inline] + pub fn words_mut(&mut self) -> &mut [Word] { + self.words + } + + #[inline] + #[track_caller] + pub fn iter_rows( + &self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> + Clone { + self.words[range.into_slice_idx().into_range_with_stride(self.row_stride)] + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + } + + #[inline] + pub fn iter_mut_rows( + &mut self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> { + self.words[range.into_slice_idx().into_range_with_stride(self.row_stride)] + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + #[inline] + pub fn count_ones(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + } + + #[inline] + pub fn clear(&mut self) { + self.words.fill(0); + } + + #[inline] + pub fn fill(&mut self) { + self.words.fill(!0); + let mask = final_mask_for_size(self.columns as usize); + for row in self.iter_mut_rows(..) 
{ + row.mask_final_word(mask); + } + } + + pub fn union(&mut self, other: &BitSlice2d<'_, R, C>) -> bool { + assert_eq!(self.rows, other.rows); + assert_eq!(self.columns, other.columns); + self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| { + let prev = *dst; + *dst |= *src; + res || prev != *dst + }) + } +} +impl<'a, R: Idx, C: Idx> BitSlice2d<'a, R, C> { + #[inline] + pub fn enumerate_rows(&self) -> impl ExactSizeIterator)> + Clone { + self.words + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + #[inline] + pub fn enumerate_rows_mut(&mut self) -> impl ExactSizeIterator)> { + self.words + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + #[inline] + #[track_caller] + pub fn row(&self, row: R) -> &BitSlice { + assert!(row.index() < self.rows as usize); + let start = self.row_stride as usize * row.index(); + BitSlice::from_words(&self.words[start..start + self.row_stride as usize]) + } + + #[inline] + #[track_caller] + pub fn row_mut(&mut self, row: R) -> &mut BitSlice { + assert!(row.index() < self.rows as usize); + let start = self.row_stride as usize * row.index(); + BitSlice::from_words_mut(&mut self.words[start..start + self.row_stride as usize]) + } + + #[inline] + #[track_caller] + pub fn copy_rows(&mut self, src: impl IntoSliceIdx, dst: R) { + let src = src.into_slice_idx().into_range_with_stride(self.row_stride); + self.words.copy_within(src, dst.index()); + } + + #[inline] + #[track_caller] + pub fn move_rows( + &mut self, + src: impl IntoSliceIdx>, + dst: R, + ) { + let src = src.into_slice_idx().into_range_with_stride(self.row_stride); + let dst_start = dst.index() * self.row_stride as usize; + self.words.copy_within(src.clone(), dst_start); + let src_len = src.len(); + self.words[src.subtract_range_items_from_edge(dst_start..dst_start + 
src_len)].fill(0); + } + + #[inline] + #[track_caller] + pub fn clear_rows(&mut self, rows: impl IntoSliceIdx) { + let words = &mut self.words[rows.into_slice_idx().into_range_with_stride(self.row_stride)]; + words.fill(0); + } +} + +impl PartialEq for BitSlice2d<'_, R, C> { + fn eq(&self, other: &Self) -> bool { + self.columns == other.columns && self.rows == other.rows && self.words == other.words + } +} +impl Eq for BitSlice2d<'_, R, C> {} + +pub struct GrowableBitSet2d { + words: Vec, + rows: u32, + columns: u32, + row_stride: u32, + phantom: PhantomData<(R, C)>, +} +impl GrowableBitSet2d { + #[inline] + pub const fn new(columns: u32) -> Self { + Self { + words: Vec::new(), + rows: 0, + columns, + row_stride: word_count_from_bits(columns as usize) as u32, + phantom: PhantomData, + } + } + + #[inline] + pub const fn row_len(&self) -> u32 { + self.rows + } + + #[inline] + pub const fn column_len(&self) -> u32 { + self.columns + } + + #[inline] + pub fn words(&self) -> &[Word] { + self.words.as_slice() + } + + #[inline] + pub fn words_mut(&mut self) -> &mut [Word] { + self.words.as_mut_slice() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + #[inline] + pub fn iter_rows( + &self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> + Clone { + self.words[range + .into_slice_idx() + .limit_explicit_range_bounds(self.rows as usize) + .into_range_with_stride(self.row_stride)] + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + } + + #[inline] + pub fn iter_mut_rows( + &mut self, + range: impl IntoSliceIdx, + ) -> impl ExactSizeIterator> { + self.words[range + .into_slice_idx() + .limit_explicit_range_bounds(self.rows as usize) + .into_range_with_stride(self.row_stride)] + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + } + + #[inline] + pub fn count_ones(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + 
} + + #[inline] + pub fn clear(&mut self) { + self.words.clear(); + } + + #[inline] + pub fn as_mut_slice(&mut self) -> BitSlice2d<'_, R, C> { + BitSlice2d { + words: self.words.as_mut_slice(), + rows: self.rows, + columns: self.columns, + row_stride: self.row_stride, + phantom: PhantomData, + } + } + + pub fn union(&mut self, other: &Self) -> bool { + assert_eq!(self.columns, other.columns); + if self.rows < other.rows { + self.words.resize(other.row_stride as usize * other.rows as usize, 0); + self.rows = other.rows; + } + self.words.iter_mut().zip(&*other.words).fold(false, |res, (dst, src)| { + let prev = *dst; + *dst |= *src; + res || prev != *dst + }) + } +} +impl GrowableBitSet2d { + #[inline] + pub fn enumerate_rows(&self) -> impl ExactSizeIterator)> + Clone { + self.words + .chunks_exact(self.row_stride as usize) + .map(|words| BitSlice::from_words(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + #[inline] + pub fn enumerate_mut_rows(&mut self) -> impl ExactSizeIterator)> { + self.words + .chunks_exact_mut(self.row_stride as usize) + .map(|words| BitSlice::from_words_mut(words)) + .enumerate() + .map(|(i, row)| (R::new(i), row)) + } + + #[inline] + pub fn opt_row(&self, row: R) -> Option<&BitSlice> { + let start = self.row_stride as usize * row.index(); + self.words + .get(start..start + self.row_stride as usize) + .map(BitSlice::from_words) + } + + #[inline] + pub fn ensure_row(&mut self, row: R) -> &mut BitSlice { + let start = self.row_stride as usize * row.index(); + let end = start + self.row_stride as usize; + BitSlice::from_words_mut(match self.words.get_mut(start..end) { + Some(_) => &mut self.words[start..end], + None => { + self.words.resize(end, 0); + self.rows = row.index() as u32 + 1; + &mut self.words[start..end] + }, + }) + } + + #[inline] + pub fn clear_rows(&mut self, rows: impl IntoSliceIdx) { + self.words[rows + .into_slice_idx() + .limit_explicit_range_bounds(self.rows as usize) + 
.into_range_with_stride(self.row_stride)] + .fill(0); + } + + pub fn copy_rows(&mut self, src: impl IntoSliceIdx, dst: R) { + let (src_range, src_extra) = src.into_slice_idx().split_range_at(self.rows as usize); + let dst_start = dst.index() * self.row_stride as usize; + let dst_row_end = dst.index() + src_range.len(); + let src_range = src_range.into_range_with_stride(self.row_stride); + let dst_copy_end = dst_start + src_range.len(); + if self.rows < dst_row_end as u32 { + self.words.resize(dst_copy_end, 0); + self.rows = dst_row_end as u32; + } + self.words.copy_within(src_range, dst_start); + let dst_end = dst_copy_end + self.words.len().min(src_extra * self.row_stride as usize); + self.words[dst_copy_end..dst_end].fill(0); + } + + pub fn move_rows(&mut self, src: impl IntoSliceIdx, dst: R) { + let (src_range, src_extra) = src.into_slice_idx().split_range_at(self.rows as usize); + let dst_start = dst.index() * self.row_stride as usize; + let dst_row_end = dst.index() + src_range.len(); + let src_range = src_range.into_range_with_stride(self.row_stride); + let dst_copy_end = dst_start + src_range.len(); + if self.rows < dst_row_end as u32 { + self.words.resize(dst_copy_end, 0); + self.rows = dst_row_end as u32; + } + self.words.copy_within(src_range.clone(), dst_start); + let dst_end = self + .words + .len() + .min(dst_copy_end + src_extra * self.row_stride as usize); + self.words[dst_copy_end..dst_end].fill(0); + self.words[src_range.subtract_range_items_from_edge(dst_start..dst_end)].fill(0); + } +} + +impl PartialEq for GrowableBitSet2d { + fn eq(&self, other: &Self) -> bool { + if self.columns != other.columns { + return false; + } + let (lhs, rhs, extra) = match self.words.split_at_checked(other.words.len()) { + Some((lhs, extra)) => (lhs, other.words.as_slice(), extra), + None => { + let (rhs, extra) = other.words.split_at(self.words.len()); + (self.words.as_slice(), rhs, extra) + }, + }; + lhs == rhs && extra.iter().all(|&x| x == 0) + } +} +impl Eq for 
GrowableBitSet2d {} + +impl Clone for GrowableBitSet2d { + #[inline] + fn clone(&self) -> Self { + Self { + words: self.words.clone(), + rows: self.rows, + columns: self.columns, + row_stride: self.row_stride, + phantom: PhantomData, + } + } + + #[inline] + fn clone_from(&mut self, source: &Self) { + self.words.clone_from(&source.words); + self.rows = source.rows; + self.columns = source.columns; + self.row_stride = source.row_stride; + } +} diff --git a/clippy_data_structures/src/bit_slice.rs b/clippy_data_structures/src/bit_slice.rs new file mode 100644 index 000000000000..d700d631e13b --- /dev/null +++ b/clippy_data_structures/src/bit_slice.rs @@ -0,0 +1,469 @@ +use core::marker::PhantomData; +use core::mem::{self, transmute}; +use core::ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::slice::{self, SliceIndex}; +use core::{iter, range}; +use rustc_arena::DroplessArena; +use rustc_index::{Idx, IntoSliceIdx}; + +pub type Word = usize; +pub const WORD_BITS: usize = Word::BITS as usize; +pub const MAX_WORDS: usize = usize::MAX / WORD_BITS; + +#[inline] +pub const fn word_count_from_bits(bits: usize) -> usize { + (bits + (WORD_BITS - 1)) / WORD_BITS +} + +#[inline] +pub const fn final_mask_for_size(bits: usize) -> Word { + (!(!0usize << bits % WORD_BITS)).wrapping_sub((bits % WORD_BITS == 0) as usize) +} + +pub struct BitRange { + /// The range of affected words. + words: R, + /// The amount to shift to make a bit-mask for the first word. + first_shift: u8, + /// The amount to shift to make a bit-mask for the last word. 
+ last_shift: u8, +} +impl BitRange { + #[inline] + pub const fn first_mask(&self) -> Word { + !0usize << self.first_shift + } + + #[inline] + pub const fn last_mask(&self) -> Word { + !0usize >> self.last_shift + } +} + +pub trait IntoBitRange: Sized { + type Range: SliceIndex<[Word], Output = [Word]>; + fn into_bit_range(self) -> BitRange; +} +impl IntoBitRange for RangeFull { + type Range = Self; + #[inline] + fn into_bit_range(self) -> BitRange { + BitRange { + words: self, + first_shift: 0, + last_shift: 0, + } + } +} +impl IntoBitRange for Range { + type Range = Self; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: Range { + start: start.word, + end: end.word + (end.bit != 0) as usize, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for RangeFrom { + type Range = Self; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + BitRange { + words: RangeFrom { start: start.word }, + first_shift: start.bit as u8, + last_shift: 0, + } + } +} +impl IntoBitRange for RangeTo { + type Range = Self; + #[inline] + fn into_bit_range(self) -> BitRange { + let end = BitIdx::from_bit(self.end); + BitRange { + words: RangeTo { + end: end.word + (end.bit != 0) as usize, + }, + first_shift: 0, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for RangeInclusive { + type Range = Range; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(*self.start()); + let end = BitIdx::from_bit(*self.end()); + BitRange { + words: Range { + start: start.word, + end: end.word + 1, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} +impl IntoBitRange for RangeToInclusive { + type Range = RangeTo; + #[inline] + fn 
into_bit_range(self) -> BitRange { + let end = BitIdx::from_bit(self.end); + BitRange { + words: RangeTo { end: end.word + 1 }, + first_shift: 0, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} +impl IntoBitRange for range::Range { + type Range = range::Range; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: range::Range { + start: start.word, + end: end.word + (end.bit != 0) as usize, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - (end.bit.wrapping_sub(1) % WORD_BITS)) as u8, + } + } +} +impl IntoBitRange for range::RangeFrom { + type Range = range::RangeFrom; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + BitRange { + words: range::RangeFrom { start: start.word }, + first_shift: start.bit as u8, + last_shift: 0, + } + } +} +impl IntoBitRange for range::RangeInclusive { + type Range = range::Range; + #[inline] + fn into_bit_range(self) -> BitRange { + let start = BitIdx::from_bit(self.start); + let end = BitIdx::from_bit(self.end); + BitRange { + words: range::Range { + start: start.word, + end: end.word + 1, + }, + first_shift: start.bit as u8, + last_shift: ((WORD_BITS - 1) - end.bit) as u8, + } + } +} + +struct BitIdx { + word: usize, + bit: usize, +} +impl BitIdx { + #[inline] + fn from_bit(bit: T) -> Self { + let bit = bit.index(); + Self { + word: bit / WORD_BITS, + bit: bit % WORD_BITS, + } + } + + #[inline] + fn word_mask(&self) -> Word { + 1usize << self.bit + } +} + +/// A slice of words where each bit is treated as an individual value. +/// +/// Note: This can only hold bits as a multiple of `WORD_SIZE`. Use +/// `mask_final_word(final_mask_for_size(len))` to clear the final bits greater than or equal to +/// `len`. 
+#[repr(transparent)] +pub struct BitSlice { + phantom: PhantomData, + pub words: [Word], +} +impl BitSlice { + #[inline] + pub const fn from_words(words: &[Word]) -> &Self { + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::<&[Word], &Self>(words) } + } + + #[inline] + pub fn from_words_mut(words: &mut [Word]) -> &mut Self { + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::<&mut [Word], &mut Self>(words) } + } + + #[inline] + pub fn from_boxed_words(words: Box<[Word]>) -> Box { + debug_assert!(words.len() <= MAX_WORDS); + // SAFETY: `BitSlice` is a transparent wrapper around `[Word]`. + unsafe { transmute::, Box>(words) } + } + + /// Gets the size of this slice in bits. + #[inline] + pub const fn bit_len(&self) -> usize { + self.words.len() * WORD_BITS + } + + /// Checks whether all bits are zero. + #[inline] + pub fn is_empty(&self) -> bool { + self.words.iter().all(|&x| x == 0) + } + + /// Counts the number of set bits in the slice. + #[inline] + pub fn count_ones(&self) -> usize { + self.words.iter().map(|&x| x.count_ones() as usize).sum() + } + + /// Allocates a boxed `BitSlice` of the given size rounded up to the nearest word size. + #[inline] + pub fn empty_box(bits: usize) -> Box { + Self::from_boxed_words(vec![0usize; word_count_from_bits(bits)].into_boxed_slice()) + } + + /// Allocates a `BitSlice` of the given size rounded up to the nearest word size. + #[inline] + pub fn empty_arena(arena: &DroplessArena, bits: usize) -> &mut Self { + Self::from_words_mut(arena.alloc_from_iter(iter::repeat_n(0, word_count_from_bits(bits)))) + } + + /// Applies a bit-mask to the final word of the slice. + #[inline] + pub fn mask_final_word(&mut self, mask: Word) { + if let Some(word) = self.words.last_mut() { + *word &= mask; + } + } + + /// Fills the entire bit slice. 
+ /// + /// Note: This can only work with whole `Word`s. Use `mask_final_word(final_mask_for_size(len))` + /// to clear the final bits greater than or equal to `len`. + #[inline] + pub fn fill(&mut self) { + self.words.fill(!0); + } + + /// Clears the entire bit slice. + #[inline] + pub fn clear(&mut self) { + self.words.fill(0); + } + + /// Calculates the union over two sets storing the results in `self`. Return if `self` has + /// changed. + /// + /// Note: The result will be truncated to the number of bits contained in `self` + pub fn union_trunc(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs |= *rhs; + prev != *lhs || res + }) + } + + /// Calculates the intersection over two sets storing the results in `self`. Return if `self` + /// has changed. + pub fn intersect(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs &= *rhs; + prev != *lhs || res + }) + } + + /// Subtracts `other` from `self` storing the results in `self`. Return if `self` has changed. + pub fn subtract(&mut self, other: &Self) -> bool { + self.words.iter_mut().zip(&other.words).fold(false, |res, (lhs, rhs)| { + let prev = *lhs; + *lhs &= !*rhs; + prev != *lhs || res + }) + } +} +impl BitSlice { + /// Inserts the given bit and returns whether the value changed. + #[inline] + #[track_caller] + pub fn insert(&mut self, bit: T) -> bool { + let idx = BitIdx::from_bit(bit); + let res = self.words[idx.word] & idx.word_mask() == 0; + self.words[idx.word] |= idx.word_mask(); + res + } + + /// Checks if the given bit is contained in the slice. + #[inline] + pub fn contains(&self, bit: T) -> bool { + let idx = BitIdx::from_bit(bit); + self.words.get(idx.word).map_or(0, |&x| x) & idx.word_mask() != 0 + } + + /// Inserts the given range of bits into the slice. 
+ #[track_caller] + pub fn insert_range(&mut self, range: impl IntoSliceIdx) { + let range = range.into_slice_idx().into_bit_range(); + let first = range.first_mask(); + let last = range.last_mask(); + match &mut self.words[range.words] { + [] => {}, + [word] => *word |= first & last, + [word1, words @ .., word2] => { + *word1 |= first; + words.fill(!0); + *word2 |= last; + }, + } + } + + #[inline] + pub fn iter(&self) -> Iter<'_, T> { + Iter::new(&self.words) + } + + #[inline] + pub fn drain(&mut self) -> Drain<'_, T> { + Drain::new(&mut self.words) + } +} + +impl Extend for &mut BitSlice { + fn extend>(&mut self, iter: Iter) { + for i in iter { + self.insert(i); + } + } +} + +#[derive(Default, Clone)] +pub struct WordBitIter(Word); +impl WordBitIter { + #[inline] + pub const fn new(word: Word) -> Self { + Self(word) + } +} +impl Iterator for WordBitIter { + type Item = u32; + #[inline] + fn next(&mut self) -> Option { + if self.0 == 0 { + None + } else { + let bit_pos = self.0.trailing_zeros(); + self.0 ^= 1 << bit_pos; + Some(bit_pos) + } + } +} + +// `BitIter` copied from rustc. +pub struct Iter<'a, T: Idx> { + /// Iterator over a single word. + word: WordBitIter, + + /// The offset (measured in bits) of the current word. + offset: usize, + + /// Underlying iterator over the words. + iter: slice::Iter<'a, Word>, + + marker: PhantomData, +} +impl<'a, T: Idx> Iter<'a, T> { + #[inline] + fn new(words: &'a [Word]) -> Self { + // We initialize `word` and `offset` to degenerate values. On the first + // call to `next()` we will fall through to getting the first word from + // `iter`, which sets `word` to the first word (if there is one) and + // `offset` to 0. Doing it this way saves us from having to maintain + // additional state about whether we have started. 
+ Self { + word: WordBitIter::new(0), + offset: usize::MAX - (WORD_BITS - 1), + iter: words.iter(), + marker: PhantomData, + } + } +} +impl<'a, T: Idx> Iterator for Iter<'a, T> { + type Item = T; + fn next(&mut self) -> Option { + loop { + if let Some(idx) = self.word.next() { + return Some(T::new(idx as usize + self.offset)); + } + + // Move onto the next word. `wrapping_add()` is needed to handle + // the degenerate initial value given to `offset` in `new()`. + self.word = WordBitIter::new(*self.iter.next()?); + self.offset = self.offset.wrapping_add(WORD_BITS); + } + } +} + +pub struct Drain<'a, T> { + word: WordBitIter, + offset: usize, + iter: slice::IterMut<'a, usize>, + marker: PhantomData, +} +impl<'a, T> Drain<'a, T> { + #[inline] + fn new(words: &'a mut [Word]) -> Self { + Self { + word: WordBitIter::new(0), + offset: usize::MAX - (WORD_BITS - 1), + iter: words.iter_mut(), + marker: PhantomData, + } + } +} +impl Drop for Drain<'_, T> { + #[inline] + fn drop(&mut self) { + for x in &mut self.iter { + *x = 0; + } + } +} +impl Iterator for Drain<'_, T> { + type Item = T; + fn next(&mut self) -> Option { + loop { + if let Some(idx) = self.word.next() { + return Some(T::new(idx as usize + self.offset)); + } + + // Move onto the next word. `wrapping_add()` is needed to handle + // the degenerate initial value given to `offset` in `new()`. 
+ self.word = WordBitIter::new(mem::replace(self.iter.next()?, 0)); + self.offset = self.offset.wrapping_add(WORD_BITS); + } + } +} diff --git a/clippy_data_structures/src/lib.rs b/clippy_data_structures/src/lib.rs new file mode 100644 index 000000000000..e5163034f018 --- /dev/null +++ b/clippy_data_structures/src/lib.rs @@ -0,0 +1,75 @@ +#![feature(array_windows)] +#![feature(if_let_guard)] +#![feature(min_specialization)] +#![feature(new_range_api)] +#![feature(rustc_private)] +#![feature(slice_partition_dedup)] + +extern crate rustc_arena; +extern crate rustc_index; +extern crate rustc_mir_dataflow; + +use crate::traits::{RangeLen, SubtractRangeItemsFromEdge}; +use arrayvec::ArrayVec; +use core::ops::RangeBounds; +use smallvec::SmallVec; + +mod sorted; +mod traits; + +pub mod bit_slice; +pub use bit_slice::BitSlice; + +pub mod bit_set_2d; +pub use bit_set_2d::{BitSlice2d, GrowableBitSet2d}; + +mod slice_set; +pub use slice_set::SliceSet; + +mod vec_set; +pub type VecSet = vec_set::VecSet>; +pub type SmallVecSet = vec_set::VecSet>; +pub type ArrayVecSet = vec_set::VecSet>; + +/// An iterator where the size hint is provided by calling `Iterator::count`. +pub struct CountedIter(pub T); +impl Iterator for CountedIter +where + T: Iterator + Clone, +{ + type Item = T::Item; + fn next(&mut self) -> Option { + self.0.next() + } + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } + fn count(self) -> usize { + self.0.count() + } + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.fold(init, f) + } + fn size_hint(&self) -> (usize, Option) { + let size = self.0.clone().count(); + (size, Some(size)) + } +} + +/// Moves items within the slice leaving behind the default value at indices from the source range +/// which are not also part of the destination range. 
+#[inline] +pub fn move_within_slice( + slice: &mut [impl Copy + Default], + src: impl Clone + RangeBounds + RangeLen + SubtractRangeItemsFromEdge, + dst: usize, +) { + slice.copy_within(src.clone(), dst); + let src_len = src.len(); + for x in &mut slice[src.subtract_range_items_from_edge(dst..dst + src_len)] { + *x = Default::default() + } +} diff --git a/clippy_data_structures/src/slice_set.rs b/clippy_data_structures/src/slice_set.rs new file mode 100644 index 000000000000..46bc6e8739e2 --- /dev/null +++ b/clippy_data_structures/src/slice_set.rs @@ -0,0 +1,153 @@ +use crate::sorted; +use crate::traits::SortedIndex; +use core::borrow::Borrow; +use core::mem::transmute; +use core::ops::Deref; +use core::slice; + +/// Wrapper type around a `Vec`-like or slice type where all items are unique and sorted. +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(transparent)] +pub struct SliceSet { + data: [T], +} +impl SliceSet { + #[inline] + pub const fn empty() -> &'static Self { + Self::from_sorted_unchecked(&[]) + } + + #[inline] + pub const fn from_ref(value: &T) -> &Self { + Self::from_sorted_unchecked(slice::from_ref(value)) + } + + /// Same as `from_sorted`, but without debug assertions. + #[inline] + pub(crate) const fn from_sorted_unchecked(slice: &[T]) -> &Self { + // SAFETY: `SliceSet`` is a transparent wrapper around `T`. + unsafe { transmute::<&[T], &SliceSet>(slice) } + } + + /// Gets the current value as a regular slice. + #[inline] + pub const fn as_raw_slice(&self) -> &[T] { + &self.data + } + + /// Checks if the set contains the given value. + #[inline] + pub fn contains(&self, item: &Q) -> bool + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data.binary_search_by(|x| x.borrow().cmp(item)).is_ok() + } + + /// Gets the specified item from the set. 
+ #[inline] + pub fn get(&self, item: &Q) -> Option<&T> + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data + .binary_search_by(|x| x.borrow().cmp(item)) + .ok() + .map(|i| &self.data[i]) + } + + /// Gets the index of the specified item in the set. + #[inline] + pub fn get_index(&self, item: &Q) -> Option + where + T: Borrow, + Q: Ord + ?Sized, + { + self.data.binary_search_by(|x| x.borrow().cmp(item)).ok() + } + + /// Gets a subset of the current set. + #[inline] + pub fn get_range(&self, range: impl SortedIndex) -> &Self + where + T: Borrow, + Q: Ord + ?Sized, + { + Self::from_sorted_unchecked( + &self.data[range.find_range(&self.data, |slice, target| { + slice.binary_search_by(|x| x.borrow().cmp(target)) + })], + ) + } +} +impl SliceSet { + /// Assumes the given slice is sorted with no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice is unsorted or contains + /// duplicates. + #[inline] + pub fn from_sorted(slice: &[T]) -> &Self { + debug_assert!(sorted::is_slice_set(slice)); + Self::from_sorted_unchecked(slice) + } + + /// Sorts the given slice and assumes no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice contains duplicates. + #[inline] + pub fn from_unsorted_slice(slice: &mut [T]) -> &Self { + slice.sort_unstable(); + Self::from_sorted(slice) + } + + /// Sorts and partitions out duplicates from the given slice. + #[inline] + pub fn from_unsorted_slice_dedup(slice: &mut [T]) -> &Self { + slice.sort_unstable(); + Self::from_sorted_unchecked(slice.partition_dedup().0) + } + + /// Checks if this set is a subset of another set. 
+ #[inline] + pub fn is_subset_of>(&self, other: &U) -> bool { + let other: &Self = other.borrow(); + if self.len() > other.len() { + return false; + } + if ::should_binary_search(other.len(), self.len()) { + sorted::is_subset_of_binary(self, other) + } else { + sorted::is_subset_of_linear(self, other) + } + } + + #[inline] + pub fn is_superset_of(&self, other: &Self) -> bool { + other.is_subset_of(self) + } +} + +impl Deref for SliceSet { + type Target = [T]; + #[inline] + fn deref(&self) -> &Self::Target { + &self.data + } +} +impl Borrow<[T]> for SliceSet { + #[inline] + fn borrow(&self) -> &[T] { + &self.data + } +} + +impl<'a, T> IntoIterator for &'a SliceSet { + type Item = &'a T; + type IntoIter = slice::Iter<'a, T>; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.data.iter() + } +} diff --git a/clippy_data_structures/src/sorted.rs b/clippy_data_structures/src/sorted.rs new file mode 100644 index 000000000000..c5adeff55893 --- /dev/null +++ b/clippy_data_structures/src/sorted.rs @@ -0,0 +1,184 @@ +use crate::traits::{VecLike, VecLikeCapacity}; +use arrayvec::ArrayVec; +use core::cmp::Ordering; + +/// Determines whether a binary or linear search should be used when searching for `count` sorted +/// items in a sorted list of size `len`. +#[inline] +fn should_binary_search_gen(list_size: usize, search_count: usize) -> bool { + // Using binary search has a complexity of `O(log2(len) * count)` with an average case only slightly + // better. This roughly calculates if the binary search will be faster, erring on the side of a + // linear search. + + // This is essentially `count < len / len.ilog2().next_power_of_two() / 2`, but with better codegen. 
+ let log2 = (usize::BITS - 1).wrapping_sub(list_size.leading_zeros()); + search_count < list_size.wrapping_shr(usize::BITS - log2.leading_zeros()) +} + +pub trait ShouldBinarySearchSpec { + fn should_binary_search(list_size: usize, search_count: usize) -> bool; +} +impl ShouldBinarySearchSpec for T { + #[inline] + default fn should_binary_search(list_size: usize, search_count: usize) -> bool { + should_binary_search_gen(list_size, search_count) + } +} +impl ShouldBinarySearchSpec for ArrayVec { + #[inline] + fn should_binary_search(list_size: usize, search_count: usize) -> bool { + N > 6 && should_binary_search_gen(list_size, search_count) + } +} +impl ShouldBinarySearchSpec for crate::vec_set::VecSet> { + #[inline] + fn should_binary_search(list_size: usize, search_count: usize) -> bool { + N > 6 && should_binary_search_gen(list_size, search_count) + } +} + +pub fn linear_search_by(slice: &[T], mut f: impl FnMut(&T) -> Ordering) -> Result { + for (i, item) in slice.iter().enumerate() { + match f(item) { + Ordering::Less => {}, + Ordering::Equal => return Ok(i), + Ordering::Greater => return Err(i), + } + } + Err(slice.len()) +} + +pub fn fill_empty_from_iter_union>( + dst: &mut T, + mut xs: impl Iterator, + mut ys: impl Iterator, + mut cmp: impl FnMut(&T::Item, &T::Item) -> Ordering, +) { + let mut next_x = xs.next(); + let mut next_y = ys.next(); + loop { + match (next_x, next_y) { + (Some(x), Some(y)) => match cmp(&x, &y) { + Ordering::Equal => { + dst.push(x); + next_x = xs.next(); + next_y = ys.next(); + }, + Ordering::Less => { + dst.push(x); + next_x = xs.next(); + next_y = Some(y); + }, + Ordering::Greater => { + dst.push(y); + next_x = Some(x); + next_y = ys.next(); + }, + }, + (Some(x), None) => { + dst.push(x); + dst.extend(xs); + break; + }, + (None, Some(y)) => { + dst.push(y); + dst.extend(ys); + break; + }, + (None, None) => break, + } + } +} + +/// Merges the contents of the iterator into the list. 
+/// +/// Will panic with debug assertions enabled if the input sequence is not a sorted set. +fn union_impl( + list: &mut T, + mut items: impl Iterator, + mut search: impl FnMut(&[T::Item], &T::Item) -> Result, + mut merge: impl FnMut(&mut T::Item, T::Item), +) where + T: VecLike + Extend + ?Sized, +{ + let mut i = 0usize; + while let Some(next) = items.next() { + let slice = &mut list.borrow_mut()[i..]; + match search(slice, &next) { + Ok(j) => { + merge(&mut slice[j], next); + i += j; + }, + Err(j) if j != slice.len() => { + list.insert(i + j, next); + i += j; + }, + Err(_) => { + list.push(next); + list.extend(items); + return; + }, + } + } +} + +/// Performs a union between two sorted sets, storing the result in the first. +/// +/// Both lists must be sorted and contain no duplicates according to the given comparison function. +/// For any duplicates between the two lists the given merge function will be used to combine the +/// two values. This function must not change the sort order of the item. 
+pub fn union( + list: &mut T, + items: impl IntoIterator, + mut cmp: impl FnMut(&T::Item, &T::Item) -> Ordering, + merge: impl FnMut(&mut T::Item, T::Item), +) where + T: VecLikeCapacity + Extend + ?Sized, +{ + let items = items.into_iter(); + let (min, max) = items.size_hint(); + list.reserve(min); + let incoming = match max { + Some(max) => min.midpoint(max), + None => usize::MAX, + }; + if ::should_binary_search(list.borrow().len(), incoming) { + union_impl(list, items, |list, item| list.binary_search_by(|x| cmp(x, item)), merge); + } else { + union_impl( + list, + items, + |list, item| linear_search_by(list, |x| cmp(x, item)), + merge, + ); + } +} + +pub fn is_subset_of_linear(xs: &[T], ys: &[T]) -> bool { + let mut y = ys.iter(); + 'outer: for x in xs { + for y in &mut y { + match x.cmp(y) { + Ordering::Equal => continue 'outer, + Ordering::Less => return false, + Ordering::Greater => {}, + } + } + return false; + } + true +} + +pub fn is_subset_of_binary(xs: &[T], mut ys: &[T]) -> bool { + for x in xs { + match ys.binary_search(x) { + Ok(i) => ys = &ys[i + 1..], + Err(_) => return false, + } + } + true +} + +pub fn is_slice_set(slice: &[T]) -> bool { + slice.array_windows::<2>().all(|[x, y]| x.cmp(y).is_lt()) +} diff --git a/clippy_data_structures/src/traits.rs b/clippy_data_structures/src/traits.rs new file mode 100644 index 000000000000..9b2c1f78e545 --- /dev/null +++ b/clippy_data_structures/src/traits.rs @@ -0,0 +1,739 @@ +use arrayvec::ArrayVec; +use core::borrow::BorrowMut; +use core::ops::{Bound, Range, RangeBounds, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use core::range; +use core::slice::SliceIndex; +use smallvec::SmallVec; + +/// Trait for types which act like a `Vec`. 
+pub trait VecLike: BorrowMut<[Self::Item]> { + type Item; + type Drain<'a> + where + Self: 'a, + Self::Item: 'a; + + fn clear(&mut self); + fn drain(&mut self, range: impl RangeBounds) -> Self::Drain<'_>; + fn push(&mut self, item: Self::Item); + fn insert(&mut self, idx: usize, item: Self::Item); + fn remove(&mut self, idx: usize) -> Self::Item; + fn retain(&mut self, f: impl FnMut(&mut Self::Item) -> bool); + fn splice(&mut self, range: impl RangeBounds, replacement: impl IntoIterator); + fn insert_within_capacity(&mut self, idx: usize, item: Self::Item) -> Result<(), Self::Item>; +} +pub trait VecLikeCapacity: VecLike { + /// Creates a new value with the specified capacity. + fn with_capacity(size: usize) -> Self; + + /// Reserves space for at least `additional` more items. + fn reserve(&mut self, additional: usize); +} +pub trait VecLikeDedup: VecLike { + /// Removes consecutive repeated elements from the vector. + fn dedup(&mut self); +} + +impl VecLike for Vec { + type Item = T; + type Drain<'a> + = std::vec::Drain<'a, T> + where + Self: 'a, + Self::Item: 'a; + + #[inline] + fn clear(&mut self) { + self.clear(); + } + + #[inline] + fn drain(&mut self, range: impl RangeBounds) -> Self::Drain<'_> { + self.drain(range) + } + + #[inline] + fn push(&mut self, item: Self::Item) { + self.push(item); + } + + #[inline] + #[track_caller] + fn insert(&mut self, idx: usize, item: T) { + self.insert(idx, item); + } + + #[inline] + #[track_caller] + fn insert_within_capacity(&mut self, idx: usize, item: T) -> Result<(), T> { + if self.len() < self.capacity() { + self.insert(idx, item); + Ok(()) + } else { + Err(item) + } + } + + #[inline] + #[track_caller] + fn remove(&mut self, idx: usize) -> T { + self.remove(idx) + } + + #[inline] + fn retain(&mut self, f: impl FnMut(&mut T) -> bool) { + self.retain_mut(f); + } + + #[inline] + #[track_caller] + fn splice(&mut self, range: impl RangeBounds, replacement: impl IntoIterator) { + self.splice(range, replacement); + } +} 
+impl VecLikeCapacity for Vec { + #[inline] + fn with_capacity(size: usize) -> Self { + Self::with_capacity(size) + } + + #[inline] + fn reserve(&mut self, additional: usize) { + self.reserve(additional); + } +} +impl VecLikeDedup for Vec { + #[inline] + fn dedup(&mut self) { + self.dedup(); + } +} + +impl VecLike for SmallVec<[T; N]> { + type Item = T; + type Drain<'a> + = smallvec::Drain<'a, [T; N]> + where + Self: 'a, + Self::Item: 'a; + + #[inline] + fn clear(&mut self) { + self.clear(); + } + + #[inline] + fn drain(&mut self, range: impl RangeBounds) -> Self::Drain<'_> { + self.drain(range) + } + + #[inline] + fn push(&mut self, item: Self::Item) { + self.push(item); + } + + #[inline] + #[track_caller] + fn insert(&mut self, idx: usize, item: T) { + self.insert(idx, item); + } + + #[inline] + #[track_caller] + fn insert_within_capacity(&mut self, idx: usize, item: T) -> Result<(), T> { + if self.len() < self.capacity() { + self.insert(idx, item); + Ok(()) + } else { + Err(item) + } + } + + #[inline] + #[track_caller] + fn remove(&mut self, idx: usize) -> T { + self.remove(idx) + } + + #[inline] + fn retain(&mut self, f: impl FnMut(&mut T) -> bool) { + self.retain(f); + } + + #[inline] + #[track_caller] + fn splice(&mut self, range: impl RangeBounds, replacement: impl IntoIterator) { + let i = match range.start_bound() { + Bound::Unbounded => 0, + Bound::Included(&x) => x, + Bound::Excluded(&x) => x + 1, + }; + self.drain(range); + self.insert_many(i, replacement); + } +} +impl VecLikeCapacity for SmallVec<[T; N]> { + #[inline] + fn with_capacity(size: usize) -> Self { + Self::with_capacity(size) + } + + #[inline] + fn reserve(&mut self, additional: usize) { + self.reserve(additional); + } +} +impl VecLikeDedup for SmallVec<[T; N]> { + #[inline] + fn dedup(&mut self) { + self.dedup(); + } +} + +impl VecLike for ArrayVec { + type Item = T; + type Drain<'a> + = arrayvec::Drain<'a, T, N> + where + Self: 'a, + Self::Item: 'a; + + #[inline] + fn clear(&mut self) { + 
self.clear(); + } + + #[inline] + fn drain(&mut self, range: impl RangeBounds) -> Self::Drain<'_> { + self.drain(range) + } + + #[inline] + fn push(&mut self, item: Self::Item) { + self.push(item); + } + + #[inline] + #[track_caller] + fn insert(&mut self, idx: usize, item: T) { + self.insert(idx, item); + } + + #[inline] + fn insert_within_capacity(&mut self, idx: usize, item: T) -> Result<(), T> { + self.try_insert(idx, item).map_err(|e| e.element()) + } + + #[inline] + #[track_caller] + fn remove(&mut self, idx: usize) -> T { + self.remove(idx) + } + + #[inline] + fn retain(&mut self, f: impl FnMut(&mut T) -> bool) { + self.retain(f); + } + + #[inline] + #[track_caller] + fn splice(&mut self, range: impl RangeBounds, replacement: impl IntoIterator) { + let mut i = match range.start_bound() { + Bound::Unbounded => 0, + Bound::Included(&x) => x, + Bound::Excluded(&x) => x + 1, + }; + self.drain(range); + for x in replacement { + self.insert(i, x); + i += 1; + } + } +} + +/// A helper trait for getting a range of items from a sorted slice. 
+pub trait SortedIndex { + type Result: SliceIndex<[T], Output = [T]> + RangeBounds; + fn find_range(self, slice: &[T], find: impl FnMut(&[T], &Q) -> Result) -> Self::Result; +} +impl SortedIndex for RangeFull { + type Result = RangeFull; + fn find_range(self, _: &[T], _: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + self + } +} +impl SortedIndex for Range<&Q> { + type Result = Range; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let (Ok(end) | Err(end)) = find(slice, self.end); + Range { start, end } + } +} +impl SortedIndex for range::Range<&Q> { + type Result = range::Range; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let (Ok(end) | Err(end)) = find(slice, self.end); + range::Range { start, end } + } +} +impl SortedIndex for RangeInclusive<&Q> { + type Result = RangeInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, *self.start()); + let end = match find(slice, *self.end()) { + Ok(i) => i + 1, + Err(i) => i, + }; + RangeInclusive::new(start, end) + } +} +impl SortedIndex for range::RangeInclusive<&Q> { + type Result = range::RangeInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + let end = match find(slice, self.end) { + Ok(i) => i + 1, + Err(i) => i, + }; + range::RangeInclusive { start, end } + } +} +impl SortedIndex for RangeFrom<&Q> { + type Result = RangeFrom; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + RangeFrom { start } + } +} +impl SortedIndex for 
range::RangeFrom<&Q> { + type Result = range::RangeFrom; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(start) | Err(start)) = find(slice, self.start); + range::RangeFrom { start } + } +} +impl SortedIndex for RangeTo<&Q> { + type Result = RangeTo; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let (Ok(end) | Err(end)) = find(slice, self.end); + RangeTo { end } + } +} +impl SortedIndex for RangeToInclusive<&Q> { + type Result = RangeToInclusive; + #[inline] + fn find_range(self, slice: &[T], mut find: impl FnMut(&[T], &Q) -> Result) -> Self::Result { + let end = match find(slice, self.end) { + Ok(i) => i + 1, + Err(i) => i, + }; + RangeToInclusive { end } + } +} + +/// Gets the total number of steps in a range. +pub trait RangeLen { + fn len(&self) -> usize; +} +impl RangeLen for Range { + #[inline] + fn len(&self) -> usize { + self.end - self.start + } +} +impl RangeLen for range::Range { + #[inline] + fn len(&self) -> usize { + self.end - self.start + } +} +impl RangeLen for RangeInclusive { + #[inline] + fn len(&self) -> usize { + match self.end_bound() { + Bound::Excluded(&x) => x - *self.start(), + Bound::Included(&x) => x - *self.start() + 1, + Bound::Unbounded => unreachable!(), + } + } +} +impl RangeLen for range::RangeInclusive { + #[inline] + fn len(&self) -> usize { + self.end - self.start + 1 + } +} +impl RangeLen for RangeTo { + #[inline] + fn len(&self) -> usize { + self.end + } +} +impl RangeLen for RangeToInclusive { + #[inline] + fn len(&self) -> usize { + self.end + 1 + } +} + +/// Removes items from the current range which overlap with another. +/// +/// The other range must start either before or at the current range, or it must end either at or +/// after the current range. i.e. 
`other.start <= self.start || self.end <= other.end` +pub trait SubtractRangeItemsFromEdge { + fn subtract_range_items_from_edge(self, other: Range) -> Range; +} +impl SubtractRangeItemsFromEdge for Range { + #[inline] + fn subtract_range_items_from_edge(self, other: Range) -> Range { + debug_assert!(other.start <= self.start || self.end <= other.end); + let (start, end) = if other.start <= self.start { + (self.start.max(other.end).min(self.end), self.end) + } else { + (self.start, self.end.min(other.start)) + }; + Range { start, end } + } +} +impl SubtractRangeItemsFromEdge for range::Range { + #[inline] + fn subtract_range_items_from_edge(self, other: Range) -> Range { + debug_assert!(other.start <= self.start || self.end <= other.end); + let (start, end) = if other.start <= self.start { + (self.start.max(other.end).min(self.end), self.end) + } else { + (self.start, self.end.min(other.start)) + }; + Range { start, end } + } +} +impl SubtractRangeItemsFromEdge for RangeTo { + #[inline] + fn subtract_range_items_from_edge(self, other: Range) -> Range { + debug_assert!(other.start == 0 || self.end <= other.end); + let (start, end) = if other.start == 0 { + (other.end.min(self.end), self.end) + } else { + (0, self.end.min(other.start)) + }; + Range { start, end } + } +} + +/// Applies an exclusive upper limit to any explicit bounds in a range. 
+pub trait LimitExplicitRangeBounds { + type Output: Clone + + SliceIndex<[usize], Output = [usize]> + + RangeBounds + + LimitExplicitRangeBounds + + IntoRangeWithStride; + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output; +} +impl LimitExplicitRangeBounds for usize { + type Output = Range; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + if self < limit { self..self + 1 } else { limit..limit } + } +} +impl LimitExplicitRangeBounds for RangeFull { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, _: usize) -> Self::Output { + self + } +} +impl LimitExplicitRangeBounds for Range { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + end: self.end.min(limit), + } + } +} +impl LimitExplicitRangeBounds for range::Range { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + end: self.end.min(limit), + } + } +} +impl LimitExplicitRangeBounds for RangeInclusive { + type Output = Range; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Range { + start: (*self.start()).min(limit), + end: if *self.end() < limit { + match self.end_bound() { + Bound::Included(&x) => x + 1, + Bound::Excluded(&x) => x, + Bound::Unbounded => limit, + } + } else { + limit + }, + } + } +} +impl LimitExplicitRangeBounds for range::RangeInclusive { + type Output = range::Range; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + range::Range { + start: self.start.min(limit), + end: if self.end < limit { self.end + 1 } else { limit }, + } + } +} +impl LimitExplicitRangeBounds for RangeTo { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Self { + end: self.end.min(limit), + } + } +} +impl LimitExplicitRangeBounds 
for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + RangeTo { + end: if self.end < limit { self.end + 1 } else { limit }, + } + } +} +impl LimitExplicitRangeBounds for RangeFrom { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + } + } +} +impl LimitExplicitRangeBounds for range::RangeFrom { + type Output = Self; + #[inline] + fn limit_explicit_range_bounds(self, limit: usize) -> Self::Output { + Self { + start: self.start.min(limit), + } + } +} + +/// Adjusts a range/index to contain each item as though they were `n` steps apart (i.e. multiplies +/// the bounds by `n`). +pub trait IntoRangeWithStride { + type Output: Clone + + SliceIndex<[usize], Output = [usize]> + + RangeBounds + + LimitExplicitRangeBounds + + IntoRangeWithStride; + fn into_range_with_stride(self, stride: u32) -> Self::Output; +} +impl IntoRangeWithStride for usize { + type Output = Range; + fn into_range_with_stride(self, stride: u32) -> Self::Output { + Range { + start: self, + end: self + stride as usize, + } + } +} +impl IntoRangeWithStride for RangeFull { + type Output = Self; + #[inline] + fn into_range_with_stride(self, _: u32) -> Self::Output { + self + } +} +impl IntoRangeWithStride for Range { + type Output = Self; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + Range { + start: self.start * stride as usize, + end: self.end * stride as usize, + } + } +} +impl IntoRangeWithStride for range::Range { + type Output = Self; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + range::Range { + start: self.start * stride as usize, + end: self.end * stride as usize, + } + } +} +impl IntoRangeWithStride for RangeInclusive { + type Output = Range; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + Range { + start: *self.start() * stride 
as usize, + end: (*self.end() + 1) * stride as usize, + } + } +} +impl IntoRangeWithStride for range::RangeInclusive { + type Output = range::Range; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + range::Range { + start: self.start * stride as usize, + end: (self.end + 1) * stride as usize, + } + } +} +impl IntoRangeWithStride for RangeFrom { + type Output = Self; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + RangeFrom { + start: self.start * stride as usize, + } + } +} +impl IntoRangeWithStride for range::RangeFrom { + type Output = Self; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + range::RangeFrom { + start: self.start * stride as usize, + } + } +} +impl IntoRangeWithStride for RangeTo { + type Output = Self; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + RangeTo { + end: self.end * stride as usize, + } + } +} +impl IntoRangeWithStride for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn into_range_with_stride(self, stride: u32) -> Self::Output { + RangeTo { + end: (self.end + 1) * stride as usize, + } + } +} + +/// Splits a range/index into a range before `n`, and the number of steps after `n`. 
+pub trait SplitRangeAt {
+    type Output: Clone
+        + SliceIndex<[usize], Output = [usize]>
+        + RangeBounds<usize>
+        + RangeLen
+        + LimitExplicitRangeBounds
+        + SubtractRangeItemsFromEdge
+        + IntoRangeWithStride;
+    /// Splits `self` into the sub-range lying before `idx` and the number of
+    /// steps lying at or after `idx`.
+    fn split_range_at(self, idx: usize) -> (Self::Output, usize);
+}
+impl SplitRangeAt for usize {
+    type Output = Range<usize>;
+    #[inline]
+    fn split_range_at(self, idx: usize) -> (Self::Output, usize) {
+        // A single index is either entirely before `idx` (one-element range,
+        // nothing left over) or entirely at/after it (empty range, one step over).
+        if self < idx { (self..self + 1, 0) } else { (0..0, 1) }
+    }
+}
+impl SplitRangeAt for Range<usize> {
+    type Output = Range<usize>;
+    fn split_range_at(self, idx: usize) -> (Self::Output, usize) {
+        // Clamp both bounds to `idx`; whatever length was clipped off is the
+        // number of steps at or after `idx`.
+        let range = Range {
+            start: self.start.min(idx),
+            end: self.end.min(idx),
+        };
+        let extra = self
+            .end
+            .wrapping_sub(self.start)
+            .wrapping_sub(range.end.wrapping_sub(range.start));
+        // `extra` is computed with wrapping arithmetic, so it is only meaningful
+        // when the input range is non-empty; an empty (or inverted) range has no
+        // trailing steps. Note the non-empty arm must yield `extra`, matching the
+        // `usize` impl above (e.g. `5..6` split at `3` is `(3..3, 1)`).
+        (range, if self.end > self.start { extra } else { 0 })
+    }
+}
+impl SplitRangeAt for range::Range<usize> {
+    type Output = range::Range<usize>;
+    fn split_range_at(self, idx: usize) -> (Self::Output, usize) {
+        let range = range::Range {
+            start: self.start.min(idx),
+            end: self.end.min(idx),
+        };
+        let extra = self
+            .end
+            .wrapping_sub(self.start)
+            .wrapping_sub(range.end.wrapping_sub(range.start));
+        // Same guard as the `ops::Range` impl: only a non-empty range can have
+        // steps at or after `idx`.
+        (range, if self.end > self.start { extra } else { 0 })
+    }
+}
+impl SplitRangeAt for RangeInclusive<usize> {
+    type Output = Range<usize>;
+    fn split_range_at(self, idx: usize) -> (Self::Output, usize) {
+        let range = Range {
+            start: (*self.start()).min(idx),
+            end: (*self.end()).min(idx),
+        };
+        let extra = (*self.end())
+            .wrapping_sub(*self.start())
+            .wrapping_sub(range.end.wrapping_sub(range.start));
+        // Can overflow if `count == 0` and the range is `0..=usize::MAX`.
+        // Don't do that.
+ (range, if self.is_empty() { 0 } else { extra + 1 }) + } +} +impl SplitRangeAt for range::RangeInclusive { + type Output = range::Range; + fn split_range_at(self, idx: usize) -> (Self::Output, usize) { + let range = range::Range { + start: self.start.min(idx), + end: self.end.min(idx), + }; + let extra = self + .end + .wrapping_sub(self.start) + .wrapping_sub(range.end.wrapping_sub(range.start)); + // Can overflow if `count == 0` and the range is `0..=usize::MAX`. + // Don't do that. + (range, if self.start > self.end { 0 } else { extra + 1 }) + } +} +impl SplitRangeAt for RangeTo { + type Output = RangeTo; + #[inline] + fn split_range_at(self, idx: usize) -> (Self::Output, usize) { + let range: RangeTo = RangeTo { end: self.end.min(idx) }; + let extra = self.end.wrapping_sub(range.end); + (range, extra) + } +} +impl SplitRangeAt for RangeToInclusive { + type Output = RangeTo; + #[inline] + fn split_range_at(self, idx: usize) -> (Self::Output, usize) { + let range = RangeTo { end: self.end.min(idx) }; + // Can overflow if `count == 0` and the range is `..=usize::MAX`. + // Don't do that. 
+ let extra = self.end.wrapping_sub(range.end) + 1; + (range, extra) + } +} diff --git a/clippy_data_structures/src/vec_map.rs b/clippy_data_structures/src/vec_map.rs new file mode 100644 index 000000000000..3751ff33c462 --- /dev/null +++ b/clippy_data_structures/src/vec_map.rs @@ -0,0 +1,218 @@ +use crate::sorted_set::merge_sorted; +use crate::{AsSlice, DerefSlice, VecBase}; +use core::borrow::{Borrow, BorrowMut}; +use core::mem::{self, transmute}; +use core::ops::Deref; + +#[derive(Default, Clone, PartialEq, Eq)] +#[repr(transparent)] +pub struct VecMap { + data: T, +} +impl VecMap +where + T: Default, +{ + #[inline] + pub fn new() -> Self { + Self { data: T::default() } + } +} +impl VecMap +where + T: ?Sized + AsSlice, +{ + #[inline] + pub fn len(&self) -> usize { + self.data.borrow().len() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.data.borrow().len() == 0 + } + + #[inline] + pub fn as_slice(&self) -> &VecMap<[(K, V)]> { + // SAFETY: `Sorted`` is a transparent wrapper around `T`. + unsafe { transmute::<&[(K, V)], &VecMap<[(K, V)]>>(self.data.borrow()) } + } + + #[inline] + fn find(&self, item: &Q) -> Result + where + K: Borrow, + Q: Ord + ?Sized, + { + if T::MAX <= 6 { + // Optimization for small `ArrayVec`s. + self.data.borrow().binary_search_by(|x| x.0.borrow().cmp(item)) + } else { + crate::sorted_set::linear_search_by_sorted(self.data.borrow(), |x| x.0.borrow().cmp(item)) + } + } + + #[inline] + pub fn contains(&self, key: &Q) -> bool + where + K: Borrow, + Q: Ord + ?Sized, + { + self.find(key).is_ok() + } + + #[inline] + pub fn get<'a, Q>(&'a self, key: &Q) -> Option<&'a V> + where + K: 'a + Borrow, + Q: Ord + ?Sized, + { + self.find(key).ok().map(|i| &self.data.borrow()[i].1) + } +} +impl VecMap +where + T: ?Sized + AsSlice + AsMut<[(K, V)]>, +{ + #[inline] + pub fn as_mut_slice(&mut self) -> &mut VecMap<[(K, V)]> { + // SAFETY: `Sorted`` is a transparent wrapper around `T`. 
+ unsafe { transmute::<&mut [(K, V)], &mut VecMap<[(K, V)]>>(self.data.as_mut()) } + } + + #[inline] + pub fn get_mut<'a, Q>(&'a mut self, key: &Q) -> Option<&'a mut V> + where + K: 'a + Borrow, + Q: Ord + ?Sized, + { + self.find(key).ok().map(|i| &mut self.data.as_mut()[i].1) + } +} +impl VecMap +where + T: VecBase, +{ + #[inline] + pub fn clear(&mut self) { + self.data.clear(); + } + + #[inline] + pub fn reserve(&mut self, additional: usize) { + self.data.reserve(additional); + } + + pub fn remove(&mut self, key: &Q) -> Option + where + K: Borrow, + Q: Ord + ?Sized, + { + match self.find(key) { + Ok(i) => Some(self.data.remove(i).1), + Err(_) => None, + } + } + + #[inline] + pub fn retain(&mut self, f: impl FnMut(&mut T::Item) -> bool) { + self.data.retain(f); + } +} +impl VecMap +where + T: VecBase + BorrowMut<[(K, V)]>, + K: Ord, +{ + pub fn insert(&mut self, key: K, value: V) -> Option { + match self.find(&key) { + Ok(i) => Some(mem::replace(&mut self.data.borrow_mut()[i].1, value)), + Err(i) => { + self.data.insert(i, (key, value)); + None + }, + } + } +} +impl VecMap +where + T: VecBase + FromIterator<(K, V)>, + K: Ord, +{ + #[inline] + pub fn from_sorted(items: impl IntoIterator) -> Self { + Self { + data: T::from_iter(items), + } + } +} +impl VecMap +where + T: VecBase + Extend<(K, V)> + BorrowMut<[(K, V)]>, + K: Ord, +{ + #[inline] + pub fn insert_sorted(&mut self, items: impl IntoIterator) { + merge_sorted(&mut self.data, items, |x, y| x.0.cmp(&y.0), |x, y| x.1 = y.1); + } + + #[inline] + pub fn merge_sorted(&mut self, items: impl IntoIterator, mut merge: impl FnMut(&mut V, V)) { + merge_sorted(&mut self.data, items, |x, y| x.0.cmp(&y.0), |x, y| merge(&mut x.1, y.1)); + } +} + +impl Deref for VecMap +where + T: AsSlice + DerefSlice + ?Sized, +{ + type Target = VecMap<[(K, V)]>; + #[inline] + fn deref(&self) -> &Self::Target { + self.as_slice() + } +} + +impl Borrow> for VecMap +where + T: AsSlice + DerefSlice + ?Sized, +{ + #[inline] + fn borrow(&self) 
-> &VecMap<[(K, V)]> { + self.as_slice() + } +} + +impl IntoIterator for VecMap +where + T: IntoIterator, +{ + type Item = T::Item; + type IntoIter = T::IntoIter; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} +impl<'a, T> IntoIterator for &'a VecMap +where + &'a T: IntoIterator, +{ + type Item = <&'a T as IntoIterator>::Item; + type IntoIter = <&'a T as IntoIterator>::IntoIter; + #[inline] + fn into_iter(self) -> Self::IntoIter { + (&self.data).into_iter() + } +} +impl<'a, T> IntoIterator for &'a mut VecMap +where + &'a mut T: IntoIterator, +{ + type Item = <&'a mut T as IntoIterator>::Item; + type IntoIter = <&'a mut T as IntoIterator>::IntoIter; + #[inline] + fn into_iter(self) -> Self::IntoIter { + (&mut self.data).into_iter() + } +} diff --git a/clippy_data_structures/src/vec_set.rs b/clippy_data_structures/src/vec_set.rs new file mode 100644 index 000000000000..c87169531ab6 --- /dev/null +++ b/clippy_data_structures/src/vec_set.rs @@ -0,0 +1,291 @@ +use crate::traits::{SortedIndex, VecLike, VecLikeCapacity, VecLikeDedup}; +use crate::{SliceSet, sorted}; +use arrayvec::ArrayVec; +use core::borrow::Borrow; +use core::ops::Deref; +use core::{mem, slice}; +use smallvec::SmallVec; + +trait FindSpec: VecLike { + fn find(list: &[Self::Item], item: &Q) -> Result + where + Self::Item: Borrow, + Q: ?Sized + Ord; +} +impl FindSpec for T { + #[inline] + default fn find(list: &[Self::Item], item: &Q) -> Result + where + Self::Item: Borrow, + Q: ?Sized + Ord, + { + list.binary_search_by(|x| x.borrow().cmp(item)) + } +} +impl FindSpec for ArrayVec { + #[inline] + fn find(list: &[Self::Item], item: &Q) -> Result + where + Self::Item: Borrow, + Q: ?Sized + Ord, + { + if N <= 6 { + sorted::linear_search_by(list, |x| x.borrow().cmp(item)) + } else { + list.binary_search_by(|x| x.borrow().cmp(item)) + } + } +} + +/// Wrapper type around a `Vec`-like type where all items are unique and sorted. 
+#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(transparent)] +pub struct VecSet { + data: T, +} +impl VecSet> { + #[inline] + pub const fn new() -> Self { + Self { data: Vec::new() } + } +} +impl VecSet> { + #[inline] + pub const fn new() -> Self { + Self { + data: SmallVec::new_const(), + } + } +} +impl VecSet> { + #[inline] + pub const fn new() -> Self { + Self { + data: ArrayVec::new_const(), + } + } +} +impl VecSet { + /// Creates a new, empty vec with the given capacity. + #[inline] + pub fn with_capacity(size: usize) -> Self { + Self { + data: T::with_capacity(size), + } + } + + /// Reserves space for at least `additional` more items. + #[inline] + pub fn reserve(&mut self, additional: usize) { + self.data.reserve(additional); + } +} +impl> VecSet { + /// Assumes the given slice is sorted with no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice is unsorted or contains + /// duplicates. + #[inline] + pub fn from_sorted(data: T) -> Self { + debug_assert!(sorted::is_slice_set(data.borrow())); + Self { data } + } + + /// Sorts the given slice and assumes no duplicates. + /// + /// Will panic with debug assertions enabled if the given slice contains duplicates. + #[inline] + pub fn from_unsorted(mut data: T) -> Self { + data.borrow_mut().sort(); + debug_assert!(sorted::is_slice_set(data.borrow())); + Self { data } + } + + /// Inserts the given item into the set. + /// + /// If the item already exists in the set, it will be replaced by the new item and returned. + /// Otherwise this will return `None`. + pub fn insert(&mut self, item: T::Item) -> Option { + match ::find(self.data.borrow(), &item) { + Ok(i) => Some(mem::replace(&mut self.data.borrow_mut()[i], item)), + Err(i) => { + self.data.insert(i, item); + None + }, + } + } + + /// Inserts the given item into the set if there is sufficient capacity to do so. 
+ /// + /// If the item already exists in the set, it will be replaced by the new item and returned. + /// Otherwise this will return `None`. + pub fn insert_within_capacity(&mut self, item: T::Item) -> Result, T::Item> { + match ::find(self.data.borrow(), &item) { + Ok(i) => Ok(Some(mem::replace(&mut self.data.borrow_mut()[i], item))), + Err(i) => self.data.insert_within_capacity(i, item).map(|()| None), + } + } + + #[inline] + pub fn is_superset_of(&self, other: &SliceSet) -> bool { + other.is_subset_of(self) + } +} +impl VecSet { + /// Checks if the set contains the given value. + #[inline] + pub fn contains(&self, item: &Q) -> bool + where + T::Item: Borrow, + Q: ?Sized + Ord, + { + ::find(self.data.borrow(), item).is_ok() + } + + /// Gets the specified item from the set. + #[inline] + pub fn get(&self, item: &Q) -> Option<&T::Item> + where + T::Item: Borrow, + Q: ?Sized + Ord, + { + match ::find(self.data.borrow(), item) { + Ok(i) => Some(&self.data.borrow()[i]), + Err(_) => None, + } + } + + /// Gets a subset of the current set which . + #[inline] + pub fn get_range(&self, range: impl SortedIndex) -> &SliceSet + where + T::Item: Borrow, + Q: Ord + ?Sized, + { + SliceSet::from_sorted_unchecked( + &self.data.borrow()[range.find_range(&self.data.borrow(), |list, item| ::find(list, item))], + ) + } + + /// Removes all items from the set. + #[inline] + pub fn clear(&mut self) { + self.data.clear(); + } + + #[inline] + pub fn drain(&mut self, range: impl SortedIndex) -> T::Drain<'_> + where + T::Item: Borrow, + Q: Ord + ?Sized, + { + self.data + .drain(range.find_range(&self.data.borrow(), |list, item| ::find(list, item))) + } + + /// Removes the given item from the set. Returns `None` if the set does not contain the item. 
+ pub fn remove(&mut self, item: &Q) -> Option + where + T::Item: Borrow, + Q: Ord + ?Sized, + { + match ::find(self.data.borrow(), item) { + Ok(i) => Some(self.data.remove(i)), + Err(_) => None, + } + } + + /// Retains only the items for which the given predicate returns `true`. + #[inline] + pub fn retain(&mut self, f: impl FnMut(&mut T::Item) -> bool) { + self.data.retain(f); + } +} +impl + Extend> VecSet { + /// Replaces the contents of this set with the union of two sets. + #[inline] + pub fn replace_with_union(&mut self, xs: impl IntoIterator, ys: impl IntoIterator) { + self.clear(); + let xs = xs.into_iter(); + let ys = ys.into_iter(); + self.reserve(xs.size_hint().0 + ys.size_hint().0); + sorted::fill_empty_from_iter_union(&mut self.data, xs, ys, |x, y| x.cmp(y)); + debug_assert!(sorted::is_slice_set(self.data.borrow())); + } +} + +impl> VecSet { + /// Sorts and removes duplicates from the given vec. + #[inline] + pub fn from_unsorted_dedup(mut data: T) -> Self { + data.borrow_mut().sort(); + data.dedup(); + Self { data } + } +} + +impl + FromIterator> VecSet { + #[inline] + pub fn from_sorted_iter(iter: impl IntoIterator) -> Self { + Self::from_sorted(T::from_iter(iter)) + } + + #[inline] + pub fn from_unsorted_iter(iter: impl IntoIterator) -> Self { + Self::from_unsorted(T::from_iter(iter)) + } +} +impl + FromIterator> VecSet { + #[inline] + pub fn from_unsorted_iter_dedup(iter: impl IntoIterator) -> Self { + Self::from_unsorted_dedup(T::from_iter(iter)) + } +} + +impl + Extend> VecSet { + #[inline] + pub fn extend_sorted(&mut self, iter: impl IntoIterator) { + sorted::union(&mut self.data, iter.into_iter(), |x, y| x.cmp(y), |x, y| *x = y); + debug_assert!(sorted::is_slice_set(self.data.borrow())); + } +} + +impl Deref for VecSet { + type Target = SliceSet; + #[inline] + fn deref(&self) -> &Self::Target { + SliceSet::from_sorted_unchecked(self.data.borrow()) + } +} +impl Borrow> for VecSet { + #[inline] + fn borrow(&self) -> &SliceSet { + 
SliceSet::from_sorted_unchecked(self.data.borrow()) + } +} +impl Borrow<[T::Item]> for VecSet { + #[inline] + fn borrow(&self) -> &[T::Item] { + self.data.borrow() + } +} + +impl IntoIterator for VecSet +where + T: VecLike + IntoIterator::Item>, +{ + type Item = ::Item; + type IntoIter = ::IntoIter; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.data.into_iter() + } +} +impl<'a, T: VecLike> IntoIterator for &'a VecSet { + type Item = &'a T::Item; + type IntoIter = slice::Iter<'a, T::Item>; + #[inline] + fn into_iter(self) -> Self::IntoIter { + self.data.borrow().iter() + } +} diff --git a/clippy_lints/Cargo.toml b/clippy_lints/Cargo.toml index d36b6ae9c953..16e60927793d 100644 --- a/clippy_lints/Cargo.toml +++ b/clippy_lints/Cargo.toml @@ -14,6 +14,8 @@ edition = "2024" arrayvec = { version = "0.7", default-features = false } cargo_metadata = "0.18" clippy_config = { path = "../clippy_config" } +clippy_data_structures = { path = "../clippy_data_structures" } +clippy_mir = { path = "../clippy_mir" } clippy_utils = { path = "../clippy_utils" } itertools = "0.12" quine-mc_cluskey = "0.2" diff --git a/clippy_lints/src/assigning_clones.rs b/clippy_lints/src/assigning_clones.rs index bf376eea70dc..64d1d81f9cc1 100644 --- a/clippy_lints/src/assigning_clones.rs +++ b/clippy_lints/src/assigning_clones.rs @@ -156,7 +156,7 @@ fn clone_source_borrows_from_dest(cx: &LateContext<'_>, lhs: &Expr<'_>, call_spa let Some(mir) = enclosing_mir(cx.tcx, lhs.hir_id) else { return false; }; - let PossibleBorrowerMap { map: borrow_map, .. } = PossibleBorrowerMap::new(cx, mir); + let PossibleBorrowerMap { map: borrow_map, .. } = PossibleBorrowerMap::new(cx.tcx, cx.typing_env(), mir); // The operation `dest = src.to_owned()` in MIR is split up across 3 blocks *if* the type has `Drop` // code. For types that don't, the second basic block is simply skipped. 
diff --git a/clippy_lints/src/lib.rs b/clippy_lints/src/lib.rs index 9e1df7881ce6..ff5363cb037a 100644 --- a/clippy_lints/src/lib.rs +++ b/clippy_lints/src/lib.rs @@ -1,6 +1,8 @@ +#![feature(array_chunks)] #![feature(array_windows)] #![feature(binary_heap_into_iter_sorted)] #![feature(box_patterns)] +#![feature(cmp_minmax)] #![feature(macro_metavar_expr_concat)] #![feature(f128)] #![feature(f16)] @@ -32,6 +34,7 @@ // FIXME: switch to something more ergonomic here, once available. // (Currently there is no way to opt into sysroot crates without `extern crate`.) +extern crate indexmap; extern crate pulldown_cmark; extern crate rustc_abi; extern crate rustc_arena; @@ -50,6 +53,7 @@ extern crate rustc_infer; extern crate rustc_lexer; extern crate rustc_lint; extern crate rustc_middle; +extern crate rustc_mir_dataflow; extern crate rustc_parse; extern crate rustc_parse_format; extern crate rustc_resolve; diff --git a/clippy_lints/src/needless_borrows_for_generic_args.rs b/clippy_lints/src/needless_borrows_for_generic_args.rs index f686cc912ddb..2997c9b23afb 100644 --- a/clippy_lints/src/needless_borrows_for_generic_args.rs +++ b/clippy_lints/src/needless_borrows_for_generic_args.rs @@ -362,7 +362,10 @@ fn referent_used_exactly_once<'tcx>( .last() .is_none_or(|&(local_def_id, _)| local_def_id != body_owner_local_def_id) { - possible_borrowers.push((body_owner_local_def_id, PossibleBorrowerMap::new(cx, mir))); + possible_borrowers.push(( + body_owner_local_def_id, + PossibleBorrowerMap::new(cx.tcx, cx.typing_env(), mir), + )); } let possible_borrower = &mut possible_borrowers.last_mut().unwrap().1; // If `only_borrowers` were used here, the `copyable_iterator::warn` test would fail. 
The reason is diff --git a/clippy_lints/src/redundant_clone.rs b/clippy_lints/src/redundant_clone.rs index e57b8cc2d84e..995aab706e2e 100644 --- a/clippy_lints/src/redundant_clone.rs +++ b/clippy_lints/src/redundant_clone.rs @@ -1,26 +1,30 @@ -use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then}; +use clippy_data_structures::bit_slice::WordBitIter; +use clippy_data_structures::{BitSlice, BitSlice2d, GrowableBitSet2d, SliceSet, bit_slice, move_within_slice}; +use clippy_mir::analysis::{Analysis, BlockOrderMap, OrderedBlock, WorkQueue, get_body_edges, run_analysis}; +use clippy_mir::projection::{self, PlaceFilter, ResolvedPlace as _, Resolver as _}; +use clippy_mir::value_tracking::Visitor as _; +use clippy_mir::{childless_projection, value_tracking}; +use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::fn_has_unsatisfiable_preds; -use clippy_utils::mir::{LocalUsage, PossibleBorrowerMap, visit_local_usage}; -use clippy_utils::source::SpanRangeExt; -use clippy_utils::ty::{has_drop, is_copy, is_type_diagnostic_item, is_type_lang_item, walk_ptrs_ty_depth}; -use rustc_errors::Applicability; +use clippy_utils::mir::PossibleBorrowerMap; +use clippy_utils::ty::{implements_trait_with_env, is_type_lang_item}; +use core::ops::Range; +use core::{iter, mem}; +use rustc_arena::DroplessArena; +use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, FnDecl, LangItem, def_id}; +use rustc_hir::{self as hir, FnDecl, LangItem, Mutability}; +use rustc_index::{Idx, IndexSlice, IndexVec}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::mir; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::mir::{ + BasicBlock, BasicBlockData, Body, BorrowKind, Local, Location, Operand, Place, ProjectionElem, START_BLOCK, + SourceInfo, SourceScope, TerminatorKind, +}; +use rustc_middle::ty::{self, AliasTyKind, Ty, TyCtxt, TypingEnv}; +use rustc_mir_dataflow::lattice::{FlatSet, 
JoinSemiLattice}; use rustc_session::declare_lint_pass; use rustc_span::def_id::LocalDefId; -use rustc_span::{BytePos, Span, sym}; - -macro_rules! unwrap_or_continue { - ($x:expr) => { - match $x { - Some(x) => x, - None => continue, - } - }; -} +use rustc_span::{DUMMY_SP, Span, sym}; declare_clippy_lint! { /// ### What it does @@ -62,13 +66,12 @@ declare_clippy_lint! { declare_lint_pass!(RedundantClone => [REDUNDANT_CLONE]); impl<'tcx> LateLintPass<'tcx> for RedundantClone { - #[expect(clippy::too_many_lines)] fn check_fn( &mut self, cx: &LateContext<'tcx>, _: FnKind<'tcx>, _: &'tcx FnDecl<'_>, - _: &'tcx Body<'_>, + _: &'tcx hir::Body<'_>, _: Span, def_id: LocalDefId, ) { @@ -76,329 +79,1696 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone { if fn_has_unsatisfiable_preds(cx, def_id.to_def_id()) { return; } + let body = cx.tcx.optimized_mir(def_id.to_def_id()); + if body + .basic_blocks + .iter() + .all(|block| get_clone_call(cx, body, block).is_none()) + { + return; + } - let mir = cx.tcx.optimized_mir(def_id.to_def_id()); + // Check for clone calls and get the type and locals involved with each. 
+ let mut cloned_tys = FxHashSet::default(); + let arena = DroplessArena::default(); + let clone_calls = + IndexSlice::::from_raw(arena.alloc_from_iter(body.basic_blocks.iter().map(|block| { + get_clone_call(cx, body, block).map(|(ty, call)| { + cloned_tys.insert(ty); + call + }) + }))); - let mut possible_borrower = PossibleBorrowerMap::new(cx, mir); + let block_map = BlockOrderMap::new_reverse_postorder(&arena, body); + let mut work_queue = WorkQueue::new(&arena, body); + let place_filter = PlaceFilter::new_raw_borrow_filter(&arena, body); + let body_edges = get_body_edges(&arena, body, &block_map); - for (bb, bbdata) in mir.basic_blocks.iter_enumerated() { - let terminator = bbdata.terminator(); + let Some((mut ref_target_analysis, mut tmp_state, states)) = RefTargetAnalysis::new( + cx, + &arena, + body, + def_id, + &block_map, + &place_filter, + &cloned_tys, + clone_calls, + ) else { + return; + }; + run_analysis( + &mut work_queue, + body_edges, + states, + &mut tmp_state, + &mut ref_target_analysis, + ); + if ref_target_analysis.clone_srcs.iter().all(Option::is_none) { + return; + } - if terminator.source_info.span.from_expansion() { - continue; + let Some((mut clone_analysis, mut tmp_state, mut states)) = CloneAnalysis::new( + cx, + body, + &arena, + def_id, + &block_map, + &place_filter, + &cloned_tys, + clone_calls, + ref_target_analysis.clone_srcs, + ) else { + return; + }; + run_analysis( + &mut work_queue, + body_edges, + &mut states, + &mut tmp_state, + &mut clone_analysis, + ); + + let mut idx = 0; + let mut linted = Vec::new(); + for &(mut word) in &clone_analysis.required_reads.words { + for _ in 0..(bit_slice::WORD_BITS / 2).min(clone_analysis.clone_info.len() - idx) { + let clone = CloneIdx::from_usize(idx); + let info = &clone_analysis.clone_info[clone]; + if (word | CloneValue::bit_pair(matches!(info.can_move_from_src, CanMove::No), false)) & 0b11 != 0b11 { + linted.push((clone, CloneValue::from_bit_pair(word), info.source_info)); + } + idx += 
1; + word >>= 2; } + } + linted.sort_by_key(|&(_, _, info)| info.span); + for (_, _, info) in linted { + span_lint_hir_and_then( + cx, + REDUNDANT_CLONE, + body.source_scopes[info.scope] + .local_data + .as_ref() + .unwrap_crate_local() + .lint_root, + info.span, + "redundant clone", + |_| {}, + ); + } + } +} - // Give up on loops - if terminator.successors().any(|s| s == bb) { - continue; +#[derive(Clone, Copy)] +struct CloneCall<'tcx> { + src: Place<'tcx>, + dst: Place<'tcx>, +} + +fn get_clone_call<'tcx>( + cx: &LateContext<'tcx>, + body: &Body<'tcx>, + block: &BasicBlockData<'tcx>, +) -> Option<(Ty<'tcx>, CloneCall<'tcx>)> { + if let TerminatorKind::Call { + func, + args, + destination, + .. + } = &block.terminator().kind + && let [arg] = &**args + && let Operand::Move(src) | Operand::Copy(src) = arg.node + && let ty::FnDef(fn_id, fn_args) = *func.ty(body, cx.tcx).kind() + && let diag_name = if cx.tcx.lang_items().clone_fn() == Some(fn_id) { + None + } else if let Some(diag_name) = cx.tcx.get_diagnostic_name(fn_id) + && matches!(diag_name, sym::to_owned_method | sym::to_string_method) + { + Some(diag_name) + } else { + return None; + } + && let fn_sig = cx + .tcx + .instantiate_bound_regions_with_erased(cx.tcx.fn_sig(fn_id).instantiate(cx.tcx, fn_args)) + && let [arg_ty, res_ty] = **fn_sig.inputs_and_output + && let src_ty = cx + .tcx + .try_normalize_erasing_regions(cx.typing_env(), arg_ty) + .unwrap_or(arg_ty) + && let ty::Ref(_, src_ty, Mutability::Not) = *src_ty.kind() + && match diag_name { + Some(sym::to_owned_method) => { + cx.tcx + .try_normalize_erasing_regions(cx.typing_env(), res_ty) + .unwrap_or(res_ty) + == src_ty + }, + Some(sym::to_string_method) => is_type_lang_item(cx, src_ty, LangItem::String), + None => true, + _ => unreachable!(), + } + // Assume cloning types without drop glue is either trivial or has side-effects. + // Don't lint non-freeze types since we can't detect mutation properly. 
+ // Don't lint significant drop types since a clone of those will have side-effects. + // Always lint type parameters and projections even if it will lead to false positives. + && (matches!( + src_ty.kind(), + ty::Param(_) | ty::Alias(AliasTyKind::Projection | AliasTyKind::Opaque, _) + ) || (src_ty.needs_drop(cx.tcx, cx.typing_env()) + && src_ty.is_freeze(cx.tcx, cx.typing_env()) + && !src_ty.has_significant_drop(cx.tcx, cx.typing_env()))) + && src.projection.iter().all(|x| matches!(x, ProjectionElem::Field { .. })) + && destination + .projection + .iter() + .all(|x| matches!(x, ProjectionElem::Field { .. })) + { + Some((src_ty, CloneCall { src, dst: *destination })) + } else { + None + } +} + +#[derive(Clone, Copy)] +enum CanMove { + Yes, + Take, + No, +} + +#[derive(Clone, Copy)] +struct CloneInfo<'body> { + source_info: &'body SourceInfo, + can_move_from_src: CanMove, +} +static DUMMY_CLONE_INFO: CloneInfo<'_> = CloneInfo { + source_info: &SourceInfo { + span: DUMMY_SP, + scope: SourceScope::ZERO, + }, + can_move_from_src: CanMove::Yes, +}; + +/// A pair of projection indices linked by a clone call. +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +struct Link { + original: projection::Idx, + clone: projection::Idx, +} + +rustc_index::newtype_index! { + /// Index in the link interner representing a specific link. + #[orderable] + struct LinkIdx {} +} + +/// Interner to compress links into a single value. +/// +/// These links need to be created dynamically as new links can be created during the analysis by +/// moving a linked value to a new place. 
+#[derive(Default)] +struct LinkInterner { + links: FxIndexSet, +} +impl LinkInterner { + fn intern(&mut self, link: Link) -> LinkIdx { + LinkIdx::from_usize(self.links.insert_full(link).0) + } + + fn get(&self, idx: LinkIdx) -> Link { + self.links[idx.as_usize()] + } + + fn iter(&self) -> indexmap::set::Iter<'_, Link> { + self.links.iter() + } + + fn idx_range(&self) -> Range { + LinkIdx::ZERO..LinkIdx::from_usize(self.links.len()) + } +} + +/// Doubles the bit index of a word to give a bit index in a pair of words. +fn double_bit_idx(i: u32) -> (usize, u32) { + ( + i as usize >> (bit_slice::WORD_BITS.trailing_zeros() - 1), + (i << 1) & (bit_slice::Word::BITS - 1), + ) +} + +rustc_index::newtype_index! { + #[orderable] + struct Value {} +} +rustc_index::newtype_index! { + /// Index representing a specific clone call in the body. + #[orderable] + #[max = 0x7fff_ffff] + struct CloneIdx {} +} + +/// Which side of a clone a specific value is from. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +enum CloneValue { + Original = 0, + Clone = 1, +} +impl CloneValue { + fn with_clone(self, idx: CloneIdx) -> CloneWithValue { + CloneWithValue::from_u32((idx.as_u32() << 1) | self as u32) + } + + fn bit_pair(original: bool, clone: bool) -> bit_slice::Word { + bit_slice::Word::from(original) | (bit_slice::Word::from(clone) << 1) + } + + fn from_bit_pair(word: bit_slice::Word) -> (bool, bool) { + (word & 1 != 0, word & 2 != 0) + } +} + +rustc_index::newtype_index! { + /// A combination of `CloneIdx` and `CloneValue`. + #[orderable] + #[max = 0xffff_ffff] + struct CloneWithValue {} +} +#[expect(dead_code)] +impl CloneWithValue { + fn clone_idx(self) -> CloneIdx { + CloneIdx::from_u32(self.as_u32() >> 1) + } + + fn clone_value(self) -> CloneValue { + if self.as_u32() & 1 != 0 { + CloneValue::Clone + } else { + CloneValue::Original + } + } + + fn invert_value(self) -> Self { + Self::from_u32(self.as_u32() ^ 1) + } +} + +/// How a value was used. 
Used for diagnostic output only. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +enum UseKind { + Diverged, + Dropped, +} + +/// All information about the use of a value. Used for diagnostic output only. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct UseInfo { + clone: CloneWithValue, + kind: UseKind, + sp: Span, +} +impl UseInfo { + fn new(clone: CloneWithValue, kind: UseKind, sp: Span) -> Self { + Self { clone, kind, sp } + } +} + +struct Predecessors<'a> { + /// The predecessor blocks, or an empty set if there are less the two predecessors. + predecessors: &'a SliceSet, + /// The value to use for the first projection when predecessor blocks have different values. + /// Subsequent projections will use ascending values. + first_value: Value, +} +impl<'a> Predecessors<'a> { + fn for_body( + arena: &'a DroplessArena, + body: &Body<'_>, + block_map: &BlockOrderMap<'_>, + projections: &projection::Map<'_>, + ) -> &'a IndexSlice { + let mut next_value = projections.domain_size_u32(); + let predecessors = body.basic_blocks.predecessors(); + IndexSlice::from_raw(arena.alloc_from_iter(block_map.from_ordered().iter().map(|&block| { + let first_value = Value::from_u32(next_value); + next_value += projections.domain_size_u32(); + Predecessors { + predecessors: if predecessors.len() > 1 { + SliceSet::from_unsorted_slice_dedup( + arena.alloc_from_iter(predecessors[block].iter().map(|&block| block_map.to_ordered()[block])), + ) + } else { + SliceSet::empty() + }, + first_value, } + }))) + } +} - let (fn_def_id, arg, arg_ty, clone_ret) = - unwrap_or_continue!(is_call_with_ref_arg(cx, mir, &terminator.kind)); +enum CloneOp { + /// Creates a clone of a specific projection and establishes a link. + Clone { + clone: CloneIdx, + link: LinkIdx, + src: projection::Idx, + dst: projection::Idx, + sp: Span, + }, + /// Copies a child of a projection without establishing a link. 
+ CloneStructure { + src: projection::Idx, + dst: projection::Idx, + sp: Span, + }, + /// Copies a child of a projection without establishing a link. + CloneStructureRange { + src: Range, + dst: projection::Idx, + sp: Span, + }, + Move { + src: projection::Idx, + dst: projection::Idx, + }, + MoveRange { + src: Range, + dst: projection::Idx, + }, + Mutate { + idx: projection::Idx, + value: Value, + sp: Span, + stmt: u32, + }, + MutateRange { + range: Range, + value_start: Value, + sp: Span, + stmt: u32, + }, + /// Mutates a projection or a child of a projection then marks clones as required. + /// + /// Used for mutable borrows or when a child is consumed. + MutateRequired { + idx: projection::Idx, + value: Value, + sp: Span, + stmt: u32, + }, + BorrowMut { + range: Range, + value_start: Value, + sp: Span, + stmt: u32, + }, + Drop { + range: Range, + sp: Span, + }, + Consume { + idx: projection::Idx, + sp: Span, + stmt: u32, + }, + ConsumeRange { + range: Range, + sp: Span, + stmt: u32, + }, + /// Reads a projection or a child of a projection. + Read { + idx: projection::Idx, + }, + /// Reads a projection via it's parent. + ReadStructure { + idx: projection::Idx, + }, + /// Reads a projection via it's parent. 
+ ReadStructureRange { + range: Range, + }, +} - let from_borrow = cx.tcx.lang_items().get(LangItem::CloneFn) == Some(fn_def_id) - || cx.tcx.is_diagnostic_item(sym::to_owned_method, fn_def_id) - || (cx.tcx.is_diagnostic_item(sym::to_string_method, fn_def_id) - && is_type_lang_item(cx, arg_ty, LangItem::String)); +struct CloneOpVisitor<'arena, 'body, 'tcx> { + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + body: &'body Body<'tcx>, + projections: projection::Map<'arena>, + link_interner: LinkInterner, + clone_info: &'arena mut [CloneInfo<'body>], + ops: Vec, + next_value: u32, + next_clone_id: u32, + stmt: u32, +} +impl<'body, 'tcx> CloneOpVisitor<'_, 'body, 'tcx> { + fn visit_clone_call(&mut self, src: Place<'tcx>, dst: Place<'tcx>, source_info: &'body SourceInfo) { + let resolved_src = self.projections.resolve(src); + let resolved_dst = self.projections.resolve(dst); + let (src_start, src_data) = resolved_src.values(); + let (dst_start, dst_data) = resolved_dst.values(); + let sp = source_info.span; + if dst_data.contains_values() || src_data.contains_values() { + if dst_data.has_value && src_data.has_value { + self.clone_info[self.next_clone_id as usize] = CloneInfo { + source_info, + can_move_from_src: { + if let [ref proj @ .., ProjectionElem::Field(_, final_ty)] = **src.projection { + let mut ty = self.body.local_decls[src.local].ty; + let mut proj = proj; + loop { + if ty.ty_adt_def().is_some_and(|adt| adt.destructor(self.tcx).is_some()) { + if let Some(default_trait) = self.tcx.get_diagnostic_item(sym::Default) + && implements_trait_with_env( + self.tcx, + self.typing_env, + final_ty, + default_trait, + None, + &[], + ) + { + break CanMove::Take; + } + break CanMove::No; + } + if let &[ProjectionElem::Field(_, next_ty), ref rest @ ..] 
= proj { + ty = next_ty; + proj = rest; + } else { + break CanMove::Yes; + } + } + } else { + CanMove::Yes + } + }, + }; + self.ops.push(CloneOp::Clone { + clone: CloneIdx::from_u32(self.next_clone_id), + link: self.link_interner.intern(Link { + original: src_start, + clone: dst_start, + }), + src: src_start, + dst: dst_start, + sp, + }); + self.next_clone_id += 1; + } else if dst_data.has_value { + self.visit_mutate_idx(dst_start, sp); + } else if src_data.has_value { + self.ops.push(CloneOp::Read { idx: src_start }); + } + if dst_data != src_data { + value_tracking::copy_place_fields( + self, + value_tracking::Copy, + dst_start, + dst_data, + src_start, + src_data, + sp, + ); + } else if src_data.value_count > u32::from(src_data.has_value) { + self.visit_copy_range( + dst_start.plus(usize::from(src_data.has_value)), + src_start.plus(usize::from(src_data.has_value))..src_start.plus(src_data.value_count as usize), + sp, + ); + } + } + for idx in resolved_src.parents(&self.projections) { + self.visit_read_parent(idx, sp); + } + for idx in resolved_dst.parents(&self.projections) { + self.visit_mutate_parent(idx, sp); + } + } +} +impl<'arena, 'tcx> value_tracking::Visitor<'arena, 'tcx> for CloneOpVisitor<'arena, '_, 'tcx> { + type Resolver = projection::Map<'arena>; + fn resolver(&self) -> &Self::Resolver { + &self.projections + } + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn body(&self) -> &Body<'tcx> { + self.body + } - let from_deref = !from_borrow - && (cx.tcx.is_diagnostic_item(sym::path_to_pathbuf, fn_def_id) - || cx.tcx.is_diagnostic_item(sym::os_str_to_os_string, fn_def_id)); + fn visit_read_idx(&mut self, idx: projection::Idx, _: Span) { + self.ops.push(CloneOp::ReadStructure { idx }); + } + fn visit_read_range(&mut self, range: Range, _: Span) { + self.ops.push(CloneOp::ReadStructureRange { range }); + } - if !from_borrow && !from_deref { - continue; + fn visit_mutate_idx(&mut self, idx: projection::Idx, sp: Span) { + self.ops.push(CloneOp::Mutate { 
+ idx, + value: Value::from_u32(self.next_value), + sp, + stmt: self.stmt, + }); + self.next_value += 1; + } + fn visit_mutate_range(&mut self, range: Range, sp: Span) { + let len = range.end.as_u32() - range.start.as_u32(); + self.ops.push(CloneOp::MutateRange { + range, + value_start: Value::from_u32(self.next_value), + sp, + stmt: self.stmt, + }); + self.next_value += len; + } + + fn visit_consume_idx(&mut self, idx: projection::Idx, sp: Span) { + self.ops.push(CloneOp::Consume { + idx, + sp, + stmt: self.stmt, + }); + } + fn visit_consume_range(&mut self, range: Range, sp: Span) { + self.ops.push(CloneOp::ConsumeRange { + range, + sp, + stmt: self.stmt, + }); + } + + // A place can't be uninitialized without first being dropped or moved from. No need to mark + // it uninitialized again. + fn visit_uninit_idx(&mut self, _: projection::Idx, _: Span) {} + fn visit_uninit_range(&mut self, _: Range, _: Span) {} + fn visit_uninit_place(&mut self, _: Place<'tcx>, _: Span) {} + fn visit_uninit_local(&mut self, _: Local, _: Span) {} + + fn visit_move_idx(&mut self, dst: projection::Idx, src: projection::Idx, _: Span) { + self.ops.push(CloneOp::Move { src, dst }); + } + fn visit_move_range(&mut self, dst: projection::Idx, src: Range, _: Span) { + self.ops.push(CloneOp::MoveRange { src, dst }); + } + + // Tracked clone calls use `visit_copy_place`. 
+ fn visit_copy_idx(&mut self, dst: projection::Idx, src: projection::Idx, sp: Span) { + self.ops.push(CloneOp::CloneStructure { src, dst, sp }); + } + fn visit_copy_range(&mut self, dst: projection::Idx, src: Range, sp: Span) { + self.ops.push(CloneOp::CloneStructureRange { src, dst, sp }); + } + + fn visit_read_parent(&mut self, idx: projection::Idx, _: Span) { + self.ops.push(CloneOp::Read { idx }); + } + + fn visit_consume_parent(&mut self, idx: projection::Idx, sp: Span) { + self.ops.push(CloneOp::MutateRequired { + idx, + value: Value::from_u32(self.next_value), + sp, + stmt: self.stmt, + }); + self.next_value += 1; + } + + fn visit_read_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.projections.resolve(place); + let (mut start, data) = place.values(); + if data.contains_values() { + if data.has_value { + self.ops.push(CloneOp::Read { idx: start }); + start = start.plus(1); + } + if data.value_count > u32::from(data.has_value) { + self.visit_read_range( + start..start.plus(data.value_count as usize - usize::from(data.has_value)), + sp, + ); + } + } + for idx in place.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + } + + fn visit_drop_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.projections.resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.ops.push(CloneOp::Drop { + range: start..start.plus(data.value_count as usize), + sp, + }); + } + for idx in place.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + fn visit_assign_borrow(&mut self, dst: Place<'tcx>, src: Place<'tcx>, kind: BorrowKind, sp: Span) { + let src = self.projections.resolve(src); + let (mut src_start, src_data) = src.values(); + if src_data.contains_values() { + if matches!(kind, BorrowKind::Mut { .. 
}) { + self.ops.push(CloneOp::BorrowMut { + range: src_start..src_start.plus(src_data.value_count as usize), + value_start: Value::from_u32(self.next_value), + sp, + stmt: self.stmt, + }); + self.next_value += src_data.value_count; + } else { + if src_data.has_value { + self.ops.push(CloneOp::Read { idx: src_start }); + src_start = src_start.plus(1); + } + if src_data.value_count > u32::from(src_data.has_value) { + self.visit_read_range( + src_start..src_start.plus(src_data.value_count as usize - usize::from(src_data.has_value)), + sp, + ); + } } + } + if matches!(kind, BorrowKind::Mut { .. }) { + for idx in src.parents(self.resolver()) { + self.ops.push(CloneOp::MutateRequired { + idx, + value: Value::from_u32(self.next_value), + sp, + stmt: self.stmt, + }); + self.next_value += 1; + } + } else { + for idx in src.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + } + self.visit_mutate_place(dst, sp); + } +} - if let ty::Adt(def, _) = arg_ty.kind() - && def.is_manually_drop() - { - continue; +struct CloneDomain<'arena> { + /// Which pairs of projections are clones of each other with values that have yet to diverge and + /// which clone calls caused that link. + /// + /// Each link can be caused by multiple clones when merging blocks. e.g. + /// ```ignore + /// let x = value; + /// let y = if cond { x.clone() } else { x.clone() }; + /// ``` + /// + /// Each clone can also appear in multiple links. e.g. + /// ```ignore + /// let (x, mut y) = (value1, value2); + /// loop { + /// // The second iteration will start with `x` and `y` already linked. + /// let z = x.clone(); + /// y = z; + /// } + /// ``` + links: GrowableBitSet2d, + /// For each projection, track which clone calls the current value is associated with where the + /// pair of values have diverged, but have not yet been read. Also tracks which side of the + /// clone this value is (it may be both). 
+ diverged: BitSlice2d<'arena, projection::Idx, CloneWithValue>, + values: &'arena mut IndexSlice>, + // For each value, track the value of each predecessor block. A value is considered diverged at + // the block's entry if there are different values amongst its predecessors. + // + // Data layout is `[[Value; predecessor_count]; projection_count]`. + // + // This is only tracked if there are multiple predecessor blocks. + // predecessor_values: &'arena mut [Option], +} + +struct CloneAnalysis<'arena, 'body, 'tcx, 'blocks> { + projections: projection::Map<'arena>, + clone_info: &'arena IndexSlice>, + /// Tracks where clone values diverge and are dropped for diagnostic purposes. + clone_use_info: FxHashSet, + /// For both values (original and clone) of each clone call, track whether a read has occurred + /// that cannot be replace by a read of the paired value. A read cannot be replace if the values + /// have diverged or if the read comes from a structure containing the value. + required_reads: &'arena mut BitSlice, + ops: &'arena IndexSlice, + link_interner: LinkInterner, + borrowers: PossibleBorrowerMap<'body, 'tcx>, + block_map: &'blocks BlockOrderMap<'blocks>, + /// `PossibleBorrowerMap` doesn't handle cleanup blocks well. Since these only drop values we + /// can just skip diverging values at the block entry to fix the false negatives. 
+ cleanup_blocks: &'arena BitSlice, + predecessors: &'arena IndexSlice>, +} +impl<'arena, 'body, 'tcx, 'blocks> CloneAnalysis<'arena, 'body, 'tcx, 'blocks> { + #[expect(clippy::too_many_lines, clippy::too_many_arguments, clippy::type_complexity)] + fn new( + cx: &LateContext<'tcx>, + body: &'body Body<'tcx>, + arena: &'arena DroplessArena, + def_id: LocalDefId, + block_map: &'blocks BlockOrderMap<'blocks>, + place_filter: &PlaceFilter<'_>, + cloned_tys: &FxHashSet>, + clone_calls: &IndexSlice>>, + clone_srcs: &IndexSlice>>, + ) -> Option<( + Self, + ::Domain, + IndexVec::Domain>, + )> { + let mut visitor = CloneOpVisitor { + tcx: cx.tcx, + typing_env: cx.typing_env(), + body, + projections: { + let projections = projection::Map::new( + cx.tcx, + cx.typing_env(), + arena, + body, + |ty| cloned_tys.contains(&ty), + def_id.to_def_id(), + place_filter, + ); + if projections.domain_size() == 0 { + return None; + } + projections + }, + link_interner: LinkInterner::default(), + clone_info: arena.alloc_from_iter(iter::repeat_with(|| DUMMY_CLONE_INFO).take(body.basic_blocks.len())), + ops: Vec::new(), + next_value: 0, + next_clone_id: 0, + stmt: 0, + }; + let ops = IndexSlice::from_raw(arena.alloc_from_iter(block_map.from_ordered().iter_enumerated().map( + |(ordered_block, &block)| { + let block_data = &body.basic_blocks[block]; + for s in &block_data.statements { + visitor.visit_statement(s); + visitor.stmt += 1; + } + if let Some(term) = &block_data.terminator { + if let Some(src) = clone_srcs[ordered_block] + && let TerminatorKind::Call { args, .. 
} = &term.kind + && let [arg] = &**args + && let Some(call) = &clone_calls[block] + { + value_tracking::walk_operand(&mut visitor, &arg.node, arg.span); + visitor.visit_clone_call(src, call.dst, &term.source_info); + } else { + visitor.visit_terminator(term); + } + } + visitor.stmt = 0; + &*arena.alloc_from_iter(visitor.ops.drain(..)) + }, + ))); + if visitor.next_clone_id == 0 { + return None; + } + + let predecessors = Predecessors::for_body(arena, body, block_map, &visitor.projections); + let mut states: IndexVec = predecessors + .iter() + .map(|pre| CloneDomain { + links: GrowableBitSet2d::new(visitor.next_clone_id), + diverged: BitSlice2d::empty_arena( + arena, + visitor.projections.domain_size_u32(), + visitor.next_clone_id * 2, + ), + values: IndexSlice::from_raw_mut(arena.alloc_from_iter( + iter::repeat_with(|| None).take(visitor.projections.domain_size() * pre.predecessors.len().max(1)), + )), + }) + .collect(); + let tmp_state = CloneDomain { + links: GrowableBitSet2d::new(visitor.next_clone_id), + diverged: BitSlice2d::empty_arena(arena, visitor.projections.domain_size_u32(), visitor.next_clone_id * 2), + values: IndexSlice::from_raw_mut( + arena.alloc_from_iter(iter::repeat_with(|| None).take(visitor.projections.domain_size())), + ), + }; + let start_state = &mut states[block_map.to_ordered()[START_BLOCK]]; + for idx in visitor.projections.resolve_args(body) { + start_state.values[idx] = Some(Value::from_u32(visitor.next_value)); + visitor.next_value += 1; + } + + let cleanup_blocks = BitSlice::empty_arena(arena, body.basic_blocks.len()); + for (block, data) in body.basic_blocks.iter_enumerated() { + if data.is_cleanup { + cleanup_blocks.insert(block_map.to_ordered()[block]); + } + } + + Some(( + Self { + projections: visitor.projections, + clone_info: IndexSlice::from_raw(&visitor.clone_info[..visitor.next_clone_id as usize]), + clone_use_info: FxHashSet::default(), + required_reads: BitSlice::empty_arena(arena, visitor.next_clone_id as usize * 2), + 
ops, + link_interner: visitor.link_interner, + borrowers: PossibleBorrowerMap::new(cx.tcx, cx.typing_env(), body), + block_map, + cleanup_blocks, + predecessors, + }, + tmp_state, + states, + )) + } + + fn set_diverged(diverged: &mut BitSlice, clones: &BitSlice, value: bit_slice::Word) { + diverged.words.chunks_mut(2).zip(&clones.words).for_each(|(dst, &src)| { + for i in WordBitIter::new(src) { + let (idx, shift) = double_bit_idx(i); + dst[idx] |= value << shift; } + }); + } - // `{ arg = &cloned; clone(move arg); }` or `{ arg = &cloned; to_path_buf(arg); }` - let (cloned, cannot_move_out) = unwrap_or_continue!(find_stmt_assigns_to(cx, mir, arg, from_borrow, bb)); + fn set_diverged_with_use( + use_info: &mut FxHashSet, + diverged: &mut BitSlice, + clones: &BitSlice, + value: bit_slice::Word, + clone_value: CloneValue, + sp: Span, + ) { + let mut clone_idx = 0; + diverged.words.chunks_mut(2).zip(&clones.words).for_each(|(dst, &src)| { + for i in WordBitIter::new(src) { + use_info.insert(UseInfo::new( + clone_value.with_clone(CloneIdx::from_u32(i + clone_idx)), + UseKind::Diverged, + sp, + )); + let (idx, shift) = double_bit_idx(i); + dst[idx] |= value << shift; + } + clone_idx += bit_slice::Word::BITS; + }); + } - let loc = mir::Location { - block: bb, - statement_index: bbdata.statements.len(), + fn diverge_idx( + &mut self, + links: &mut GrowableBitSet2d, + diverged: &mut BitSlice2d<'_, projection::Idx, CloneWithValue>, + idx: projection::Idx, + sp: Span, + loc: Location, + ) { + for (clones, link) in links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let value = if idx == link.original { + CloneValue::Original + } else if idx == link.clone { + CloneValue::Clone + } else { + continue; }; + let original_borrowed = !self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.original), loc); + let clone_borrowed = !self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.clone), loc); + if original_borrowed || 
clone_borrowed { + Self::set_diverged( + self.required_reads, + clones, + CloneValue::bit_pair(original_borrowed, clone_borrowed), + ); + } + if !original_borrowed { + Self::set_diverged( + diverged.row_mut(link.original), + clones, + CloneValue::bit_pair(true, false), + ); + } + if !clone_borrowed { + Self::set_diverged(diverged.row_mut(link.clone), clones, CloneValue::bit_pair(false, true)); + } + for clone in clones.drain() { + self.clone_use_info + .insert(UseInfo::new(value.with_clone(clone), UseKind::Diverged, sp)); + } + } + } + + fn diverge_range( + &mut self, + state: &mut ::Domain, + range: Range, + sp: Span, + loc: Location, + ) { + for (clones, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let is_original = range.contains(&link.original); + let is_clone = range.contains(&link.clone); + if !(is_original || is_clone) { + continue; + } - // `Local` to be cloned, and a local of `clone` call's destination - let (local, ret_local) = if from_borrow { - // `res = clone(arg)` can be turned into `res = move arg;` - // if `arg` is the only borrow of `cloned` at this point. 
+ let original_borrowed = !self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.original), loc); + let clone_borrowed = !self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.clone), loc); + if original_borrowed || clone_borrowed { + Self::set_diverged( + self.required_reads, + clones, + CloneValue::bit_pair(original_borrowed, clone_borrowed), + ); + } + if !original_borrowed { + Self::set_diverged( + state.diverged.row_mut(link.original), + clones, + CloneValue::bit_pair(true, false), + ); + } + if !clone_borrowed { + Self::set_diverged( + state.diverged.row_mut(link.clone), + clones, + CloneValue::bit_pair(false, true), + ); + } - if cannot_move_out || !possible_borrower.only_borrowers(&[arg], cloned, loc) { - continue; + if is_original { + for clone in clones.iter() { + self.clone_use_info.insert(UseInfo::new( + CloneValue::Original.with_clone(clone), + UseKind::Diverged, + sp, + )); } + } + if is_clone { + for clone in clones.iter() { + self.clone_use_info.insert(UseInfo::new( + CloneValue::Clone.with_clone(clone), + UseKind::Diverged, + sp, + )); + } + } + clones.clear(); + } + } - (cloned, clone_ret) + fn diverge_read_idx( + &mut self, + state: &mut ::Domain, + idx: projection::Idx, + sp: Span, + loc: Location, + ) { + for (clones, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let (other_idx, clone_value, other_value, this_value) = if link.clone == idx { + ( + link.original, + CloneValue::Clone, + CloneValue::bit_pair(true, false), + CloneValue::bit_pair(false, true), + ) + } else if link.original == idx { + ( + link.clone, + CloneValue::Clone, + CloneValue::bit_pair(false, true), + CloneValue::bit_pair(true, false), + ) } else { - // `arg` is a reference as it is `.deref()`ed in the previous block. - // Look into the predecessor block and find out the source of deref. 
+ continue; + }; + let diverged = if self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(other_idx), loc) + { + state.diverged.row_mut(other_idx) + } else { + &mut *self.required_reads + }; + Self::set_diverged(diverged, clones, other_value); + let diverged = state.diverged.row_mut(idx); + Self::set_diverged_with_use(&mut self.clone_use_info, diverged, clones, this_value, clone_value, sp); + for (src, dst) in diverged.words.iter_mut().zip(self.required_reads.words.iter_mut()) { + *dst |= mem::take(src); + } + } + } + + fn diverge_read_range( + &mut self, + state: &mut ::Domain, + range: Range, + sp: Span, + loc: Location, + ) { + for (clones, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let is_original = range.contains(&link.original); + let is_clone = range.contains(&link.clone); - let ps = &mir.basic_blocks.predecessors()[bb]; - if ps.len() != 1 { - continue; + if is_original { + let diverged = state.diverged.row_mut(link.original); + Self::set_diverged_with_use( + &mut self.clone_use_info, + diverged, + clones, + CloneValue::bit_pair(true, false), + CloneValue::Original, + sp, + ); + for (src, dst) in diverged.words.iter_mut().zip(self.required_reads.words.iter_mut()) { + *dst |= mem::take(src); } - let pred_terminator = mir[ps[0]].terminator(); - - // receiver of the `deref()` call - let (pred_arg, deref_clone_ret) = if let Some((pred_fn_def_id, pred_arg, pred_arg_ty, res)) = - is_call_with_ref_arg(cx, mir, &pred_terminator.kind) - && res == cloned - && cx.tcx.is_diagnostic_item(sym::deref_method, pred_fn_def_id) - && (is_type_diagnostic_item(cx, pred_arg_ty, sym::PathBuf) - || is_type_diagnostic_item(cx, pred_arg_ty, sym::OsString)) + } else if is_clone { + let diverged = if self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.original), loc) { - (pred_arg, res) + state.diverged.row_mut(link.original) } else { - continue; + &mut *self.required_reads }; + Self::set_diverged(diverged, 
clones, CloneValue::bit_pair(true, false)); + } - let (local, cannot_move_out) = - unwrap_or_continue!(find_stmt_assigns_to(cx, mir, pred_arg, true, ps[0])); - let loc = mir::Location { - block: bb, - statement_index: mir.basic_blocks[bb].statements.len(), + if is_clone { + let diverged = state.diverged.row_mut(link.clone); + Self::set_diverged_with_use( + &mut self.clone_use_info, + diverged, + clones, + CloneValue::bit_pair(false, true), + CloneValue::Clone, + sp, + ); + for (src, dst) in diverged.words.iter_mut().zip(self.required_reads.words.iter_mut()) { + *dst |= mem::take(src); + } + } else if is_original { + let diverged = if self + .borrowers + .is_unborrowed_before(self.projections.local_for_idx(link.clone), loc) + { + state.diverged.row_mut(link.clone) + } else { + &mut *self.required_reads }; + Self::set_diverged(diverged, clones, CloneValue::bit_pair(false, true)); + } - // This can be turned into `res = move local` if `arg` and `cloned` are not borrowed - // at the last statement: - // - // ``` - // pred_arg = &local; - // cloned = deref(pred_arg); - // arg = &cloned; - // StorageDead(pred_arg); - // res = to_path_buf(cloned); - // ``` - if cannot_move_out || !possible_borrower.only_borrowers(&[arg, cloned], local, loc) { - continue; - } + if is_clone || is_original { + clones.clear(); + } + } + } - (local, deref_clone_ret) - }; + /// Clears an index preparing it to receive a new value. 
+ fn clear_idx(&mut self, state: &mut ::Domain, idx: projection::Idx, sp: Span) { + let row = state.diverged.row_mut(idx); + self.clone_use_info + .extend(row.drain().map(|idx| UseInfo::new(idx, UseKind::Dropped, sp))); + for (row, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + if link.clone == idx || link.original == idx { + row.clear(); + } + } + } - let clone_usage = if local == ret_local { - CloneUsage { - cloned_use_loc: None.into(), - cloned_consume_or_mutate_loc: None, - clone_consumed_or_mutated: true, - } - } else { - let clone_usage = visit_clone_usage(local, ret_local, mir, bb); - if clone_usage.cloned_use_loc.maybe_used() && clone_usage.clone_consumed_or_mutated { - // cloned value is used, and the clone is modified or moved - continue; - } else if let MirLocalUsage::Used(loc) = clone_usage.cloned_use_loc - && possible_borrower.local_is_alive_at(ret_local, loc) - { - // cloned value is used, and the clone is alive. - continue; - } else if let Some(loc) = clone_usage.cloned_consume_or_mutate_loc - // cloned value is mutated, and the clone is alive. - && possible_borrower.local_is_alive_at(ret_local, loc) - { - continue; - } - clone_usage - }; + /// Clears an range preparing it to receive a new set of values. 
+ fn clear_range(&mut self, state: &mut ::Domain, range: Range, sp: Span) { + for row in state.diverged.iter_mut_rows(range.clone()) { + self.clone_use_info + .extend(row.drain().map(|idx| UseInfo::new(idx, UseKind::Dropped, sp))); + } + for (row, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + if range.contains(&link.clone) || range.contains(&link.original) { + row.clear(); + } + } + } - let span = terminator.source_info.span; - let scope = terminator.source_info.scope; - let node = mir.source_scopes[scope] - .local_data - .as_ref() - .unwrap_crate_local() - .lint_root; + fn read_idx(&mut self, state: &mut ::Domain, idx: projection::Idx) { + for (src, dst) in state + .diverged + .row_mut(idx) + .words + .iter_mut() + .zip(&mut self.required_reads.words) + { + *dst |= mem::take(src); + } + } - if let Some(snip) = span.get_source_text(cx) - && let Some(dot) = snip.rfind('.') - { - let sugg_span = span.with_lo(span.lo() + BytePos(u32::try_from(dot).unwrap())); - let mut app = Applicability::MaybeIncorrect; - - let call_snip = &snip[dot + 1..]; - // Machine applicable when `call_snip` looks like `foobar()` - if let Some(call_snip) = call_snip.strip_suffix("()").map(str::trim) - && call_snip - .as_bytes() - .iter() - .all(|b| b.is_ascii_alphabetic() || *b == b'_') - { - app = Applicability::MachineApplicable; - } + fn read_structure_idx(&mut self, state: &mut ::Domain, idx: projection::Idx) { + self.read_idx(state, idx); - span_lint_hir_and_then(cx, REDUNDANT_CLONE, node, sugg_span, "redundant clone", |diag| { - diag.span_suggestion(sugg_span, "remove this", "", app); - if clone_usage.cloned_use_loc.maybe_used() { - diag.span_note(span, "cloned value is neither consumed nor mutated"); - } else { - diag.span_note( - span.with_hi(span.lo() + BytePos(u32::try_from(dot).unwrap())), - "this value is dropped without further use", - ); - } - }); - } else { - span_lint_hir(cx, REDUNDANT_CLONE, node, span, "redundant clone"); + for (clones, link) in 
state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let is_original = link.original == idx; + let is_clone = link.clone == idx; + if is_original || is_clone { + Self::set_diverged(self.required_reads, clones, CloneValue::bit_pair(is_original, is_clone)); } } } -} -/// If `kind` is `y = func(x: &T)` where `T: !Copy`, returns `(DefId of func, x, T, y)`. -fn is_call_with_ref_arg<'tcx>( - cx: &LateContext<'tcx>, - mir: &'tcx mir::Body<'tcx>, - kind: &'tcx mir::TerminatorKind<'tcx>, -) -> Option<(def_id::DefId, mir::Local, Ty<'tcx>, mir::Local)> { - if let mir::TerminatorKind::Call { - func, - args, - destination, - .. - } = kind - && args.len() == 1 - && let mir::Operand::Move(mir::Place { local, .. }) = &args[0].node - && let ty::FnDef(def_id, _) = *func.ty(mir, cx.tcx).kind() - && let (inner_ty, 1) = walk_ptrs_ty_depth(args[0].node.ty(mir, cx.tcx)) - && !is_copy(cx, inner_ty) - { - Some((def_id, *local, inner_ty, destination.as_local()?)) - } else { - None + fn read_structure_range(&mut self, state: &mut ::Domain, range: Range) { + for row in state.diverged.iter_mut_rows(range.clone()) { + for (src, dst) in row.words.iter_mut().zip(&mut self.required_reads.words) { + *dst |= mem::take(src); + } + } + for (clones, link) in state.links.iter_mut_rows(..).zip(self.link_interner.iter()) { + let is_original = range.contains(&link.original); + let is_clone = range.contains(&link.clone); + if is_original || is_clone { + Self::set_diverged(self.required_reads, clones, CloneValue::bit_pair(is_original, is_clone)); + } + } } } +impl<'arena> Analysis for CloneAnalysis<'arena, '_, '_, '_> { + type Domain = CloneDomain<'arena>; -type CannotMoveOut = bool; + fn clone_block_entry(&mut self, src: &Self::Domain, dst: &mut Self::Domain, block: OrderedBlock) { + dst.links.clone_from(&src.links); + dst.diverged.words_mut().copy_from_slice(src.diverged.words()); + if src.values.len() == dst.values.len() { + dst.values.raw.copy_from_slice(&src.values.raw); + } else { + let 
predecessors = &self.predecessors[block]; + for ((idx, dst_value), src_values) in dst + .values + .iter_enumerated_mut() + .zip(src.values.raw.chunks_exact(predecessors.predecessors.len())) + { + let mut src_values = src_values.iter(); + let loc_block = self.block_map.from_ordered()[block]; + *dst_value = if let Some(value) = src_values.find_map(|&x| x) { + Some(if src_values.all(|&x| x.is_none_or(|x| x == value)) { + value + } else { + // `PossibleBorrowerMap` sees dead references as live on cleanup blocks + // causing a lot of false reads to occur. + if !self.cleanup_blocks.contains(block) { + self.diverge_idx( + &mut dst.links, + &mut dst.diverged, + idx, + DUMMY_SP, + Location { + block: loc_block, + statement_index: 0, + }, + ); + } + predecessors.first_value.plus(idx.as_usize()) + }) + } else { + None + }; + } + } + } -/// Finds the first `to = (&)from`, and returns -/// ``Some((from, whether `from` cannot be moved out))``. -fn find_stmt_assigns_to<'tcx>( - cx: &LateContext<'tcx>, - mir: &mir::Body<'tcx>, - to_local: mir::Local, - by_ref: bool, - bb: mir::BasicBlock, -) -> Option<(mir::Local, CannotMoveOut)> { - let rvalue = mir.basic_blocks[bb].statements.iter().rev().find_map(|stmt| { - if let mir::StatementKind::Assign(box (mir::Place { local, .. 
}, v)) = &stmt.kind { - return if *local == to_local { Some(v) } else { None }; + fn join_domain( + &mut self, + src: &Self::Domain, + dst: &mut Self::Domain, + src_block: OrderedBlock, + dst_block: OrderedBlock, + ) -> bool { + let mut changed = dst.links.union(&src.links) | dst.diverged.union(&src.diverged); + if src.values.len() == dst.values.len() { + for (dst_value, &src_value) in dst.values.iter_mut().zip(src.values.iter()) { + changed |= *dst_value != src_value; + *dst_value = src_value; + } + } else { + let predecessors = &self.predecessors[dst_block]; + let idx = predecessors.predecessors.get_index(&src_block).unwrap(); + assert!(idx < predecessors.predecessors.len()); + for (dst_value, &src_value) in dst + .values + .raw + .chunks_exact_mut(predecessors.predecessors.len()) + .map(|x| &mut x[idx]) + .zip(src.values.iter()) + { + changed |= *dst_value != src_value; + *dst_value = src_value; + } } + changed + } - None - })?; - - match (by_ref, rvalue) { - (true, mir::Rvalue::Ref(_, _, place)) | (false, mir::Rvalue::Use(mir::Operand::Copy(place))) => { - Some(base_local_and_movability(cx, mir, *place)) - }, - (false, mir::Rvalue::Ref(_, _, place)) => { - if let [mir::ProjectionElem::Deref] = place.as_ref().projection { - Some(base_local_and_movability(cx, mir, *place)) - } else { - None + #[expect(clippy::too_many_lines)] + fn apply_block(&mut self, state: &mut Self::Domain, block: OrderedBlock) { + if self.cleanup_blocks.contains(block) { + return; + } + let loc_block = self.block_map.from_ordered()[block]; + for op in self.ops[block] { + match *op { + CloneOp::Read { idx } => self.read_idx(state, idx), + CloneOp::ReadStructure { idx } => self.read_structure_idx(state, idx), + CloneOp::ReadStructureRange { ref range } => self.read_structure_range(state, range.clone()), + CloneOp::Drop { ref range, sp } => { + self.clear_range(state, range.clone(), sp); + state.values[range.clone()].fill(None); + }, + CloneOp::Consume { idx, sp, stmt } => { + 
self.diverge_read_idx( + state, + idx, + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + state.values[idx] = None; + }, + CloneOp::ConsumeRange { ref range, sp, stmt } => { + self.diverge_read_range( + state, + range.clone(), + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + state.values[range.clone()].fill(None); + }, + CloneOp::Mutate { idx, value, sp, stmt } => { + self.diverge_idx( + &mut state.links, + &mut state.diverged, + idx, + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + state.values[idx] = Some(value); + }, + CloneOp::MutateRange { + ref range, + value_start, + sp, + stmt, + } => { + self.diverge_range( + state, + range.clone(), + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + for (dst, value) in state.values[range.clone()].iter_mut().zip(value_start..) { + *dst = Some(value); + } + }, + CloneOp::MutateRequired { idx, value, sp, stmt } => { + self.diverge_read_idx( + state, + idx, + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + state.values[idx] = Some(value); + }, + CloneOp::BorrowMut { + ref range, + value_start, + sp, + stmt, + } => { + self.diverge_read_range( + state, + range.clone(), + sp, + Location { + block: loc_block, + statement_index: stmt as usize, + }, + ); + for (dst, value) in state.values[range.clone()].iter_mut().zip(value_start..) 
{ + *dst = Some(value); + } + }, + CloneOp::Clone { + clone, + link, + src, + dst, + sp, + } => { + self.read_idx(state, src); + self.clear_idx(state, dst, sp); + state.links.ensure_row(link).insert(clone); + let src_value = state.values[src]; + state.values[dst] = src_value; + }, + CloneOp::CloneStructure { src, dst, sp } => { + self.read_structure_idx(state, src); + self.clear_idx(state, dst, sp); + let src = state.values[src]; + state.values[dst] = src; + }, + CloneOp::CloneStructureRange { ref src, dst, sp } => { + self.read_structure_range(state, src.clone()); + self.clear_range(state, dst..dst.plus(src.end.as_usize() - src.start.as_usize()), sp); + state + .values + .raw + .copy_within(src.start.as_usize()..src.end.as_usize(), dst.as_usize()); + }, + CloneOp::Move { src, dst } => { + for link_idx in self.link_interner.idx_range() { + let link = self.link_interner.get(link_idx); + let new_link = if link.original == src { + Link { + original: dst, + clone: link.clone, + } + } else if link.clone == src { + Link { + original: link.original, + clone: dst, + } + } else { + continue; + }; + + let new_idx = self.link_interner.intern(new_link); + state.links.move_rows(link_idx, new_idx); + } + + state.diverged.move_rows(src, dst); + let src_value = state.values[src]; + state.values[src] = None; + state.values[dst] = src_value; + }, + CloneOp::MoveRange { ref src, dst } => { + for link_idx in self.link_interner.idx_range() { + let link = self.link_interner.get(link_idx); + let mut changed = false; + let original = if let Some(offset) = link.original.as_usize().checked_sub(src.start.as_usize()) + && link.original < src.end + { + changed = true; + dst.plus(offset) + } else { + link.original + }; + let clone = if let Some(offset) = link.clone.as_usize().checked_sub(src.start.as_usize()) + && link.clone < src.end + { + changed = true; + dst.plus(offset) + } else { + link.clone + }; + if changed { + let new_idx = self.link_interner.intern(Link { original, clone }); + 
state.links.move_rows(link_idx, new_idx); + } + } + + state.diverged.move_rows(src.clone(), dst); + move_within_slice( + &mut state.values.raw, + src.start.as_usize()..src.end.as_usize(), + dst.as_usize(), + ); + }, } - }, - _ => None, + } } } -/// Extracts and returns the undermost base `Local` of given `place`. Returns `place` itself -/// if it is already a `Local`. -/// -/// Also reports whether given `place` cannot be moved out. -fn base_local_and_movability<'tcx>( - cx: &LateContext<'tcx>, - mir: &mir::Body<'tcx>, - place: mir::Place<'tcx>, -) -> (mir::Local, CannotMoveOut) { - // Dereference. You cannot move things out from a borrowed value. - let mut deref = false; - // Accessing a field of an ADT that has `Drop`. Moving the field out will cause E0509. - let mut field = false; - // If projection is a slice index then clone can be removed only if the - // underlying type implements Copy - let mut slice = false; - - for (base, elem) in place.as_ref().iter_projections() { - let base_ty = base.ty(&mir.local_decls, cx.tcx).ty; - deref |= matches!(elem, mir::ProjectionElem::Deref); - field |= matches!(elem, mir::ProjectionElem::Field(..)) && has_drop(cx, base_ty); - slice |= matches!(elem, mir::ProjectionElem::Index(..)) && !is_copy(cx, base_ty); - } - - (place.local, deref || field || slice) -} - -#[derive(Debug, Default)] -enum MirLocalUsage { - /// The local maybe used, but we are not sure how. - Unknown, - /// The local is not used. - #[default] - Unused, - /// The local is used at a specific location. - Used(mir::Location), -} - -impl MirLocalUsage { - fn maybe_used(&self) -> bool { - matches!(self, MirLocalUsage::Unknown | MirLocalUsage::Used(_)) - } -} - -impl From> for MirLocalUsage { - fn from(loc: Option) -> Self { - loc.map_or(MirLocalUsage::Unused, MirLocalUsage::Used) - } -} - -#[derive(Debug, Default)] -struct CloneUsage { - /// The first location where the cloned value is used, if any. 
- cloned_use_loc: MirLocalUsage, - /// The first location where the cloned value is consumed or mutated, if any. - cloned_consume_or_mutate_loc: Option, - /// Whether the clone value is mutated. - clone_consumed_or_mutated: bool, -} - -fn visit_clone_usage(cloned: mir::Local, clone: mir::Local, mir: &mir::Body<'_>, bb: mir::BasicBlock) -> CloneUsage { - if let Some(( - LocalUsage { - local_use_locs: cloned_use_locs, - local_consume_or_mutate_locs: cloned_consume_or_mutate_locs, - }, - LocalUsage { - local_use_locs: _, - local_consume_or_mutate_locs: clone_consume_or_mutate_locs, - }, - )) = visit_local_usage( - &[cloned, clone], - mir, - mir::Location { - block: bb, - statement_index: mir.basic_blocks[bb].statements.len(), - }, - ) - .map(|mut vec| (vec.remove(0), vec.remove(0))) - { - CloneUsage { - cloned_use_loc: cloned_use_locs.first().copied().into(), - cloned_consume_or_mutate_loc: cloned_consume_or_mutate_locs.first().copied(), - // Consider non-temporary clones consumed. - // TODO: Actually check for mutation of non-temporaries. 
- clone_consumed_or_mutated: mir.local_kind(clone) != mir::LocalKind::Temp - || !clone_consume_or_mutate_locs.is_empty(), +enum RefTargetOp<'tcx> { + Set { + dst: projection::Idx, + target: Place<'tcx>, + }, + Copy { + src: projection::Idx, + dst: projection::Idx, + }, + CopyRange { + src: Range, + dst: projection::Idx, + }, + Move { + src: projection::Idx, + dst: projection::Idx, + }, + MoveRange { + src: Range, + dst: projection::Idx, + }, + SetUnknown(projection::Idx), + SetUnknownRange(Range), + Clear(projection::Idx), + ClearRange(Range), + Clone { + block: OrderedBlock, + idx: projection::Idx, + }, +} + +struct RefTargetOpVisitor<'a, 'arena, 'tcx> { + tcx: TyCtxt<'tcx>, + body: &'a Body<'tcx>, + projections: childless_projection::Map<'arena>, + ops: Vec>, + has_set: bool, +} +impl<'arena, 'tcx> value_tracking::Visitor<'arena, 'tcx> for RefTargetOpVisitor<'_, 'arena, 'tcx> { + type Resolver = childless_projection::Map<'arena>; + fn resolver(&self) -> &Self::Resolver { + &self.projections + } + fn body(&self) -> &Body<'tcx> { + self.body + } + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_read_idx(&mut self, _: projection::Idx, _: Span) {} + fn visit_read_range(&mut self, _: Range, _: Span) {} + + fn visit_mutate_idx(&mut self, idx: projection::Idx, _: Span) { + self.ops.push(RefTargetOp::SetUnknown(idx)); + } + + fn visit_uninit_idx(&mut self, idx: projection::Idx, _: Span) { + self.ops.push(RefTargetOp::Clear(idx)); + } + + fn visit_copy_idx(&mut self, dst: projection::Idx, src: projection::Idx, _: Span) { + self.ops.push(RefTargetOp::Copy { src, dst }); + } + + fn visit_move_idx(&mut self, dst: projection::Idx, src: projection::Idx, _: Span) { + self.ops.push(RefTargetOp::Move { src, dst }); + } + + fn visit_mutate_range(&mut self, range: Range, _: Span) { + self.ops.push(RefTargetOp::SetUnknownRange(range)); + } + + fn visit_uninit_range(&mut self, range: Range, _: Span) { + self.ops.push(RefTargetOp::ClearRange(range)); + } + + fn 
visit_copy_range(&mut self, dst: projection::Idx, src: Range, _: Span) { + self.ops.push(RefTargetOp::CopyRange { src, dst }); + } + + fn visit_move_range(&mut self, dst: projection::Idx, src: Range, _: Span) { + self.ops.push(RefTargetOp::MoveRange { src, dst }); + } + + fn visit_assign_borrow(&mut self, dst: Place<'tcx>, src: Place<'tcx>, _: BorrowKind, _: Span) { + if let Some(dst) = self.projections.resolve(dst).as_scalar_value() { + self.ops.push( + if let [elems @ .., ProjectionElem::Deref] = &**src.projection + && let Some(src) = self.projections.resolve_slice_proj(src.local, elems).as_scalar_value() + { + RefTargetOp::Copy { src, dst } + } else if !src.is_indirect() { + self.has_set = true; + RefTargetOp::Set { dst, target: src } + } else { + RefTargetOp::SetUnknown(dst) + }, + ); } - } else { - CloneUsage { - cloned_use_loc: MirLocalUsage::Unknown, - cloned_consume_or_mutate_loc: None, - clone_consumed_or_mutated: true, + } +} + +struct RefTargetAnalysis<'arena, 'tcx> { + ops: &'arena IndexSlice]>, + // This stores the results of the analysis. This is fairly cheap to calculate during the + // analysis and doing so saves a final pass over every block. 
+ clone_srcs: &'arena mut IndexSlice>>, +} +impl<'arena, 'tcx> RefTargetAnalysis<'arena, 'tcx> { + #[expect(clippy::too_many_arguments, clippy::type_complexity)] + fn new( + cx: &LateContext<'tcx>, + arena: &'arena DroplessArena, + body: &Body<'tcx>, + def_id: LocalDefId, + block_map: &BlockOrderMap<'_>, + place_filter: &PlaceFilter<'_>, + cloned_tys: &FxHashSet>, + clone_calls: &IndexSlice>>, + ) -> Option<( + Self, + ::Domain, + &'arena mut IndexSlice::Domain>, + )> { + let mut visitor = RefTargetOpVisitor { + tcx: cx.tcx, + body, + projections: { + let projections = childless_projection::Map::new( + cx.tcx, + cx.typing_env(), + arena, + body, + |ty| matches!(*ty.kind(), ty::Ref(_, ty, _) if cloned_tys.contains(&ty)), + def_id.to_def_id(), + place_filter, + ); + if projections.domain_size() == 0 { + return None; + } + projections + }, + ops: Vec::new(), + has_set: false, + }; + let ops = IndexSlice::from_raw(arena.alloc_from_iter(block_map.from_ordered().iter_enumerated().map( + |(ordered_block, &block)| { + let block_data = &body.basic_blocks[block]; + for s in &block_data.statements { + visitor.visit_statement(s); + } + if let Some(term) = &block_data.terminator { + if let Some(call) = &clone_calls[block] + && let Some(idx) = visitor.projections.resolve(call.src).as_scalar_value() + { + visitor.ops.push(RefTargetOp::Clone { + block: ordered_block, + idx, + }); + } + visitor.visit_terminator(term); + } + &*arena.alloc_from_iter(visitor.ops.drain(..)) + }, + ))); + if !visitor.has_set { + return None; + } + + let clone_srcs = IndexSlice::::from_raw_mut( + arena.alloc_from_iter(iter::repeat_with(|| None).take(body.basic_blocks.len())), + ); + let states = + IndexSlice::::from_raw_mut( + arena.alloc_from_iter( + iter::repeat_with(|| { + IndexSlice::::from_raw_mut(arena.alloc_from_iter( + iter::repeat_with(|| FlatSet::Bottom).take(visitor.projections.domain_size()), + )) + }) + .take(body.basic_blocks.len()), + ), + ); + let tmp_state = IndexSlice::::from_raw_mut( + 
arena.alloc_from_iter(iter::repeat_with(|| FlatSet::Bottom).take(visitor.projections.domain_size())), + ); + states[block_map.to_ordered()[START_BLOCK]][visitor.projections.resolve_args(body)].fill(FlatSet::Top); + Some((Self { ops, clone_srcs }, tmp_state, states)) + } +} +impl<'arena, 'tcx> Analysis for RefTargetAnalysis<'arena, 'tcx> { + type Domain = &'arena mut IndexSlice>>; + + fn clone_block_entry(&mut self, src: &Self::Domain, dst: &mut Self::Domain, _: OrderedBlock) { + dst.raw.copy_from_slice(&src.raw); + } + + fn join_domain(&mut self, src: &Self::Domain, dst: &mut Self::Domain, _: OrderedBlock, _: OrderedBlock) -> bool { + dst.iter_mut() + .zip(src.iter()) + .fold(false, |changed, (dst, src)| dst.join(src) || changed) + } + + fn apply_block(&mut self, state: &mut Self::Domain, block: OrderedBlock) { + for op in self.ops[block] { + match op { + &RefTargetOp::Clear(idx) => state[idx] = FlatSet::Bottom, + RefTargetOp::ClearRange(range) => state[range.clone()].fill(FlatSet::Bottom), + &RefTargetOp::Copy { src, dst } => state[dst] = state[src], + RefTargetOp::CopyRange { src, dst } => { + state + .raw + .copy_within(src.start.as_usize()..src.end.as_usize(), dst.as_usize()); + }, + &RefTargetOp::Move { src, dst } => { + state[dst] = state[src]; + state[src] = FlatSet::Bottom; + }, + RefTargetOp::MoveRange { src, dst } => { + state + .raw + .copy_within(src.start.as_usize()..src.end.as_usize(), dst.as_usize()); + state[src.clone()].fill(FlatSet::Bottom); + }, + &RefTargetOp::SetUnknown(idx) => state[idx] = FlatSet::Top, + RefTargetOp::SetUnknownRange(range) => state[range.clone()].fill(FlatSet::Top), + &RefTargetOp::Set { dst, target } => state[dst] = FlatSet::Elem(target), + &RefTargetOp::Clone { block, idx } => { + self.clone_srcs[block] = if let FlatSet::Elem(x) = state[idx] { + Some(x) + } else { + None + }; + }, + } } } } diff --git a/clippy_mir/Cargo.toml b/clippy_mir/Cargo.toml new file mode 100644 index 000000000000..ec1903cfdce2 --- /dev/null +++ 
b/clippy_mir/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "clippy_mir" +version = "0.0.1" +edition = "2021" + +[dependencies] +clippy_data_structures = { path = "../clippy_data_structures" } +indexmap = "2.0.0" + +[package.metadata.rust-analyzer] +# This package uses #[feature(rustc_private)] +rustc_private = true diff --git a/clippy_mir/src/analysis.rs b/clippy_mir/src/analysis.rs new file mode 100644 index 000000000000..0c61bf953f03 --- /dev/null +++ b/clippy_mir/src/analysis.rs @@ -0,0 +1,245 @@ +use clippy_data_structures::{SliceSet, bit_slice}; +use core::cmp::minmax; +use core::iter; +use rustc_arena::DroplessArena; +use rustc_index::IndexSlice; +use rustc_middle::mir::{BasicBlock, Body, TerminatorKind, UnwindAction}; + +rustc_index::newtype_index! { + /// A reordered block index. + #[orderable] + pub struct OrderedBlock {} +} + +/// Bi-directional mapping to reorder blocks. +pub struct BlockOrderMap<'a> { + from_ordered: &'a IndexSlice, + to_ordered: &'a IndexSlice, +} +impl<'a> BlockOrderMap<'a> { + /// Creates a new mapping for a reverse postorder ordering. + pub fn new_reverse_postorder(arena: &'a DroplessArena, body: &'a Body<'_>) -> Self { + let from_ordered = IndexSlice::::from_raw(body.basic_blocks.reverse_postorder()); + let to_ordered = IndexSlice::::from_raw_mut( + arena.alloc_from_iter(iter::repeat_with(|| OrderedBlock::ZERO).take(from_ordered.len())), + ); + for (x, &y) in from_ordered.iter_enumerated() { + to_ordered[y] = x; + } + + Self { + from_ordered, + to_ordered, + } + } + + #[inline] + pub fn to_ordered(&self) -> &'a IndexSlice { + self.to_ordered + } + + #[inline] + pub fn from_ordered(&self) -> &'a IndexSlice { + self.from_ordered + } +} + +/// Queue that will remove blocks in-order. +pub struct WorkQueue<'arena> { + queue: &'arena mut [bit_slice::Word], + word: bit_slice::Word, + offset: u32, + domain_size: u32, +} +impl<'arena> WorkQueue<'arena> { + /// Creates a new empty queue for the given body. 
+ pub fn new(arena: &'arena DroplessArena, body: &Body<'_>) -> Self { + Self { + queue: arena.alloc_from_iter(iter::repeat_n( + 0, + bit_slice::word_count_from_bits(body.basic_blocks.len()), + )), + offset: 0, + word: 0, + domain_size: body.basic_blocks.len() as u32, + } + } + + /// Fills the queue with all blocks. + fn fill(&mut self) { + self.queue.fill(!0); + if let Some(word) = self.queue.last_mut() { + *word &= bit_slice::final_mask_for_size(self.domain_size as usize); + } + self.offset = 0; + self.word = self.queue.first().copied().unwrap_or(0); + } + + /// Extracts the next block in the queue. + fn next(&mut self) -> Option { + if self.word == 0 { + self.queue[self.offset as usize] = 0; + self.offset += self.queue[self.offset as usize + 1..].iter().position(|&x| x != 0)? as u32 + 1; + self.word = self.queue[self.offset as usize]; + } + let bit = self.word.trailing_zeros() as usize; + self.word ^= 1 << bit; + Some(OrderedBlock::from_usize( + bit | self.offset as usize * bit_slice::WORD_BITS, + )) + } + + /// Inserts a single block into the queue. + #[track_caller] + pub fn insert(&mut self, block: OrderedBlock) { + debug_assert!(block.as_u32() < self.domain_size); + let word = block.as_usize() / bit_slice::WORD_BITS; + let bit = 1 << block.as_usize() % bit_slice::WORD_BITS; + + self.queue[self.offset as usize] = self.word; + self.queue[word] |= bit; + self.offset = self.offset.min(word as u32); + self.word |= self.queue[self.offset as usize]; + } + + /// Inserts a sorted sequence of blocks into the queue. 
+ #[track_caller] + pub fn insert_sorted(&mut self, blocks: impl IntoIterator) { + let mut blocks = blocks.into_iter(); + let Some(block) = blocks.next() else { + return; + }; + debug_assert!(block.as_u32() < self.domain_size); + let block = block.as_usize(); + let word = block / bit_slice::WORD_BITS; + let bit = 1 << block % bit_slice::WORD_BITS; + + self.queue[self.offset as usize] = self.word; + self.offset = self.offset.min(word as u32); + + self.queue[word] |= bit; + for block in blocks { + debug_assert!(block.as_u32() < self.domain_size); + let idx = block.as_usize() / bit_slice::WORD_BITS; + let bit = 1 << block.as_usize() % bit_slice::WORD_BITS; + self.queue[idx] |= bit; + } + + self.word = self.queue[self.offset as usize]; + } +} + +/// Extracts the body's edges and orders them via the block map. +pub fn get_body_edges<'arena>( + arena: &'arena DroplessArena, + body: &Body<'_>, + block_map: &BlockOrderMap<'_>, +) -> &'arena IndexSlice> { + let blocks = IndexSlice::::from_raw_mut( + arena.alloc_from_iter(iter::repeat(SliceSet::empty()).take(body.basic_blocks.len())), + ); + for (block, block_data) in body.basic_blocks.iter_enumerated() { + blocks[block_map.to_ordered[block]] = match block_data.terminator().kind { + TerminatorKind::Drop { + target, + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Call { + target: Some(target), + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Assert { + target, + unwind: UnwindAction::Cleanup(cleanup), + .. + } + | TerminatorKind::Yield { + resume: target, + drop: Some(cleanup), + .. + } => SliceSet::from_sorted( + arena.alloc_from_iter(minmax(block_map.to_ordered[target], block_map.to_ordered[cleanup])), + ), + + TerminatorKind::Goto { target } + | TerminatorKind::Drop { target, .. } + | TerminatorKind::Assert { target, .. } + | TerminatorKind::Call { + target: Some(target), .. + } + | TerminatorKind::Call { + unwind: UnwindAction::Cleanup(target), + .. 
+ } + | TerminatorKind::Yield { resume: target, .. } + | TerminatorKind::FalseEdge { + real_target: target, .. + } + | TerminatorKind::FalseUnwind { + real_target: target, .. + } => SliceSet::from_ref(arena.alloc(block_map.to_ordered[target])), + + TerminatorKind::SwitchInt { ref targets, .. } => SliceSet::from_unsorted_slice_dedup( + arena.alloc_from_iter(targets.all_targets().iter().map(|&target| block_map.to_ordered[target])), + ), + + TerminatorKind::InlineAsm { + ref targets, unwind, .. + } => { + let targets = targets.iter().map(|&target| block_map.to_ordered[target]); + SliceSet::from_unsorted_slice(if let UnwindAction::Cleanup(cleanup) = unwind { + arena.alloc_from_iter(targets.chain([block_map.to_ordered[cleanup]])) + } else { + arena.alloc_from_iter(targets) + }) + }, + + TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate(_) + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::TailCall { .. } + | TerminatorKind::Call { .. } + | TerminatorKind::CoroutineDrop => SliceSet::empty(), + } + } + blocks +} + +pub trait Analysis { + type Domain; + fn clone_block_entry(&mut self, src: &Self::Domain, dst: &mut Self::Domain, block: OrderedBlock); + fn join_domain( + &mut self, + src: &Self::Domain, + dst: &mut Self::Domain, + src_block: OrderedBlock, + dst_block: OrderedBlock, + ) -> bool; + fn apply_block(&mut self, state: &mut Self::Domain, block: OrderedBlock); +} + +pub fn run_analysis( + queue: &mut WorkQueue, + edges: &IndexSlice>, + states: &mut IndexSlice, + tmp_state: &mut A::Domain, + analysis: &mut A, +) { + debug_assert_eq!(queue.domain_size as usize, edges.len()); + debug_assert_eq!(queue.domain_size as usize, states.len()); + + queue.fill(); + while let Some(block) = queue.next() { + analysis.clone_block_entry(&states[block], tmp_state, block); + analysis.apply_block(tmp_state, block); + queue.insert_sorted( + edges[block] + .iter() + .copied() + .filter(|&dst_block| analysis.join_domain(tmp_state, &mut 
states[dst_block], block, dst_block)), + ); + } +} diff --git a/clippy_mir/src/childless_projection.rs b/clippy_mir/src/childless_projection.rs new file mode 100644 index 000000000000..8ae7ed2c59d3 --- /dev/null +++ b/clippy_mir/src/childless_projection.rs @@ -0,0 +1,237 @@ +use core::option; +use rustc_arena::DroplessArena; +use rustc_data_structures::fx::FxHashMap; +use rustc_index::{Idx as _, IndexSlice}; +use rustc_middle::mir::{Body, Local, Place, PlaceElem, ProjectionElem}; +use rustc_middle::ty::{Ty, TyCtxt, TyKind, TypingEnv}; +use rustc_span::def_id::DefId; + +pub use crate::projection::{ + EMPTY_PLACE_DATA, Idx, PlaceData, PlaceFilter, ResolvedPlace, Resolver, SINGLE_PLACE_DATA, +}; + +/// Type-based interner for `ProjectionData`. +struct TyProjectionInterner<'arena, 'tcx, F> { + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + projection_data: FxHashMap, &'arena PlaceData<'arena>>, +} +impl<'arena, 'tcx, F> TyProjectionInterner<'arena, 'tcx, F> +where + F: FnMut(Ty<'tcx>) -> bool, +{ + fn new( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + ) -> Self { + Self { + tcx, + typing_env, + arena, + ty_has_value, + vis_filter, + projection_data: FxHashMap::default(), + } + } + + /// Creates a new `ProjectionData` for the given type. 
+ fn alloc_for_ty(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + if (self.ty_has_value)(ty) { + SINGLE_PLACE_DATA + } else { + match *ty.kind() { + TyKind::Adt(def, args) if def.is_struct() => PlaceData::alloc_new( + self.arena, + false, + def.non_enum_variant().fields.iter().map(|f| { + if f.vis.is_accessible_from(self.vis_filter, self.tcx) { + let ty = f.ty(self.tcx, args); + self.intern( + self.tcx + .try_normalize_erasing_regions(self.typing_env, ty) + .unwrap_or(ty), + ) + } else { + EMPTY_PLACE_DATA + } + }), + ), + TyKind::Tuple(tys) => PlaceData::alloc_new(self.arena, false, tys.iter().map(|ty| self.intern(ty))), + _ => EMPTY_PLACE_DATA, + } + } + } + + /// Interns the `ProjectionData` for the given type. + fn intern(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + match self.projection_data.get(&ty) { + Some(&data) => data, + None => { + let data = self.alloc_for_ty(ty); + self.projection_data.insert(ty, data); + data + }, + } + } +} + +/// A resolved place according to a childless projection map. +#[derive(Clone, Copy)] +pub enum Resolved<'arena> { + Value { + start: Idx, + data: &'arena PlaceData<'arena>, + }, + Child { + parent: Idx, + }, + Deref { + parent: Idx, + }, +} +impl<'arena> ResolvedPlace<'arena> for Resolved<'arena> { + type Resolver = Map<'arena>; + type Parents = option::IntoIter; + + #[inline] + fn values(&self) -> (Idx, &'arena PlaceData<'arena>) { + if let Self::Value { start, data } = *self { + (start, data) + } else { + (Idx::ZERO, EMPTY_PLACE_DATA) + } + } + + #[inline] + fn is_deref(&self) -> bool { + matches!(self, Self::Deref { .. }) + } + + #[inline] + fn parents(&self, _: &Map<'arena>) -> Self::Parents { + if let Self::Deref { parent } | Self::Child { parent } = *self { + Some(parent).into_iter() + } else { + None.into_iter() + } + } + + #[inline] + fn affects_any_value(&self) -> bool { + if let Self::Value { data, .. 
} = *self { + data.contains_values() + } else { + true + } + } + + #[inline] + fn as_scalar_value(self) -> Option { + if let Self::Value { data, start } = self + && data.contains_values() + { + debug_assert_eq!(data.value_count as u32, 1); + debug_assert!(data.has_value); + Some(start) + } else { + None + } + } +} + +/// Mapping between local projections and the range of values they occupy. +/// +/// Like `Map`, but each place containing a value will not have any child nodes. +pub struct Map<'arena> { + local_map: &'arena IndexSlice)>, + domain_size: u32, +} +impl<'arena> Map<'arena> { + pub fn new<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + body: &Body<'tcx>, + ty_has_value: impl FnMut(Ty<'tcx>) -> bool, + vis_filter: DefId, + place_filter: &PlaceFilter<'_>, + ) -> Self { + let mut interner = TyProjectionInterner::new(tcx, typing_env, arena, ty_has_value, vis_filter); + let mut idx_count: u32 = 0u32; + let mut place_filter = place_filter.iter(); + Self { + local_map: IndexSlice::from_raw(arena.alloc_from_iter(body.local_decls.iter_enumerated().map( + |(local, local_decl)| { + let data = interner.intern( + tcx.try_normalize_erasing_regions(typing_env, local_decl.ty) + .unwrap_or(local_decl.ty), + ); + let data = if place_filter.local.is_some_and(|filter| filter == local) { + place_filter.apply_current(arena, data.fields, 0) + } else { + data + }; + let idx = idx_count; + idx_count += data.value_count; + (Idx::from_u32(idx), data) + }, + ))), + domain_size: idx_count, + } + } + + /// Get's the number of values + pub fn domain_size(&self) -> usize { + self.domain_size as usize + } + + pub fn resolve_slice_proj(&self, local: Local, projection: &[PlaceElem<'_>]) -> Resolved<'arena> { + let (mut idx, mut data) = self.local_map[local]; + let mut projections = projection.iter(); + while !data.has_value { + if let Some(projection) = projections.next() + && let &ProjectionElem::Field(field, _) = projection + { + // Note: if 
all fields contain no value then no field data will be stored. + if let Some(field) = data.fields.get(field) { + data = field.data; + idx = idx.plus(field.offset as usize); + continue; + } + data = EMPTY_PLACE_DATA; + } + break; + } + if data.has_value { + if projections + .clone() + .any(|projection| matches!(projection, ProjectionElem::Deref)) + { + return Resolved::Deref { parent: idx }; + } else if projections + .next() + .is_some_and(|projection| matches!(projection, ProjectionElem::Field(..))) + { + return Resolved::Child { parent: idx }; + } + } + Resolved::Value { data, start: idx } + } +} +impl<'arena> Resolver<'arena> for Map<'arena> { + type Resolved = Resolved<'arena>; + + fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>) { + self.local_map[local] + } + + fn resolve(&self, place: Place<'_>) -> Self::Resolved { + self.resolve_slice_proj(place.local, place.projection) + } +} diff --git a/clippy_mir/src/lib.rs b/clippy_mir/src/lib.rs new file mode 100644 index 000000000000..e4706aed3b13 --- /dev/null +++ b/clippy_mir/src/lib.rs @@ -0,0 +1,18 @@ +#![feature(anonymous_lifetime_in_impl_trait)] +#![feature(cmp_minmax)] +#![feature(if_let_guard)] +#![feature(let_chains)] +#![feature(rustc_private)] + +extern crate rustc_abi; +extern crate rustc_arena; +extern crate rustc_data_structures; +extern crate rustc_index; +extern crate rustc_middle; +extern crate rustc_mir_dataflow; +extern crate rustc_span; + +pub mod analysis; +pub mod childless_projection; +pub mod projection; +pub mod value_tracking; diff --git a/clippy_mir/src/projection.rs b/clippy_mir/src/projection.rs new file mode 100644 index 000000000000..4ba7266a2342 --- /dev/null +++ b/clippy_mir/src/projection.rs @@ -0,0 +1,677 @@ +//! Create mappings that can resolve local places to a set of tracked values. +//! +//! Starting with each local as a tree where the local is the root node, each field is a +//! 
child node, and sub-fields are children of their respective nodes; a projection mapping
//! will map each node to a unique index. Once constructed this mapping can be used to
//! resolve a place to its matching value and that value's child and parent values.
//!
//! The constructed map may have multiple filters which prevent nodes from being given an
//! associated index:
//!
//! * First is a visibility filter. Any field which cannot be accessed from the current body will
//!   not be assigned an index. This filter is not optional.
//! * Second is a type based filter. This will prevent certain types from being assigned an index,
//!   but will still allow both parents and children to be given one.
//! * Third is a place based filter. This will prevent a specific place as well as both its parents
//!   and children from being assigned an index.
//!
//! # Example
//!
//! Given the following struct:
//!
//! ```rust
//! struct Foo {
//!     x: u32,
//!     y: (u32, i32),
//! }
//! ```
//!
//! This will create the following tree (each node's index is in parentheses):
//!
//! ```none
//!      Foo (0)
//!     /       \
//!   x (1)    y (2)
//!           /     \
//!        0 (3)   1 (4)
//! ```
//!
//! Places within the struct are resolved as follows:
//!
//! * Foo:
//!   * parents: N/A
//!   * values: 0, 1, 2, 3, 4
//! * Foo.x:
//!   * parents: 0
//!   * values: 1
//! * Foo.y:
//!   * parents: 0
//!   * values: 2, 3, 4
//! * Foo.y.0:
//!   * parents: 2, 0
//!   * values: 3
//! * Foo.y.1:
//!   * parents: 2, 0
//!   * values: 4
//!
//! If tuples were filtered from storing a value the following tree would be constructed:
//!
//! ```none
//!      Foo (0)
//!     /       \
//!   x (1)      y
//!            /   \
//!        0 (2)   1 (3)
//! ```
//!
//! Places would be resolved as follows:
//!
//! * Foo:
//!   * parents: N/A
//!   * values: 0, 1, 2, 3
//! * Foo.x:
//!   * parents: 0
//!   * values: 1
//! * Foo.y:
//!   * parents: 0
//!   * values: 2, 3
//! * Foo.y.0:
//!   * parents: 0
//!   * values: 2
//! * Foo.y.1:
//!
* parents: 0 +//! * values: 4 + +use clippy_data_structures::{CountedIter, VecSet}; +use core::ops::Range; +use core::{ptr, slice}; +use rustc_abi::FieldIdx; +use rustc_arena::DroplessArena; +use rustc_data_structures::fx::FxHashMap; +use rustc_index::{Idx as _, IndexSlice}; +use rustc_middle::mir::visit::Visitor; +use rustc_middle::mir::{Body, Local, Location, Place, ProjectionElem, Rvalue}; +use rustc_middle::ty::{Ty, TyCtxt, TyKind, TypingEnv}; +use rustc_span::def_id::DefId; + +rustc_index::newtype_index! { + /// Index to a value + #[orderable] + pub struct Idx {} +} + +#[derive(Clone, Copy)] +pub struct FieldData<'arena> { + /// The offset to use to get to the first value stored for this field. + pub offset: u32, + /// The projection data for this field. + pub data: &'arena PlaceData<'arena>, +} +impl FieldData<'_> { + /// A field with no values. + pub const EMPTY: Self = Self { + // The offset doesn't actually matter since the occupied range is empty. + offset: 0, + data: EMPTY_PLACE_DATA, + }; +} + +/// Traversal data about a node in the projection tree. +#[non_exhaustive] +pub struct PlaceData<'arena> { + /// The offset and projection data for each immediate child. + pub fields: &'arena IndexSlice>, + /// The number of values stored by this type, including all children. + pub value_count: u32, + /// Is a value stored for this type. + pub has_value: bool, +} + +// Avoid the need to allocate the two most common values. +pub static EMPTY_PLACE_DATA: &PlaceData<'_> = &PlaceData { + fields: IndexSlice::from_raw(&[]), + value_count: 0, + has_value: false, +}; +pub static SINGLE_PLACE_DATA: &PlaceData<'_> = &PlaceData { + fields: IndexSlice::from_raw(&[]), + value_count: 1, + has_value: true, +}; + +impl PartialEq for PlaceData<'_> { + #[inline] + fn eq(&self, other: &Self) -> bool { + // Most instances will be interned so use pointer equality here. 
+ ptr::addr_eq(self, other) + } +} + +impl<'arena> PlaceData<'arena> { + #[inline] + pub fn contains_values(&self) -> bool { + // No need to dereference. All empty instances are replaced with `EMPTY_PLACE_DATA`. + self != EMPTY_PLACE_DATA + } + + pub fn alloc_new( + arena: &'arena DroplessArena, + has_value: bool, + fields: impl Iterator, + ) -> &'arena Self { + let mut value_count = has_value as u32; + let fields = arena.alloc_from_iter(fields.map(|data| { + let offset = value_count; + value_count += data.value_count; + FieldData { offset, data } + })); + if value_count == has_value as u32 { + if has_value { SINGLE_PLACE_DATA } else { EMPTY_PLACE_DATA } + } else { + arena.alloc(Self { + fields: IndexSlice::from_raw(fields), + value_count, + has_value, + }) + } + } +} + +/// Type-based interner for `ProjectionData` +struct TyProjectionInterner<'arena, 'tcx, F> { + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + projection_data: FxHashMap, &'arena PlaceData<'arena>>, +} +impl<'arena, 'tcx, F> TyProjectionInterner<'arena, 'tcx, F> +where + F: FnMut(Ty<'tcx>) -> bool, +{ + fn new( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + ty_has_value: F, + vis_filter: DefId, + ) -> Self { + Self { + tcx, + typing_env, + arena, + ty_has_value, + vis_filter, + projection_data: FxHashMap::default(), + } + } + + /// Creates a new `ProjectionData` for the given type. 
+ fn alloc_for_ty(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + let has_value = (self.ty_has_value)(ty); + match *ty.kind() { + TyKind::Adt(def, args) if def.is_struct() => PlaceData::alloc_new( + self.arena, + has_value, + def.non_enum_variant().fields.iter().map(|f| { + if f.vis.is_accessible_from(self.vis_filter, self.tcx) { + let ty = f.ty(self.tcx, args); + self.intern( + self.tcx + .try_normalize_erasing_regions(self.typing_env, ty) + .unwrap_or(ty), + ) + } else { + EMPTY_PLACE_DATA + } + }), + ), + TyKind::Tuple(tys) => PlaceData::alloc_new(self.arena, has_value, tys.iter().map(|ty| self.intern(ty))), + _ if has_value => SINGLE_PLACE_DATA, + _ => EMPTY_PLACE_DATA, + } + } + + /// Interns the `ProjectionData` for the given type. + fn intern(&mut self, ty: Ty<'tcx>) -> &'arena PlaceData<'arena> { + match self.projection_data.get(&ty) { + Some(&data) => data, + None => { + let data = self.alloc_for_ty(ty); + self.projection_data.insert(ty, data); + data + }, + } + } +} + +pub(crate) struct PlaceFilterIter<'a> { + iter: slice::Iter<'a, LocalPlace<'a>>, + pub local: Option, + pub projection: &'a [FieldIdx], +} +impl<'a> PlaceFilterIter<'a> { + /// Creates a new `ProjectionData` by applying the current filter. + /// + /// This will move to the next filter not affecting the current field. 
+ pub(crate) fn apply_current<'arena>( + &mut self, + arena: &'arena DroplessArena, + fields: &'arena IndexSlice>, + depth: usize, + ) -> &'arena PlaceData<'arena> { + if let Some(&filter_field) = self.projection.get(depth) { + let filter_local = self.local; + let filter_projection = &self.projection[..depth]; + let mut filter_field: Option = Some(filter_field); + let data = PlaceData::alloc_new( + arena, + false, + fields.iter_enumerated().map(|(field, field_data)| { + if filter_field == Some(field) { + let fields = field_data.data.fields; + let data = self.apply_current(arena, fields, depth + 1); + // Get the next field to filter if the filter still has the same parent field. + filter_field = + self.projection.get(depth).copied().filter(|_| { + self.local == filter_local && self.projection.starts_with(filter_projection) + }); + data + } else { + field_data.data + } + }), + ); + // Skip to the filter after the current field. + // Note: Child fields may have been dropped before applying this filter. + while filter_field.is_some() { + (self.local, self.projection) = self + .iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + filter_field = self + .projection + .get(depth) + .copied() + .filter(|_| self.local == filter_local && self.projection.starts_with(filter_projection)); + } + data + } else { + // Found the filtered field. Step to the next filter. 
+ (self.local, self.projection) = self + .iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + EMPTY_PLACE_DATA + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +struct LocalPlace<'arena> { + local: Local, + projection: &'arena [FieldIdx], +} +impl<'arena> LocalPlace<'arena> { + fn from_place(arena: &'arena DroplessArena, place: Place<'_>) -> Self { + Self { + local: place.local, + projection: arena.alloc_from_iter(CountedIter(place.projection.iter().map_while(|proj| { + if let ProjectionElem::Field(idx, _) = proj { + Some(idx) + } else { + None + } + }))), + } + } + + fn is_parent_of(self, other: LocalPlace) -> bool { + self.local == other.local + && self.projection.len() <= other.projection.len() + && self.projection.iter().zip(other.projection).all(|(&x, &y)| x == y) + } +} + +pub struct PlaceFilter<'a> { + filter: VecSet>, +} +impl<'a> PlaceFilter<'a> { + /// Creates a filter which will remove all places that have a raw borrow taken. 
+ pub fn new_raw_borrow_filter(arena: &'a DroplessArena, body: &Body<'_>) -> Self { + struct V<'a> { + arena: &'a DroplessArena, + borrows: Vec>, + } + impl<'tcx> Visitor<'tcx> for V<'_> { + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, _: Location) { + if let Rvalue::RawPtr(_, place) = *rvalue { + self.borrows.push(LocalPlace::from_place(self.arena, place)); + } + } + } + let mut v = V { + arena, + borrows: Vec::new(), + }; + for (block, block_data) in body.basic_blocks.iter_enumerated() { + v.visit_basic_block_data(block, block_data); + } + v.borrows.sort(); + v.borrows.dedup_by(|&mut second, &mut first| first.is_parent_of(second)); + Self { + filter: VecSet::from_sorted(v.borrows), + } + } + + pub(crate) fn iter(&self) -> PlaceFilterIter<'_> { + let mut iter = self.filter.iter(); + let (local, projection) = iter + .next() + .map_or((None, [].as_slice()), |x| (Some(x.local), x.projection)); + PlaceFilterIter { + iter, + local, + projection, + } + } +} + +#[derive(Clone)] +struct ResolvedParentsField<'a> { + fields: slice::Iter<'a, FieldData<'a>>, + /// The base index to use for all fields. + idx: Idx, + /// The parent index to use for all fields. 
+ parent: Option, +} +struct ResolvedParents<'a> { + locals: slice::Iter<'a, (Idx, &'a PlaceData<'a>)>, + parents: Vec>, + current: ResolvedParentsField<'a>, + hint: u32, +} +impl<'a> ResolvedParents<'a> { + fn new(locals: &'a IndexSlice)>, hint: u32) -> Self { + Self { + locals: locals.iter(), + parents: Vec::new(), + current: ResolvedParentsField { + fields: [].iter(), + idx: Idx::ZERO, + parent: None, + }, + hint, + } + } +} +impl Iterator for ResolvedParents<'_> { + type Item = Option; + fn next(&mut self) -> Option { + loop { + if let Some(field) = self.current.fields.next() { + self.parents.push(self.current.clone()); + let parent = self.current.parent; + self.current = ResolvedParentsField { + fields: field.data.fields.iter(), + idx: self.current.idx.plus(field.offset as usize), + parent: None, + }; + if field.data.has_value { + self.current.parent = Some(self.current.idx); + return Some(parent); + } + } else { + if let Some(field) = self.parents.pop() { + self.current = field; + } else { + let &(idx, projection) = self + .locals + .by_ref() + .filter(|&(_, data)| data.contains_values()) + .next()?; + self.current = ResolvedParentsField { + fields: projection.fields.iter(), + idx, + parent: self.parents.last().and_then(|x| x.parent), + }; + if projection.has_value { + self.current.parent = Some(self.current.idx); + return Some(None); + } + }; + } + } + } + + /// Pass the size to `DroplessArena::alloc_from_iter`` + fn size_hint(&self) -> (usize, Option) { + (self.hint as usize, Some(self.hint as usize)) + } +} + +/// A place which has been resolved by a projection map. +pub trait ResolvedPlace<'arena>: Copy { + type Resolver; + type Parents: Iterator; + + /// Gets the first value index and the projection data for the place. + fn values(&self) -> (Idx, &'arena PlaceData<'arena>); + + /// whether the place involve a deref projection. + fn is_deref(&self) -> bool; + + /// The parents of the place from most to least specific. 
+ fn parents(&self, map: &Self::Resolver) -> Self::Parents; + + // Checks if this place affects any values. + fn affects_any_value(&self) -> bool; + + /// Gets the contained value assuming the place refers to a scalar value. + /// + /// # Panics + /// This may panic if this place contains multiple values. + fn as_scalar_value(self) -> Option; +} + +pub trait Resolver<'arena> { + type Resolved: ResolvedPlace<'arena, Resolver = Self>; + + /// Resolves the place to the set of values it contains. + fn resolve(&self, place: Place<'_>) -> Self::Resolved; + /// Resolves the local to the set of values it contains. + fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>); + + /// Gets the set of values contained in the body's arguments. + fn resolve_args(&self, body: &Body<'_>) -> Range { + if body.arg_count > 0 { + let (args_start, _) = self.resolve_local(Local::from_u32(1)); + let (args_end, args_data) = self.resolve_local(Local::from_usize(1 + body.arg_count)); + args_start..args_end.plus(args_data.value_count as usize) + } else { + Idx::ZERO..Idx::ZERO + } + } +} + +#[derive(Clone)] +pub struct ParentIter<'a> { + parent_map: &'a IndexSlice>, + next: Option, +} +impl Iterator for ParentIter<'_> { + type Item = Idx; + fn next(&mut self) -> Option { + match self.next { + Some(x) => { + self.next = self.parent_map[x]; + Some(x) + }, + None => None, + } + } +} + +/// A place which has been resolved by a projection map. +#[derive(Clone, Copy)] +pub enum Resolved<'arena> { + Value { + data: &'arena PlaceData<'arena>, + parent: Option, + idx: Idx, + }, + Deref { + parent: Idx, + }, +} +impl<'arena> ResolvedPlace<'arena> for Resolved<'arena> { + type Resolver = Map<'arena>; + type Parents = ParentIter<'arena>; + + #[inline] + fn values(&self) -> (Idx, &'arena PlaceData<'arena>) { + if let Self::Value { data, idx, .. 
} = *self { + (idx, data) + } else { + (Idx::ZERO, EMPTY_PLACE_DATA) + } + } + + #[inline] + fn is_deref(&self) -> bool { + matches!(self, Self::Deref { .. }) + } + + #[inline] + fn parents(&self, map: &Map<'arena>) -> Self::Parents { + ParentIter { + parent_map: map.parent_map, + next: match *self { + Self::Value { parent, .. } => parent, + Self::Deref { parent } => Some(parent), + }, + } + } + + #[inline] + fn affects_any_value(&self) -> bool { + if let Self::Value { data, parent, .. } = *self { + data.contains_values() || parent.is_some() + } else { + true + } + } + + #[inline] + fn as_scalar_value(self) -> Option { + match self { + Self::Value { data, idx, .. } => { + debug_assert_eq!(data.value_count, data.has_value as u32); + data.has_value.then_some(idx) + }, + Self::Deref { .. } => None, + } + } +} + +/// Mapping between local projections and the range of values they occupy. +pub struct Map<'arena> { + local_map: &'arena IndexSlice)>, + parent_map: &'arena IndexSlice>, +} +impl<'arena> Map<'arena> { + pub fn new<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + arena: &'arena DroplessArena, + body: &Body<'tcx>, + ty_has_value: impl FnMut(Ty<'tcx>) -> bool, + vis_filter: DefId, + place_filter: &PlaceFilter<'_>, + ) -> Self { + let mut interner = TyProjectionInterner::new(tcx, typing_env, arena, ty_has_value, vis_filter); + let mut idx_count: u32 = 0u32; + let mut place_filter = place_filter.iter(); + let local_map = IndexSlice::::from_raw(arena.alloc_from_iter( + body.local_decls.iter_enumerated().map(|(local, local_decl)| { + let data = interner.intern( + tcx.try_normalize_erasing_regions(typing_env, local_decl.ty) + .unwrap_or(local_decl.ty), + ); + let data = if place_filter.local.is_some_and(|filter| filter == local) { + place_filter.apply_current(arena, data.fields, 0) + } else { + data + }; + let idx = idx_count; + idx_count += data.value_count; + (Idx::from_u32(idx), data) + }), + )); + let parent_map = + 
IndexSlice::::from_raw(arena.alloc_from_iter(ResolvedParents::new(local_map, idx_count))); + Self { local_map, parent_map } + } + + /// Get's the number of values + pub fn domain_size(&self) -> usize { + self.parent_map.len() + } + + pub fn domain_size_u32(&self) -> u32 { + self.parent_map.len() as u32 + } + + pub fn local_for_idx(&self, idx: Idx) -> Local { + let mut res = Local::ZERO; + for (l, &(x, data)) in self.local_map.iter_enumerated() { + if data.has_value { + if x <= idx { + res = l; + } else { + break; + } + } + } + res + } +} +impl<'arena> Resolver<'arena> for Map<'arena> { + type Resolved = Resolved<'arena>; + + fn resolve(&self, place: Place) -> Self::Resolved { + let (mut idx, mut data) = self.local_map[place.local]; + let mut parent = None; + let mut projections = place.projection.iter(); + while let Some(projection) = projections.next() { + if data.has_value { + parent = Some(idx); + } + if let ProjectionElem::Field(field, _) = projection { + // Note: if all fields contain no value then no field data will be stored. + if let Some(field) = data.fields.get(field) { + data = field.data; + idx = idx.plus(field.offset as usize); + continue; + } + data = EMPTY_PLACE_DATA; + } + if let Some(parent) = parent + && (matches!(projection, ProjectionElem::Deref) + || projections.any(|projection| matches!(projection, ProjectionElem::Deref))) + { + return Resolved::Deref { parent }; + } else { + // At this point we either have a deref of an untracked value, or a projection + // that stays within the local. 
+ break; + } + } + Resolved::Value { data, parent, idx } + } + + fn resolve_local(&self, local: Local) -> (Idx, &'arena PlaceData<'arena>) { + self.local_map[local] + } +} diff --git a/clippy_mir/src/value_tracking.rs b/clippy_mir/src/value_tracking.rs new file mode 100644 index 000000000000..6e11e1c84daf --- /dev/null +++ b/clippy_mir/src/value_tracking.rs @@ -0,0 +1,668 @@ +use crate::projection::{self, PlaceData, ResolvedPlace as _, Resolver}; +use core::ops::Range; +use rustc_abi::{FieldIdx, VariantIdx}; +use rustc_index::{Idx, IndexSlice}; +use rustc_middle::mir::{ + AggregateKind, BasicBlockData, BinOp, Body, BorrowKind, CastKind, ConstOperand, CopyNonOverlapping, + InlineAsmOperand, Local, NonDivergingIntrinsic, NullOp, Operand, Place, RETURN_PLACE, RawPtrKind, Rvalue, + Statement, StatementKind, Terminator, TerminatorKind, UnOp, +}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::source_map::Spanned; +use rustc_span::{Span, sym}; + +/// Visitor for tracking the movement of values within a MIR body. +pub trait Visitor<'arena, 'tcx>: Sized { + type Resolver: Resolver<'arena>; + fn resolver(&self) -> &Self::Resolver; + + /// Gets the `TyCtxt` this visitor instance is associated with. + fn tcx(&self) -> TyCtxt<'tcx>; + + /// Gets the MIR body this visitor instance is associated with. + fn body(&self) -> &Body<'tcx>; + + /// Visits a read of an individual value. + fn visit_read_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a mutation of an individual value. + fn visit_mutate_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a write of `uninit` bytes to an individual value. + fn visit_uninit_idx(&mut self, idx: projection::Idx, sp: Span); + + /// Visits a copy of one value to another. + fn visit_copy_idx(&mut self, dst: projection::Idx, src: projection::Idx, sp: Span); + + /// Visits a move of one value to another. 
+ fn visit_move_idx(&mut self, dst: projection::Idx, src: projection::Idx, sp: Span); + + /// Visits a move of an individual value to an unknown place. + /// + /// Default to calling `visit_read_idx` followed by `visit_uninit_idx`. + #[inline] + fn visit_consume_idx(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_idx(idx, sp); + self.visit_uninit_idx(idx, sp); + } + + /// Visits a read of a set of values. + #[inline] + fn visit_read_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_read_idx(i, sp); + } + } + + /// Visits a mutation of a set of values. + #[inline] + fn visit_mutate_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_mutate_idx(i, sp); + } + } + + /// Visits a write of `uninit` bytes to a set of values. + #[inline] + fn visit_uninit_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_uninit_idx(i, sp); + } + } + + /// Visits a copy from one set of values to another. + #[inline] + fn visit_copy_range(&mut self, dst: projection::Idx, src: Range, sp: Span) { + for (dst, src) in (dst..).zip(src) { + self.visit_copy_idx(dst, src, sp); + } + } + + /// Visits a move from one set of values to another. + #[inline] + fn visit_move_range(&mut self, dst: projection::Idx, src: Range, sp: Span) { + for (dst, src) in (dst..).zip(src) { + self.visit_move_idx(dst, src, sp); + } + } + + /// Visits a move of a set of values to an unknown place. + #[inline] + fn visit_consume_range(&mut self, range: Range, sp: Span) { + for i in range { + self.visit_consume_idx(i, sp); + } + } + + /// Visits the parent of a read field. + #[inline] + fn visit_read_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_idx(idx, sp); + } + + /// Visits the parent of a mutated field. + #[inline] + fn visit_mutate_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_mutate_idx(idx, sp); + } + + /// Visits the parent of a consumed field. 
+ /// + /// Defaults to calling `visit_read_parent` followed by `visit_mutate_parent`. + #[inline] + fn visit_consume_parent(&mut self, idx: projection::Idx, sp: Span) { + self.visit_read_parent(idx, sp); + self.visit_mutate_parent(idx, sp); + } + + /// Visits a read of a resolved place. + /// + /// Defaults to calling `visit_read_range` for the contained values and `visit_read_idx` for + /// each parent. + fn visit_read_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_read_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + } + + /// Visits a mutation of a resolved place. + /// + /// Defaults to calling `visit_mutate_range` for the contained values and `visit_mutate_idx` for + /// each parent. + fn visit_mutate_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_mutate_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a write of `uninit` bytes to a resolved place. + /// + /// Defaults to calling `visit_uninit_range` for the contained values and `visit_mutate_idx` for + /// each parent. + fn visit_uninit_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_uninit_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a copy from one resolved place to another. 
+ fn visit_copy_place(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + let dst = self.resolver().resolve(dst); + let src = self.resolver().resolve(src); + let (dst_start, dst_data) = dst.values(); + let (src_start, src_data) = src.values(); + if dst_data.contains_values() || src_data.contains_values() { + copy_place(self, Copy, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a move from one resolved place to another. + fn visit_move_place(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + let dst = self.resolver().resolve(dst); + let src = self.resolver().resolve(src); + let (dst_start, dst_data) = dst.values(); + let (src_start, src_data) = src.values(); + if dst_data.contains_values() || src_data.contains_values() { + copy_place(self, Move, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(self.resolver()) { + self.visit_consume_parent(idx, sp); + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + + /// Visits a move from a resolved place to an unknown location. + /// + /// Defaults to calling `visit_consume_range` followed by `visit_read_idx` and + /// `visit_mutate_idx` for each parent. + fn visit_consume_place(&mut self, place: Place<'tcx>, sp: Span) { + let place = self.resolver().resolve(place); + let (start, data) = place.values(); + if data.contains_values() { + self.visit_consume_range(start..start.plus(data.value_count as usize), sp); + } + for idx in place.parents(self.resolver()) { + self.visit_consume_parent(idx, sp); + } + } + + /// Visits a drop of a resolved place. + /// + /// Defaults to calling `visit_uninit_place`. 
+ #[inline] + fn visit_drop_place(&mut self, place: Place<'tcx>, sp: Span) { + self.visit_uninit_place(place, sp); + } + + #[inline] + fn visit_uninit_local(&mut self, local: Local, sp: Span) { + let (start, data) = self.resolver().resolve_local(local); + if data.contains_values() { + self.visit_uninit_range(start..start.plus(data.value_count as usize), sp); + } + } + + #[inline] + fn visit_consume_local(&mut self, local: Local, sp: Span) { + let (start, data) = self.resolver().resolve_local(local); + if data.contains_values() { + self.visit_consume_range(start..start.plus(data.value_count as usize), sp); + } + } + + #[inline] + fn visit_assign_constant_field( + &mut self, + dst_start: projection::Idx, + dst_data: &PlaceData<'_>, + _src: &ConstOperand<'tcx>, + sp: Span, + ) { + if dst_data.contains_values() { + self.visit_mutate_range(dst_start..dst_start.plus(dst_data.value_count as usize), sp); + } + } + + fn visit_assign_aggregate( + &mut self, + dst: Place<'tcx>, + _kind: &AggregateKind<'tcx>, + ops: &IndexSlice>, + sp: Span, + ) { + let dst = self.resolver().resolve(dst); + let (dst_start, dst_data) = dst.values(); + if dst_data.contains_values() && dst_data.value_count > u32::from(dst_data.has_value) { + debug_assert_eq!(dst_data.fields.len(), ops.len()); + for (dst_field, op) in dst_data.fields.iter().zip(ops) { + if dst_field.data.contains_values() { + let dst = dst_start.plus(dst_field.offset as usize); + match op { + &Operand::Copy(src) => { + copy_aggregate_field(self, Copy, dst, dst_field.data, self.resolver().resolve(src), sp) + }, + &Operand::Move(src) => { + copy_aggregate_field(self, Move, dst, dst_field.data, self.resolver().resolve(src), sp) + }, + Operand::Constant(src) => self.visit_assign_constant_field(dst, dst_field.data, src, sp), + } + } + } + } else { + for op in ops { + walk_operand(self, op, sp); + } + } + for idx in dst.parents(self.resolver()) { + self.visit_mutate_idx(idx, sp); + } + } + + #[inline] + fn visit_assign_unary_op(&mut 
self, dst: Place<'tcx>, _op: UnOp, src: &Operand<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_binary_op( + &mut self, + dst: Place<'tcx>, + _op: BinOp, + (lhs, rhs): &(Operand<'tcx>, Operand<'tcx>), + sp: Span, + ) { + walk_operand(self, lhs, sp); + walk_operand(self, rhs, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_cast(&mut self, dst: Place<'tcx>, _kind: CastKind, src: &Operand<'tcx>, _ty: Ty<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_len(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + self.visit_read_place(src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_discriminant(&mut self, dst: Place<'tcx>, src: Place<'tcx>, sp: Span) { + self.visit_read_place(src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_null_op(&mut self, dst: Place<'tcx>, _op: &NullOp<'tcx>, _ty: Ty<'tcx>, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_raw_ptr(&mut self, dst: Place<'tcx>, src: Place<'tcx>, _kind: RawPtrKind, sp: Span) { + // A raw borrow can invalidate any value tracking done unless special care is taken. + debug_assert!( + !self.resolver().resolve(src).affects_any_value(), + "A raw borrow of a tracked place was taken at `{sp:?}`. \ + Use `clippy_mir::projection::create_raw_borrow_filter` to filter out these places.", + ); + self.visit_mutate_place(dst, sp); + } + + fn visit_assign_borrow(&mut self, dst: Place<'tcx>, src: Place<'tcx>, kind: BorrowKind, sp: Span) { + let src = self.resolver().resolve(src); + let (src_start, src_data) = src.values(); + if src_data.contains_values() { + let src_range = src_start..src_start.plus(src_data.value_count as usize); + if matches!(kind, BorrowKind::Mut { .. 
}) { + self.visit_mutate_range(src_range.clone(), sp); + } + self.visit_read_range(src_range, sp); + } + if matches!(kind, BorrowKind::Mut { .. }) { + for idx in src.parents(self.resolver()) { + self.visit_mutate_parent(idx, sp); + } + } + for idx in src.parents(self.resolver()) { + self.visit_read_parent(idx, sp); + } + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_shallow_box(&mut self, dst: Place<'tcx>, src: &Operand<'tcx>, _ty: Ty<'tcx>, sp: Span) { + walk_operand(self, src, sp); + self.visit_mutate_place(dst, sp); + } + + #[inline] + fn visit_assign_constant(&mut self, dst: Place<'tcx>, _src: &ConstOperand<'tcx>, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + fn visit_assignment(&mut self, stmt: &(Place<'tcx>, Rvalue<'tcx>), sp: Span) { + let dst = stmt.0; + match &stmt.1 { + Rvalue::Aggregate(kind, ops) => self.visit_assign_aggregate(dst, kind, ops, sp), + &Rvalue::UnaryOp(op, ref src) => self.visit_assign_unary_op(dst, op, src, sp), + &Rvalue::Cast(kind, ref src, ty) => self.visit_assign_cast(dst, kind, src, ty, sp), + &Rvalue::Len(src) => self.visit_assign_len(dst, src, sp), + &Rvalue::BinaryOp(kind, ref ops) => self.visit_assign_binary_op(dst, kind, ops, sp), + &Rvalue::Discriminant(src) => self.visit_assign_discriminant(dst, src, sp), + &Rvalue::CopyForDeref(src) => self.visit_copy_place(dst, src, sp), + &Rvalue::NullaryOp(ref op, ty) => self.visit_assign_null_op(dst, op, ty, sp), + &Rvalue::RawPtr(kind, src) => self.visit_assign_raw_ptr(dst, src, kind, sp), + &Rvalue::Ref(_, kind, src) => self.visit_assign_borrow(dst, src, kind, sp), + Rvalue::Repeat(value, _) => { + walk_operand(self, value, sp); + self.visit_mutate_place(dst, sp); + }, + &Rvalue::ShallowInitBox(ref src, ty) => self.visit_assign_shallow_box(dst, src, ty, sp), + Rvalue::Use(src) => match src { + &Operand::Move(src) => self.visit_move_place(dst, src, sp), + &Operand::Copy(src) => self.visit_copy_place(dst, src, sp), + Operand::Constant(src) => 
self.visit_assign_constant(dst, src, sp), + }, + Rvalue::ThreadLocalRef(_) => self.visit_mutate_place(dst, sp), + Rvalue::WrapUnsafeBinder(op, _) => walk_operand(self, op, sp), + } + } + + fn visit_copy_nonoverlapping(&mut self, args: &CopyNonOverlapping<'tcx>, sp: Span) { + walk_operand(self, &args.src, sp); + walk_operand(self, &args.dst, sp); + walk_operand(self, &args.count, sp); + if let Operand::Copy(dst) = args.dst { + self.visit_mutate_place(dst, sp); + } + } + + #[inline] + fn visit_set_discriminant(&mut self, dst: Place<'tcx>, _variant: VariantIdx, sp: Span) { + self.visit_mutate_place(dst, sp); + } + + fn visit_statement(&mut self, stmt: &Statement<'tcx>) { + let sp = stmt.source_info.span; + match &stmt.kind { + StatementKind::Assign(stmt) => self.visit_assignment(stmt, sp), + &StatementKind::SetDiscriminant { + ref place, + variant_index, + } => self.visit_set_discriminant(**place, variant_index, sp), + StatementKind::Intrinsic(i) => { + if let NonDivergingIntrinsic::CopyNonOverlapping(copy) = &**i { + self.visit_copy_nonoverlapping(copy, sp); + } + }, + &(StatementKind::StorageLive(local) | StatementKind::StorageDead(local)) => { + // Note: `StorageLive` on a live local fills it with uninit bytes. + self.visit_uninit_local(local, sp); + }, + StatementKind::Deinit(place) => self.visit_uninit_place(**place, sp), + StatementKind::FakeRead(..) + | StatementKind::Retag(..) + | StatementKind::PlaceMention(..) + | StatementKind::AscribeUserType(..) + | StatementKind::Coverage(..) + | StatementKind::ConstEvalCounter + | StatementKind::Nop + | StatementKind::BackwardIncompatibleDropHint { .. } => {}, + } + } + + /// Visits a `Call` terminator. + /// + /// By default this will treat calls to `core::mem::drop` the same as a `Drop` terminator. + #[inline] + fn visit_call(&mut self, func: &Operand<'tcx>, args: &[Spanned>], dst: &Place<'tcx>, sp: Span) { + walk_call(self, func, args, dst, sp); + } + + /// Visits a `TailCall` terminator. 
+ /// + /// By default this will treat calls to `core::mem::drop` the same as a `Drop` terminator. + #[inline] + fn visit_tail_call(&mut self, func: &Operand<'tcx>, args: &[Spanned>], sp: Span) { + walk_tail_call(self, func, args, sp); + } + + #[inline] + fn visit_inline_asm(&mut self, ops: &[InlineAsmOperand<'tcx>], sp: Span) { + walk_inline_asm(self, ops, sp); + } + + fn visit_terminator(&mut self, term: &Terminator<'tcx>) { + let sp = term.source_info.span; + match &term.kind { + TerminatorKind::Assert { cond: value, .. } | TerminatorKind::Yield { value, .. } => { + walk_operand(self, value, sp) + }, + TerminatorKind::Call { + func, + args, + destination, + .. + } => self.visit_call(func, args, destination, sp), + TerminatorKind::TailCall { func, args, .. } => self.visit_tail_call(func, args, sp), + TerminatorKind::InlineAsm { operands, .. } => self.visit_inline_asm(operands, sp), + &TerminatorKind::Drop { place, .. } => self.visit_drop_place(place, sp), + TerminatorKind::Return => self.visit_consume_local(RETURN_PLACE, sp), + TerminatorKind::SwitchInt { discr, .. } => walk_operand(self, discr, sp), + TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::UnwindTerminate { .. } + | TerminatorKind::Unreachable + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. 
} => {}, + } + } + + fn visit_block_data(&mut self, block: &BasicBlockData<'tcx>) { + for stmt in &block.statements { + self.visit_statement(stmt); + } + if let Some(term) = &block.terminator { + self.visit_terminator(term); + } + } +} + +pub trait CopyVisitor<'arena, 'tcx, V: Visitor<'arena, 'tcx>>: core::marker::Copy { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span); + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span); + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span); + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span); +} + +#[derive(Clone, Copy)] +pub struct Move; +impl<'arena, 'tcx, V: Visitor<'arena, 'tcx>> CopyVisitor<'arena, 'tcx, V> for Move { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span) { + visitor.visit_move_range(dst, src, sp); + } + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span) { + visitor.visit_move_idx(dst, src, sp); + } + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span) { + visitor.visit_consume_range(range, sp); + } + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span) { + visitor.visit_consume_idx(idx, sp); + } +} + +#[derive(Clone, Copy)] +pub struct Copy; +impl<'arena, 'tcx, V: Visitor<'arena, 'tcx>> CopyVisitor<'arena, 'tcx, V> for Copy { + fn copy_range(self, visitor: &mut V, dst: projection::Idx, src: Range, sp: Span) { + visitor.visit_copy_range(dst, src, sp); + } + fn copy_idx(self, visitor: &mut V, dst: projection::Idx, src: projection::Idx, sp: Span) { + visitor.visit_copy_idx(dst, src, sp); + } + fn read_src_range(self, visitor: &mut V, range: Range, sp: Span) { + visitor.visit_read_range(range, sp); + } + fn read_src_idx(self, visitor: &mut V, idx: projection::Idx, sp: Span) { + visitor.visit_read_idx(idx, sp); + } +} + +fn copy_aggregate_field<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + 
copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst_start: projection::Idx, + dst_data: &PlaceData<'_>, + src: >::Resolved, + sp: Span, +) { + let (src_start, src_data) = src.values(); + if src_data.contains_values() { + copy_place(visitor, copy_visitor, dst_start, dst_data, src_start, src_data, sp); + } + for idx in src.parents(visitor.resolver()) { + visitor.visit_read_parent(idx, sp); + } +} + +pub fn copy_place_fields<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst: projection::Idx, + dst_data: &PlaceData<'_>, + src: projection::Idx, + src_data: &PlaceData<'_>, + sp: Span, +) { + for (dst_field, src_field) in dst_data.fields.iter().zip(src_data.fields) { + let dst_field_start = dst.plus(dst_field.offset as usize); + let src_field_start = src.plus(src_field.offset as usize); + copy_place( + visitor, + copy_visitor, + dst_field_start, + dst_field.data, + src_field_start, + src_field.data, + sp, + ); + } +} + +pub fn copy_place<'arena, 'tcx, V: Visitor<'arena, 'tcx>>( + visitor: &mut V, + copy_visitor: impl CopyVisitor<'arena, 'tcx, V>, + dst: projection::Idx, + dst_data: &PlaceData<'_>, + src: projection::Idx, + src_data: &PlaceData<'_>, + sp: Span, +) { + let src_end = src.plus(src_data.value_count as usize); + if dst_data == src_data { + copy_visitor.copy_range(visitor, dst, src..src_end, sp); + } else if !dst_data.contains_values() { + copy_visitor.read_src_range(visitor, src..src_end, sp); + } else if !src_data.contains_values() { + visitor.visit_mutate_range(dst..dst.plus(dst_data.value_count as usize), sp); + } else { + debug_assert_eq!(dst_data.fields.len(), src_data.fields.len()); + match (dst_data.has_value, src_data.has_value) { + (true, true) => copy_visitor.copy_idx(visitor, dst, src, sp), + (true, false) => visitor.visit_mutate_idx(dst, sp), + (false, true) => copy_visitor.read_src_idx(visitor, src, sp), + (false, false) => {}, + } + copy_place_fields(visitor, copy_visitor, dst, 
dst_data, src, src_data, sp); + } +} + +pub fn walk_operand<'tcx>(visitor: &mut impl Visitor<'_, 'tcx>, op: &Operand<'tcx>, sp: Span) { + match *op { + Operand::Move(place) => visitor.visit_consume_place(place, sp), + Operand::Copy(place) => visitor.visit_read_place(place, sp), + Operand::Constant(_) => {}, + } +} + +/// Walks a `Call` terminator. +/// +/// This will treat calls to `core::mem::drop` the same as a `Drop` terminator. +pub fn walk_call<'tcx>( + visitor: &mut impl Visitor<'_, 'tcx>, + func: &Operand<'tcx>, + args: &[Spanned>], + dst: &Place<'tcx>, + sp: Span, +) { + walk_tail_call(visitor, func, args, sp); + visitor.visit_mutate_place(*dst, sp); +} + +/// Walks a `TailCall` terminator. +/// +/// This will treat calls to `core::mem::drop` the same as a `Drop` terminator. +pub fn walk_tail_call<'tcx>( + visitor: &mut impl Visitor<'_, 'tcx>, + func: &Operand<'tcx>, + args: &[Spanned>], + sp: Span, +) { + if let [arg] = args + && let Operand::Move(arg) = arg.node + && let ty::FnDef(fn_id, _) = *func.ty(visitor.body(), visitor.tcx()).kind() + && visitor.tcx().is_diagnostic_item(sym::mem_drop, fn_id) + { + visitor.visit_drop_place(arg, sp); + } else { + walk_operand(visitor, func, sp); + for arg in args { + walk_operand(visitor, &arg.node, arg.span); + } + } +} + +pub fn walk_inline_asm<'tcx>(visitor: &mut impl Visitor<'_, 'tcx>, operands: &[InlineAsmOperand<'tcx>], sp: Span) { + for op in operands { + if let InlineAsmOperand::In { value, .. } | InlineAsmOperand::InOut { in_value: value, .. } = op { + walk_operand(visitor, value, sp); + } + } + for op in operands { + if let InlineAsmOperand::Out { place: Some(place), .. } + | InlineAsmOperand::InOut { + out_place: Some(place), .. 
+ } = *op + { + visitor.visit_mutate_place(place, sp); + } + } +} diff --git a/clippy_utils/src/mir/possible_borrower.rs b/clippy_utils/src/mir/possible_borrower.rs index 152b4272c26c..f7ee30e37901 100644 --- a/clippy_utils/src/mir/possible_borrower.rs +++ b/clippy_utils/src/mir/possible_borrower.rs @@ -1,12 +1,10 @@ use super::possible_origin::PossibleOriginVisitor; use super::transitive_relation::TransitiveRelation; -use crate::ty::is_copy; use rustc_data_structures::fx::FxHashMap; use rustc_index::bit_set::DenseBitSet; -use rustc_lint::LateContext; use rustc_middle::mir::visit::Visitor as _; use rustc_middle::mir::{self, Mutability}; -use rustc_middle::ty::{self, TyCtxt, TypeVisitor}; +use rustc_middle::ty::{self, TyCtxt, TypeVisitor, TypingEnv}; use rustc_mir_dataflow::impls::MaybeStorageLive; use rustc_mir_dataflow::{Analysis, ResultsCursor}; use std::borrow::Cow; @@ -16,35 +14,40 @@ use std::ops::ControlFlow; /// For example, `b = &a; c = &a;` will make `b` and (transitively) `c` /// possible borrowers of `a`. 
#[allow(clippy::module_name_repetitions)] -struct PossibleBorrowerVisitor<'a, 'b, 'tcx> { +struct PossibleBorrowerVisitor<'body, 'tcx> { possible_borrower: TransitiveRelation, - body: &'b mir::Body<'tcx>, - cx: &'a LateContext<'tcx>, + body: &'body mir::Body<'tcx>, + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, possible_origin: FxHashMap>, } -impl<'a, 'b, 'tcx> PossibleBorrowerVisitor<'a, 'b, 'tcx> { +impl<'body, 'tcx> PossibleBorrowerVisitor<'body, 'tcx> { fn new( - cx: &'a LateContext<'tcx>, - body: &'b mir::Body<'tcx>, + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + body: &'body mir::Body<'tcx>, possible_origin: FxHashMap>, ) -> Self { Self { possible_borrower: TransitiveRelation::default(), body, - cx, + tcx, + typing_env, possible_origin, } } fn into_map( self, - cx: &'a LateContext<'tcx>, - maybe_live: ResultsCursor<'b, 'tcx, MaybeStorageLive<'tcx>>, - ) -> PossibleBorrowerMap<'b, 'tcx> { + maybe_live: ResultsCursor<'body, 'tcx, MaybeStorageLive<'tcx>>, + ) -> PossibleBorrowerMap<'body, 'tcx> { let mut map = FxHashMap::default(); for row in (1..self.body.local_decls.len()).map(mir::Local::from_usize) { - if is_copy(cx, self.body.local_decls[row].ty) { + if self + .tcx + .type_is_copy_modulo_regions(self.typing_env, self.body.local_decls[row].ty) + { continue; } @@ -64,7 +67,7 @@ impl<'a, 'b, 'tcx> PossibleBorrowerVisitor<'a, 'b, 'tcx> { } } -impl<'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'_, '_, 'tcx> { +impl<'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'_, 'tcx> { fn visit_assign(&mut self, place: &mir::Place<'tcx>, rvalue: &mir::Rvalue<'_>, _location: mir::Location) { let lhs = place.local; match rvalue { @@ -73,7 +76,7 @@ impl<'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'_, '_, 'tcx> { }, other => { if ContainsRegion - .visit_ty(place.ty(&self.body.local_decls, self.cx.tcx).ty) + .visit_ty(place.ty(&self.body.local_decls, self.tcx).ty) .is_continue() { return; @@ -177,19 +180,27 @@ pub struct 
PossibleBorrowerMap<'b, 'tcx> { } impl<'b, 'tcx> PossibleBorrowerMap<'b, 'tcx> { - pub fn new(cx: &LateContext<'tcx>, mir: &'b mir::Body<'tcx>) -> Self { + pub fn new(tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, mir: &'b mir::Body<'tcx>) -> Self { let possible_origin = { let mut vis = PossibleOriginVisitor::new(mir); vis.visit_body(mir); - vis.into_map(cx) + vis.into_map(tcx, typing_env) }; let maybe_storage_live_result = MaybeStorageLive::new(Cow::Owned(DenseBitSet::new_empty(mir.local_decls.len()))) - .iterate_to_fixpoint(cx.tcx, mir, Some("redundant_clone")) + .iterate_to_fixpoint(tcx, mir, Some("redundant_clone")) .into_results_cursor(mir); - let mut vis = PossibleBorrowerVisitor::new(cx, mir, possible_origin); + let mut vis = PossibleBorrowerVisitor::new(tcx, typing_env, mir, possible_origin); vis.visit_body(mir); - vis.into_map(cx, maybe_storage_live_result) + vis.into_map(maybe_storage_live_result) + } + + /// Checks if the local has no live borrowers immediately before executing the given statement. + pub fn is_unborrowed_before(&mut self, local: mir::Local, at: mir::Location) -> bool { + self.maybe_live.seek_before_primary_effect(at); + self.map + .get(&local) + .is_none_or(|borrows| borrows.iter().all(|x| !self.maybe_live.get().contains(x))) } /// Returns true if the set of borrowers of `borrowed` living at `at` matches with `borrowers`. 
@@ -215,7 +226,7 @@ impl<'b, 'tcx> PossibleBorrowerMap<'b, 'tcx> { self.bitset.0.insert(b); } } else { - return false; + return below.is_empty(); } self.bitset.1.clear(); diff --git a/clippy_utils/src/mir/possible_origin.rs b/clippy_utils/src/mir/possible_origin.rs index 3d253fd2bb14..7f856bfe4367 100644 --- a/clippy_utils/src/mir/possible_origin.rs +++ b/clippy_utils/src/mir/possible_origin.rs @@ -1,9 +1,8 @@ use super::transitive_relation::TransitiveRelation; -use crate::ty::is_copy; use rustc_data_structures::fx::FxHashMap; use rustc_index::bit_set::DenseBitSet; -use rustc_lint::LateContext; use rustc_middle::mir; +use rustc_middle::ty::{TyCtxt, TypingEnv}; /// Collect possible borrowed for every `&mut` local. /// For example, `_1 = &mut _2` generate _1: {_2,...} @@ -22,10 +21,14 @@ impl<'a, 'tcx> PossibleOriginVisitor<'a, 'tcx> { } } - pub fn into_map(self, cx: &LateContext<'tcx>) -> FxHashMap> { + pub fn into_map( + self, + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + ) -> FxHashMap> { let mut map = FxHashMap::default(); for row in (1..self.body.local_decls.len()).map(mir::Local::from_usize) { - if is_copy(cx, self.body.local_decls[row].ty) { + if tcx.type_is_copy_modulo_regions(typing_env, self.body.local_decls[row].ty) { continue; } diff --git a/tests/ui/redundant_clone.fixed b/tests/ui/redundant_clone.fixed deleted file mode 100644 index c1c389f7c4ed..000000000000 --- a/tests/ui/redundant_clone.fixed +++ /dev/null @@ -1,293 +0,0 @@ -// rustfix-only-machine-applicable -#![warn(clippy::redundant_clone)] -#![allow( - clippy::drop_non_drop, - clippy::implicit_clone, - clippy::pathbuf_init_then_push, - clippy::uninlined_format_args, - clippy::unnecessary_literal_unwrap -)] - -use std::ffi::OsString; -use std::path::Path; - -fn main() { - let _s = ["lorem", "ipsum"].join(" "); - //~^ redundant_clone - - let s = String::from("foo"); - let _s = s; - //~^ redundant_clone - - let s = String::from("foo"); - let _s = s; - //~^ redundant_clone - - let s = 
String::from("foo"); - let _s = s; - //~^ redundant_clone - - let _s = Path::new("/a/b/").join("c"); - //~^ redundant_clone - - let _s = Path::new("/a/b/").join("c"); - //~^ redundant_clone - - let _s = OsString::new(); - //~^ redundant_clone - - let _s = OsString::new(); - //~^ redundant_clone - - // Check that lint level works - #[allow(clippy::redundant_clone)] - let _s = String::new().to_string(); - - // Check that lint level works - #[expect(clippy::redundant_clone)] - let _s = String::new().to_string(); - - let tup = (String::from("foo"),); - let _t = tup.0; - //~^ redundant_clone - - let tup_ref = &(String::from("foo"),); - let _s = tup_ref.0.clone(); // this `.clone()` cannot be removed - - { - let x = String::new(); - let y = &x; - - let _x = x.clone(); // ok; `x` is borrowed by `y` - - let _ = y.len(); - } - - let x = (String::new(),); - let _ = Some(String::new()).unwrap_or_else(|| x.0.clone()); // ok; closure borrows `x` - - with_branch(Alpha, true); - cannot_double_move(Alpha); - cannot_move_from_type_with_drop(); - borrower_propagation(); - not_consumed(); - issue_5405(); - manually_drop(); - clone_then_move_cloned(); - hashmap_neg(); - false_negative_5707(); -} - -#[derive(Clone)] -struct Alpha; -fn with_branch(a: Alpha, b: bool) -> (Alpha, Alpha) { - if b { (a.clone(), a) } else { (Alpha, a) } - //~^ redundant_clone -} - -fn cannot_double_move(a: Alpha) -> (Alpha, Alpha) { - (a.clone(), a) -} - -struct TypeWithDrop { - x: String, -} - -impl Drop for TypeWithDrop { - fn drop(&mut self) {} -} - -fn cannot_move_from_type_with_drop() -> String { - let s = TypeWithDrop { x: String::new() }; - s.x.clone() // removing this `clone()` summons E0509 -} - -fn borrower_propagation() { - let s = String::new(); - let t = String::new(); - - { - fn b() -> bool { - unimplemented!() - } - let _u = if b() { &s } else { &t }; - - // ok; `s` and `t` are possibly borrowed - let _s = s.clone(); - let _t = t.clone(); - } - - { - let _u = || s.len(); - let _v = [&t; 32]; - 
let _s = s.clone(); // ok - let _t = t.clone(); // ok - } - - { - let _u = { - let u = Some(&s); - let _ = s.clone(); // ok - u - }; - let _s = s.clone(); // ok - } - - { - use std::convert::identity as id; - let _u = id(id(&s)); - let _s = s.clone(); // ok, `u` borrows `s` - } - - let _s = s; - //~^ redundant_clone - let _t = t; - //~^ redundant_clone - - #[derive(Clone)] - struct Foo { - x: usize, - } - - { - let f = Foo { x: 123 }; - let _x = Some(f.x); - let _f = f; - //~^ redundant_clone - } - - { - let f = Foo { x: 123 }; - let _x = &f.x; - let _f = f.clone(); // ok - } -} - -fn not_consumed() { - let x = std::path::PathBuf::from("home"); - let y = x.join("matthias"); - //~^ redundant_clone - // join() creates a new owned PathBuf, does not take a &mut to x variable, thus the .clone() is - // redundant. (It also does not consume the PathBuf) - - println!("x: {:?}, y: {:?}", x, y); - - let mut s = String::new(); - s.clone().push_str("foo"); // OK, removing this `clone()` will change the behavior. - s.push_str("bar"); - assert_eq!(s, "bar"); - - let t = Some(s); - // OK - if let Some(x) = t.clone() { - println!("{}", x); - } - if let Some(x) = t { - println!("{}", x); - } -} - -#[allow(clippy::clone_on_copy)] -fn issue_5405() { - let a: [String; 1] = [String::from("foo")]; - let _b: String = a[0].clone(); - - let c: [usize; 2] = [2, 3]; - let _d: usize = c[1].clone(); -} - -fn manually_drop() { - use std::mem::ManuallyDrop; - use std::sync::Arc; - - let a = ManuallyDrop::new(Arc::new("Hello!".to_owned())); - let _ = a.clone(); // OK - - let p: *const String = Arc::into_raw(ManuallyDrop::into_inner(a)); - unsafe { - Arc::from_raw(p); - Arc::from_raw(p); - } -} - -fn clone_then_move_cloned() { - // issue #5973 - let x = Some(String::new()); - // ok, x is moved while the clone is in use. - assert_eq!(x.clone(), None, "not equal {}", x.unwrap()); - - // issue #5595 - fn foo(_: &Alpha, _: F) {} - let x = Alpha; - // ok, data is moved while the clone is in use. 
- foo(&x, move || { - //~^ redundant_clone - let _ = x; - }); - - // issue #6998 - struct S(String); - impl S { - fn m(&mut self) {} - } - let mut x = S(String::new()); - x.0.clone().chars().for_each(|_| x.m()); -} - -fn hashmap_neg() { - // issue 5707 - use std::collections::HashMap; - use std::path::PathBuf; - - let p = PathBuf::from("/"); - - let mut h: HashMap<&str, &str> = HashMap::new(); - h.insert("orig-p", p.to_str().unwrap()); - - let mut q = p.clone(); - q.push("foo"); - - println!("{:?} {}", h, q.display()); -} - -fn false_negative_5707() { - fn foo(_x: &Alpha, _y: &mut Alpha) {} - - let x = Alpha; - let mut y = Alpha; - foo(&x, &mut y); - let _z = x.clone(); // pr 7346 can't lint on `x` - drop(y); -} - -mod issue10074 { - #[derive(Debug, Clone)] - enum MyEnum { - A = 1, - } - - fn false_positive_on_as() { - let e = MyEnum::A; - let v = e.clone() as u16; - - println!("{e:?}"); - println!("{v}"); - } -} - -mod issue13900 { - use std::fmt::Display; - - fn do_something(f: impl Display + Clone) -> String { - let g = f.clone(); - format!("{} + {}", f, g) - } - - fn regression() { - let mut a = String::new(); - let mut b = String::new(); - for _ in 1..10 { - b = a.clone(); - } - } -} diff --git a/tests/ui/redundant_clone.rs b/tests/ui/redundant_clone.rs index 78d98762efc8..802e7958b49c 100644 --- a/tests/ui/redundant_clone.rs +++ b/tests/ui/redundant_clone.rs @@ -1,293 +1,715 @@ -// rustfix-only-machine-applicable -#![warn(clippy::redundant_clone)] -#![allow( - clippy::drop_non_drop, - clippy::implicit_clone, - clippy::pathbuf_init_then_push, - clippy::uninlined_format_args, - clippy::unnecessary_literal_unwrap -)] +#![deny(clippy::redundant_clone)] -use std::ffi::OsString; -use std::path::Path; +use core::borrow::Borrow; +use core::hint::black_box; +use core::mem::ManuallyDrop; +use core::ops::Range; +use std::path::PathBuf; +use std::rc::Rc; +use std::sync::Arc; fn main() { - let _s = ["lorem", "ipsum"].join(" ").to_string(); - //~^ redundant_clone - - let s 
= String::from("foo"); - let _s = s.clone(); - //~^ redundant_clone - - let s = String::from("foo"); - let _s = s.to_string(); - //~^ redundant_clone - - let s = String::from("foo"); - let _s = s.to_owned(); - //~^ redundant_clone - - let _s = Path::new("/a/b/").join("c").to_owned(); - //~^ redundant_clone - - let _s = Path::new("/a/b/").join("c").to_path_buf(); - //~^ redundant_clone - - let _s = OsString::new().to_owned(); - //~^ redundant_clone - - let _s = OsString::new().to_os_string(); - //~^ redundant_clone - - // Check that lint level works - #[allow(clippy::redundant_clone)] - let _s = String::new().to_string(); - - // Check that lint level works - #[expect(clippy::redundant_clone)] - let _s = String::new().to_string(); - - let tup = (String::from("foo"),); - let _t = tup.0.clone(); - //~^ redundant_clone - - let tup_ref = &(String::from("foo"),); - let _s = tup_ref.0.clone(); // this `.clone()` cannot be removed - { - let x = String::new(); - let y = &x; - - let _x = x.clone(); // ok; `x` is borrowed by `y` - - let _ = y.len(); + let x = black_box(String::new()); + let _ = x.clone(); //~ redundant_clone + black_box(&x); + black_box(x); + } + { + let x = black_box(String::new()).clone(); //~ redundant_clone + black_box(&x); + black_box(x); + } + { + let x = black_box(String::new()); + drop(x.clone()); //~ redundant_clone + black_box(&x); + black_box(x); } - - let x = (String::new(),); - let _ = Some(String::new()).unwrap_or_else(|| x.0.clone()); // ok; closure borrows `x` - - with_branch(Alpha, true); - cannot_double_move(Alpha); - cannot_move_from_type_with_drop(); - borrower_propagation(); - not_consumed(); - issue_5405(); - manually_drop(); - clone_then_move_cloned(); - hashmap_neg(); - false_negative_5707(); -} - -#[derive(Clone)] -struct Alpha; -fn with_branch(a: Alpha, b: bool) -> (Alpha, Alpha) { - if b { (a.clone(), a.clone()) } else { (Alpha, a) } - //~^ redundant_clone -} - -fn cannot_double_move(a: Alpha) -> (Alpha, Alpha) { - (a.clone(), a) -} 
- -struct TypeWithDrop { - x: String, -} - -impl Drop for TypeWithDrop { - fn drop(&mut self) {} -} - -fn cannot_move_from_type_with_drop() -> String { - let s = TypeWithDrop { x: String::new() }; - s.x.clone() // removing this `clone()` summons E0509 -} - -fn borrower_propagation() { - let s = String::new(); - let t = String::new(); - { - fn b() -> bool { - unimplemented!() + let x = black_box(String::new()); + let y = x.clone(); //~ redundant_clone + drop(x); + black_box(&y); + black_box(y); + } + { + let x = black_box(String::new()); + let _y = x.clone(); //~ redundant_clone + black_box(&x); + black_box(x); + } + { + let x = black_box(String::new()); + let y = x.clone(); //~ redundant_clone + black_box(&y); + black_box(y); + } + { + let x = black_box(String::new()); + black_box(x.clone()); //~ redundant_clone + } + { + let x = black_box(String::new()); + let y = x.clone(); //~ redundant_clone + black_box(x); + } + { + let x = black_box(String::new()); + black_box(&mut x.clone()); //~ redundant_clone + } + { + let x = black_box(String::new()); + let mut y = x.clone(); //~ redundant_clone + black_box(&mut y); + } + { + let mut x = black_box(String::new()); + let _y = x.clone(); //~ redundant_clone + black_box(&mut x); + } + { + let x = black_box(String::new()); + black_box(x.clone()); + black_box(&x); + } + { + let x = black_box(String::new()); + black_box(x.clone()); + black_box(x); + } + { + let x = black_box(String::new()); + let y = x.clone(); + black_box(x); + black_box(&y); + } + { + let x = black_box(String::new()); + let y = x.clone(); + black_box(x); + black_box(y); + } + { + let x = black_box(String::new()); + black_box(&mut x.clone()); + black_box(&x); + } + { + let x = black_box(String::new()); + black_box(&mut x.clone()); + black_box(x); + } + { + let mut x = black_box(String::new()); + let y = x.clone(); + black_box(&mut x); + black_box(&y); + } + { + let mut x = black_box(String::new()); + let y = x.clone(); + black_box(&mut x); + black_box(y); + } 
+ { + let x = black_box(String::new()); + let y = if black_box(true) { + x.clone() //~ redundant_clone + } else { + black_box(String::new()) + }; + black_box(y); + } + { + let x = black_box(String::new()); + let y = if black_box(true) { + x.clone() + } else { + black_box(String::new()) + }; + black_box((y, &x)); + } + { + let x = black_box(String::new()); + black_box({ + let x = &x; + x.clone() //~ redundant_clone + }); + } + { + let x = black_box(String::new()); + black_box({ + let x = &x; + x.clone() + }); + black_box(x); + } + { + for _ in 0..10 { + let x = black_box(String::new()); + black_box(x.clone()); //~ redundant_clone } - let _u = if b() { &s } else { &t }; - - // ok; `s` and `t` are possibly borrowed - let _s = s.clone(); - let _t = t.clone(); } - { - let _u = || s.len(); - let _v = [&t; 32]; - let _s = s.clone(); // ok - let _t = t.clone(); // ok + for _ in 0..10 { + let mut x = black_box(String::new()); + black_box(x.clone()); + black_box(&mut x); + } + } + { + let mut x = black_box(String::new()); + for _ in 0..10 { + let _y = x.clone(); //~ redundant_clone + black_box(&mut x); + } + } + { + let mut x = black_box(String::new()); + for _ in 0..10 { + black_box(x.clone()); + } + } + { + let mut x = black_box(String::new()); + for _ in 0..10 { + let y = x.clone(); + black_box((&y, &mut x)); + } + } + { + let x = black_box(String::new()); + let y = x.clone(); + let x = &x; + black_box(y); + black_box(x); + } + { + let x = black_box(String::new()); + let y = x.clone(); + let z = x.clone(); //~ redundant_clone + black_box((y, &x)); + } + { + let x = black_box(String::new()); + let y = x.clone(); //~ redundant_clone + let z = x.clone(); + black_box((z, &x)); + } + { + let x = black_box(String::new()); + let y = x.clone(); //~ redundant_clone + let z = y.clone(); + black_box((z, &y)); + } + { + let x = black_box((String::new(), 0)); + black_box(x.0.clone()); //~ redundant_clone + } + { + let x = black_box((String::new(), 0)); + let _y = x.0.clone(); //~ 
redundant_clone + black_box(&x.0); + } + { + let x = black_box((String::new(), 0)); + black_box((x.0.clone(), 0)); //~ redundant_clone + } + { + let x = black_box((String::new(), 0)); + black_box((x.0.clone(), 0)); //~ redundant_clone + black_box(&x.1); + } + { + let x = black_box((String::new(), 0)); + black_box((x.0.clone(), 0)); + black_box(&x.0); + } + { + let x = black_box((String::new(), 0)); + black_box((x.0.clone(), 0)); + black_box(&x); + } + { + let x = black_box((String::new(), 0)); + black_box(x.clone()); //~ redundant_clone + } + { + let x = black_box((String::new(), 0)); + let _y = x.clone(); //~ redundant_clone + black_box(&x.1); + } + { + let x = black_box((String::new(), 0)); + black_box((x.clone(), &x.1)); + } + { + let x = black_box((String::new(), 0)); + black_box((x.clone(), &x.0)); + } + { + let x = black_box((String::new(), 0)); + let y = x.clone(); + let x = &x.1; + black_box((y, x)); + } + { + #[derive(Clone)] + struct X { + x: String, + y: (String, String), + z: u32, + } + let x = black_box(X { + x: String::new(), + y: (String::new(), String::new()), + z: 0, + }); + black_box(( + x.clone(), //~ redundant_clone + x.x.clone(), //~ redundant_clone + x.y.0.clone(), + x.y.1.clone(), + x.y.clone(), //~ redundant_clone + )); + + let x = black_box(X { + x: String::new(), + y: (String::new(), String::new()), + z: 0, + }); + black_box(( + x.clone(), + x.x.clone(), //~ redundant_clone + x.y.0.clone(), + x.y.1.clone(), + x.y.clone(), + )); + black_box(&x.y); + } + { + fn f1(x: T) -> T { + x.clone() //~ redundant_clone + } + fn f2(x: T) -> T { + drop(x.clone()); //~ redundant_clone + x + } + fn f3(x: T) -> T { + black_box(x.clone()); + x + } + fn f4(x: String, y: String) -> String { + let z = if black_box(true) { + x.clone() + } else { + y.clone() //~ redundant_clone + }; + black_box(z); + x + } + } + { + let mut x = black_box(String::new()); + let mut y = x.clone(); //~ redundant_clone + black_box(&mut y); + x = black_box(String::new()); + 
black_box(x); + } + { + let mut x = black_box(String::new()); + let mut y = x.clone(); //~ redundant_clone + black_box(&mut x); + y = black_box(String::new()); + black_box(y); } - { - let _u = { - let u = Some(&s); - let _ = s.clone(); // ok - u + let mut x = black_box(String::new()); + for _ in 0..10 { + let y = black_box(String::new()); + x = y.clone(); //~ redundant_clone + black_box(&y); + } + } + { + let mut x = black_box(String::new()); + for _ in 0..10 { + let y = black_box(String::new()); + x = y.clone(); //~ redundant_clone + black_box(&y); + } + black_box(&x); + } + { + let mut x = black_box(String::new()); + for _ in 0..10 { + let y = x.clone(); //~ redundant_clone + black_box(y); + x = black_box(String::new()); + } + } + { + let x = black_box(String::new()); + let y = if black_box(true) { + x.clone() //~ redundant_clone + } else { + black_box(0); + x.clone() //~ redundant_clone }; - let _s = s.clone(); // ok + black_box(y); + } + { + let mut x = black_box(String::new()); + let mut y = black_box(String::new()); + for _ in 0..10 { + y = x.clone(); //~ redundant_clone + x = black_box(String::new()); + } + black_box(&x); } - { - use std::convert::identity as id; - let _u = id(id(&s)); - let _s = s.clone(); // ok, `u` borrows `s` + let mut x = black_box(String::new()); + let mut y = x.clone(); //~ redundant_clone + for _ in 0..10 { + black_box(y); + x = black_box(String::new()); + y = x.clone(); //~ redundant_clone + } } + { + let x = black_box(String::new()); + let y = black_box(String::new()); + let z = if black_box(true) { &x } else { &y }; - let _s = s.clone(); - //~^ redundant_clone - let _t = t.clone(); - //~^ redundant_clone + black_box(x.clone()); + black_box(y.clone()); + black_box(z.clone()); - #[derive(Clone)] - struct Foo { - x: usize, + black_box(&z); } - { - let f = Foo { x: 123 }; - let _x = Some(f.x); - let _f = f.clone(); - //~^ redundant_clone + let mut x = black_box(String::new()); + let mut y = x.clone(); //~ redundant_clone + let x2 = x; 
+ let y2 = y; + x = black_box(String::new()); + y = black_box(String::new()); + black_box((x2, x, y)); } - { - let f = Foo { x: 123 }; - let _x = &f.x; - let _f = f.clone(); // ok + let mut x = black_box(String::new()); + let mut y = x.clone(); + let x2 = x; + let y2 = y; + x = black_box(String::new()); + y = black_box(String::new()); + black_box((x2, x, y, &y2)); } -} - -fn not_consumed() { - let x = std::path::PathBuf::from("home"); - let y = x.clone().join("matthias"); - //~^ redundant_clone - // join() creates a new owned PathBuf, does not take a &mut to x variable, thus the .clone() is - // redundant. (It also does not consume the PathBuf) - - println!("x: {:?}, y: {:?}", x, y); - - let mut s = String::new(); - s.clone().push_str("foo"); // OK, removing this `clone()` will change the behavior. - s.push_str("bar"); - assert_eq!(s, "bar"); - - let t = Some(s); - // OK - if let Some(x) = t.clone() { - println!("{}", x); + { + let x = black_box(Rc::new(String::new())); + black_box(x.clone()); //~ redundant_clone } - if let Some(x) = t { - println!("{}", x); + { + let x = black_box(Arc::new(String::new())); + black_box(x.clone()); //~ redundant_clone } -} - -#[allow(clippy::clone_on_copy)] -fn issue_5405() { - let a: [String; 1] = [String::from("foo")]; - let _b: String = a[0].clone(); - - let c: [usize; 2] = [2, 3]; - let _d: usize = c[1].clone(); -} - -fn manually_drop() { - use std::mem::ManuallyDrop; - use std::sync::Arc; - - let a = ManuallyDrop::new(Arc::new("Hello!".to_owned())); - let _ = a.clone(); // OK - - let p: *const String = Arc::into_raw(ManuallyDrop::into_inner(a)); - unsafe { - Arc::from_raw(p); - Arc::from_raw(p); + { + // Leak an `Rc` via `ManuallyDrop` + let x = black_box(ManuallyDrop::new(Rc::new(String::new()))); + let _ = x.clone(); + let raw = Rc::into_raw(ManuallyDrop::into_inner(x)); + unsafe { + let _ = Rc::from_raw(raw); + let _ = Rc::from_raw(raw); + } } -} - -fn clone_then_move_cloned() { - // issue #5973 - let x = 
Some(String::new()); - // ok, x is moved while the clone is in use. - assert_eq!(x.clone(), None, "not equal {}", x.unwrap()); - - // issue #5595 - fn foo(_: &Alpha, _: F) {} - let x = Alpha; - // ok, data is moved while the clone is in use. - foo(&x.clone(), move || { - //~^ redundant_clone - let _ = x; - }); - - // issue #6998 - struct S(String); - impl S { - fn m(&mut self) {} + { + // Leak an `Arc` via `ManuallyDrop` + let x = black_box(ManuallyDrop::new(Arc::new(String::new()))); + let _ = x.clone(); + let raw = Arc::into_raw(ManuallyDrop::into_inner(x)); + unsafe { + let _ = Arc::from_raw(raw); + let _ = Arc::from_raw(raw); + } } - let mut x = S(String::new()); - x.0.clone().chars().for_each(|_| x.m()); -} - -fn hashmap_neg() { - // issue 5707 - use std::collections::HashMap; - use std::path::PathBuf; - - let p = PathBuf::from("/"); - - let mut h: HashMap<&str, &str> = HashMap::new(); - h.insert("orig-p", p.to_str().unwrap()); - - let mut q = p.clone(); - q.push("foo"); - - println!("{:?} {}", h, q.display()); -} - -fn false_negative_5707() { - fn foo(_x: &Alpha, _y: &mut Alpha) {} - - let x = Alpha; - let mut y = Alpha; - foo(&x, &mut y); - let _z = x.clone(); // pr 7346 can't lint on `x` - drop(y); -} - -mod issue10074 { - #[derive(Debug, Clone)] - enum MyEnum { - A = 1, + { + // Don't lint trivial clones + let x = black_box(Range { start: 0, end: 0 }); + black_box(x.clone()); } - - fn false_positive_on_as() { - let e = MyEnum::A; - let v = e.clone() as u16; - - println!("{e:?}"); - println!("{v}"); + { + let x = 5; + #[allow(clippy::clone_on_copy)] + black_box(x.clone()); } -} + { + let x = black_box(String::new()); + #[allow(clippy::redundant_clone)] + black_box(x.clone()); + #[expect(clippy::redundant_clone)] + black_box(x.clone()); + } + { + let mut x = black_box(<(String, String, String, String, String)>::default()); + for _ in black_box(0..10) { + x.4 = x.3; + x.3 = x.2; + x.2 = x.1; + x.1 = x.0.clone(); //~ redundant_clone + } + black_box(x.0); + } + 
{ + let mut x = black_box(<(String, String, String, String, String)>::default()); + for _ in black_box(0..10) { + x.4 = x.3; + x.3 = x.2; + x.2 = x.1; + x.1 = x.0.clone(); + } + black_box((x.4, x.3)); + } + { + let x = black_box(String::new()); + let mut y = black_box(String::new()); + let mut z = black_box(String::new()); + let (y, z) = if black_box(true) { + y = x.clone(); //~ redundant_clone + (&y, &z) + } else { + z = x.clone(); //~ redundant_clone + (&y, &z) + }; + black_box((y, z)); + } + { + let x = black_box(String::new()); + let mut y = black_box(String::new()); + let mut z = black_box(String::new()); + let (x, y) = if black_box(true) { + y = x.clone(); + (&x, &y) + } else { + z = x.clone(); //~ redundant_clone + (&x, &y) + }; + black_box((x, y)); + } + { + let x = black_box(String::new()); + let mut y = black_box(String::new()); + let mut z = black_box(String::new()); + let (x, y, z) = if black_box(true) { + y = x.clone(); + (&x, &y, &z) + } else { + z = x.clone(); + (&x, &y, &z) + }; + black_box((x, y, z)); + } + { + let mut x = black_box((String::new(), 0)); + let y = x.clone(); //~ redundant_clone + x.1 = 5; + black_box(&y); + } + { + let x = black_box((String::new(), 0)); + let mut y = x.clone(); //~ redundant_clone + y.1 = 5; + black_box(&x); + } + { + let mut x = black_box((String::new(), String::new(), String::new())); + black_box(&mut x); + let y = if black_box(true) { + x.0.clone() + } else { + black_box(String::new()) + }; + black_box((&x.0, &y)); + } + { + let mut x = black_box((String::new(), String::new(), String::new())); + black_box(&mut x); + let y = if black_box(true) { + x.1.clone() + } else { + black_box(String::new()) + }; + black_box((&x.1, &y)); + } + { + let mut x = black_box((String::new(), String::new(), String::new())); + black_box(&mut x); + let y = if black_box(true) { + x.2.clone() + } else { + black_box(String::new()) + }; + black_box((&x.2, &y)); + } + { + let x = black_box(String::new()); + let y = black_box(String::new()); 
+ let z = (if black_box(true) { &x } else { &y }).clone(); + black_box((x, z)); + } + { + let x = black_box(String::new()); + let y = black_box(String::new()); + let z = (if black_box(true) { &x } else { &y }).clone(); + black_box((y, z)); + } + { + let x = black_box(String::new()); + let y = black_box(String::new()); + let z = { + let x = (&x, &y); + ( + x.0.clone(), + x.1.clone(), //~ redundant_clone + ) + }; + black_box((z.0, &z.1, &x, &y)); + } + { + struct X<'a> { + x: &'a String, + y: &'a String, + } + struct Y { + x: String, + y: String, + } + let x = black_box(String::new()); + let y = black_box(String::new()); + let z = { + let x = &x; + let y = &y; + let z = X { x, y }; + let x = (z.x, z.y, x, y).0.clone(); + (x, z.y.clone()) //~ redundant_clone + }; + let a = Y { x, y: z.1 }; + black_box((a.x, &z.0)); + black_box(y); + } + { + let x = black_box((String::new(), String::new())); + let y = (x.0.clone(), black_box(String::new())); + black_box((&x, &y)); + } + { + let mut x = black_box(String::new()); + let y = black_box(&raw mut x); + let z = x.clone(); + unsafe { + *y = black_box(String::new()); + } + black_box((&x, &z)); + } + { + let x = black_box(String::new()); + let y = black_box(&raw const x); + let mut z = x.clone(); + black_box(&mut z); + unsafe { + black_box((&*y, &z)); + } + } + { + let x = black_box((String::new(), String::new())); + let y = black_box(&raw const x.0); + let mut z = x.clone(); + black_box(&mut z); + unsafe { + black_box((&*y, &z)); + } + } + { + let x = black_box((String::new(), String::new())); + let y = black_box(&raw const x.0); + let mut z = x.1.clone(); //~ redundant_clone + black_box(&mut z); + unsafe { + black_box((&*y, &z)); + } + } + { + let x = black_box(String::new()); + let y = black_box(String::new()); + let z = { + let mut x = &x; + unsafe { *black_box(&raw mut x) = &y } + x.clone() + }; + black_box((y, z)); + } + { + #[derive(Default, Clone)] + struct X(String); + struct Y(X); + impl Drop for Y { + fn drop(&mut 
self) {} + } -mod issue13900 { - use std::fmt::Display; + let x = black_box(Y(X(String::new()))); + black_box(x.0.clone()); //~ redundant_clone + } + { + #[derive(Default, Clone)] + struct X(String); + struct Y(X); + impl Drop for Y { + fn drop(&mut self) {} + } - fn do_something(f: impl Display + Clone) -> String { - let g = f.clone(); - format!("{} + {}", f, g) + let x = black_box(Y(X(String::new()))); + black_box(x.0.clone()); + black_box(&x.0); } + { + #[derive(Clone)] + struct X(String); + struct Y(X); + impl Drop for Y { + fn drop(&mut self) {} + } - fn regression() { - let mut a = String::new(); - let mut b = String::new(); - for _ in 1..10 { - b = a.clone(); + let x = black_box(Y(X(String::new()))); + black_box(x.0.clone()); + } + { + let x = black_box(String::new()); + let _ = black_box(x.to_string()); //~ redundant_clone + } + { + let x = black_box(String::new()); + let _ = black_box(x.to_owned()); //~ redundant_clone + } + { + let x = black_box(PathBuf::new()); + let _ = black_box(x.to_owned()); //~ redundant_clone + } + { + struct X(String); + impl ToOwned for X { + type Owned = i32; + fn to_owned(&self) -> Self::Owned { + 1 + } } + impl Borrow for i32 { + fn borrow(&self) -> &X { + panic!(); + } + } + + let x = black_box(X(String::new())); + let _ = black_box(x.to_owned()); } } diff --git a/tests/ui/redundant_clone.stderr b/tests/ui/redundant_clone.stderr index 5be081f0f2f8..af7ba4570276 100644 --- a/tests/ui/redundant_clone.stderr +++ b/tests/ui/redundant_clone.stderr @@ -1,184 +1,350 @@ error: redundant clone - --> tests/ui/redundant_clone.rs:15:42 + --> tests/ui/redundant_clone.rs:14:17 | -LL | let _s = ["lorem", "ipsum"].join(" ").to_string(); - | ^^^^^^^^^^^^ help: remove this +LL | let _ = x.clone(); + | ^^^^^^^^^ | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:15:14 +note: the lint level is defined here + --> tests/ui/redundant_clone.rs:1:9 | -LL | let _s = ["lorem", "ipsum"].join(" ").to_string(); - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - = note: `-D clippy::redundant-clone` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::redundant_clone)]` +LL | #![deny(clippy::redundant_clone)] + | ^^^^^^^^^^^^^^^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:19:15 + --> tests/ui/redundant_clone.rs:19:17 | -LL | let _s = s.clone(); - | ^^^^^^^^ help: remove this +LL | let x = black_box(String::new()).clone(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:25:14 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:19:14 +LL | drop(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:31:17 | -LL | let _s = s.clone(); - | ^ +LL | let y = x.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:23:15 + --> tests/ui/redundant_clone.rs:38:18 | -LL | let _s = s.to_string(); - | ^^^^^^^^^^^^ help: remove this +LL | let _y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:44:17 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:23:14 +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:50:19 | -LL | let _s = s.to_string(); - | ^ +LL | black_box(x.clone()); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:27:15 + --> tests/ui/redundant_clone.rs:54:17 | -LL | let _s = s.to_owned(); - | ^^^^^^^^^^^ help: remove this +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:59:24 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:27:14 +LL | black_box(&mut x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:63:21 | -LL | let _s = s.to_owned(); - | ^ +LL | let mut y = x.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:30:42 + --> 
tests/ui/redundant_clone.rs:68:18 | -LL | let _s = Path::new("/a/b/").join("c").to_owned(); - | ^^^^^^^^^^^ help: remove this +LL | let _y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:118:13 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:30:14 +LL | x.clone() + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:137:13 | -LL | let _s = Path::new("/a/b/").join("c").to_owned(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | x.clone() + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:33:42 + --> tests/ui/redundant_clone.rs:151:23 | -LL | let _s = Path::new("/a/b/").join("c").to_path_buf(); - | ^^^^^^^^^^^^^^ help: remove this +LL | black_box(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:164:22 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:33:14 +LL | let _y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:191:17 | -LL | let _s = Path::new("/a/b/").join("c").to_path_buf(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let z = x.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:36:29 + --> tests/ui/redundant_clone.rs:196:17 | -LL | let _s = OsString::new().to_owned(); - | ^^^^^^^^^^^ help: remove this +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:202:17 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:36:14 +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:208:19 | -LL | let _s = OsString::new().to_owned(); - | ^^^^^^^^^^^^^^^ +LL | black_box(x.0.clone()); + | ^^^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:39:29 + --> tests/ui/redundant_clone.rs:212:18 | -LL | let _s = OsString::new().to_os_string(); - | ^^^^^^^^^^^^^^^ help: remove this +LL | let _y = x.0.clone(); 
+ | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:217:20 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:39:14 +LL | black_box((x.0.clone(), 0)); + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:221:20 | -LL | let _s = OsString::new().to_os_string(); - | ^^^^^^^^^^^^^^^ +LL | black_box((x.0.clone(), 0)); + | ^^^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:51:19 + --> tests/ui/redundant_clone.rs:236:19 | -LL | let _t = tup.0.clone(); - | ^^^^^^^^ help: remove this +LL | black_box(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:240:18 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:51:14 +LL | let _y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:270:13 | -LL | let _t = tup.0.clone(); - | ^^^^^ +LL | x.clone(), + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:84:25 + --> tests/ui/redundant_clone.rs:271:13 | -LL | if b { (a.clone(), a.clone()) } else { (Alpha, a) } - | ^^^^^^^^ help: remove this +LL | x.x.clone(), + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:274:13 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:84:24 +LL | x.y.clone(), + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:284:13 | -LL | if b { (a.clone(), a.clone()) } else { (Alpha, a) } - | ^ +LL | x.x.clone(), + | ^^^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:142:15 + --> tests/ui/redundant_clone.rs:315:21 | -LL | let _s = s.clone(); - | ^^^^^^^^ help: remove this +LL | let mut y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:322:21 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:142:14 +LL | let mut y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> 
tests/ui/redundant_clone.rs:331:17 | -LL | let _s = s.clone(); - | ^ +LL | x = y.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:144:15 + --> tests/ui/redundant_clone.rs:339:17 | -LL | let _t = t.clone(); - | ^^^^^^^^ help: remove this +LL | x = y.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:347:21 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:144:14 +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:355:13 | -LL | let _t = t.clone(); - | ^ +LL | x.clone() + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:155:19 + --> tests/ui/redundant_clone.rs:358:13 | -LL | let _f = f.clone(); - | ^^^^^^^^ help: remove this +LL | x.clone() + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:366:17 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:155:18 +LL | y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:373:21 | -LL | let _f = f.clone(); - | ^ +LL | let mut y = x.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:168:14 + --> tests/ui/redundant_clone.rs:377:17 | -LL | let y = x.clone().join("matthias"); - | ^^^^^^^^ help: remove this +LL | y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:393:21 | -note: cloned value is neither consumed nor mutated - --> tests/ui/redundant_clone.rs:168:13 +LL | let mut y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:411:19 | -LL | let y = x.clone().join("matthias"); - | ^^^^^^^^^ +LL | black_box(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:415:19 + | +LL | black_box(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:460:19 + | +LL | x.1 = x.0.clone(); + | ^^^^^^^^^^^ + +error: redundant 
clone + --> tests/ui/redundant_clone.rs:479:17 + | +LL | y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:482:17 + | +LL | z = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:495:17 + | +LL | z = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:515:17 + | +LL | let y = x.clone(); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:521:21 + | +LL | let mut y = x.clone(); + | ^^^^^^^^^ error: redundant clone - --> tests/ui/redundant_clone.rs:223:11 + --> tests/ui/redundant_clone.rs:574:17 | -LL | foo(&x.clone(), move || { - | ^^^^^^^^ help: remove this +LL | x.1.clone(), + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:595:17 + | +LL | (x, z.y.clone()) + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:636:21 + | +LL | let mut z = x.1.clone(); + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:661:19 + | +LL | black_box(x.0.clone()); + | ^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:688:27 + | +LL | let _ = black_box(x.to_string()); + | ^^^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:692:27 + | +LL | let _ = black_box(x.to_owned()); + | ^^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:696:27 + | +LL | let _ = black_box(x.to_owned()); + | ^^^^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:293:13 | -note: this value is dropped without further use - --> tests/ui/redundant_clone.rs:223:10 +LL | x.clone() + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:296:18 + | +LL | drop(x.clone()); + | ^^^^^^^^^ + +error: redundant clone + --> tests/ui/redundant_clone.rs:307:17 | -LL | foo(&x.clone(), move || { - | ^ +LL | y.clone() + | ^^^^^^^^^ -error: aborting due to 15 previous errors +error: aborting due to 57 previous errors diff --git 
a/tests/ui/unnecessary_to_owned.fixed b/tests/ui/unnecessary_to_owned.fixed index b064a8b8f46f..deb54cb72b3b 100644 --- a/tests/ui/unnecessary_to_owned.fixed +++ b/tests/ui/unnecessary_to_owned.fixed @@ -5,9 +5,10 @@ clippy::needless_lifetimes, clippy::owned_cow, clippy::ptr_arg, + clippy::redundant_clone, clippy::uninlined_format_args )] -#![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] +#![warn(clippy::unnecessary_to_owned)] use std::borrow::Cow; use std::ffi::{CStr, CString, OsStr, OsString}; @@ -215,16 +216,11 @@ fn main() { require_deref_slice(x.to_owned()); // The following should be flagged by `redundant_clone`, but not by this lint. - require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap()); - //~^ redundant_clone - require_os_str(&OsString::from("x")); - //~^ redundant_clone - require_path(&std::path::PathBuf::from("x")); - //~^ redundant_clone - require_str(&String::from("x")); - //~^ redundant_clone - require_slice(&[String::from("x")]); - //~^ redundant_clone + require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); + require_os_str(&OsString::from("x").to_os_string()); + require_path(&std::path::PathBuf::from("x").to_path_buf()); + require_str(&String::from("x").to_string()); + require_slice(&[String::from("x")].to_owned()); let slice = [0u8; 1024]; let _ref_str: &str = core::str::from_utf8(&slice).expect("not UTF-8"); diff --git a/tests/ui/unnecessary_to_owned.rs b/tests/ui/unnecessary_to_owned.rs index 7954a4ad4ce7..d1388b5a218c 100644 --- a/tests/ui/unnecessary_to_owned.rs +++ b/tests/ui/unnecessary_to_owned.rs @@ -5,9 +5,10 @@ clippy::needless_lifetimes, clippy::owned_cow, clippy::ptr_arg, + clippy::redundant_clone, clippy::uninlined_format_args )] -#![warn(clippy::unnecessary_to_owned, clippy::redundant_clone)] +#![warn(clippy::unnecessary_to_owned)] use std::borrow::Cow; use std::ffi::{CStr, CString, OsStr, OsString}; @@ -216,15 +217,10 @@ fn main() { // The following should be flagged by `redundant_clone`, but 
not by this lint. require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); - //~^ redundant_clone require_os_str(&OsString::from("x").to_os_string()); - //~^ redundant_clone require_path(&std::path::PathBuf::from("x").to_path_buf()); - //~^ redundant_clone require_str(&String::from("x").to_string()); - //~^ redundant_clone require_slice(&[String::from("x")].to_owned()); - //~^ redundant_clone let slice = [0u8; 1024]; let _ref_str: &str = &String::from_utf8(slice.to_vec()).expect("not UTF-8"); diff --git a/tests/ui/unnecessary_to_owned.stderr b/tests/ui/unnecessary_to_owned.stderr index 6c52be839301..0b7d3b9ebde9 100644 --- a/tests/ui/unnecessary_to_owned.stderr +++ b/tests/ui/unnecessary_to_owned.stderr @@ -1,67 +1,5 @@ -error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:218:64 - | -LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); - | ^^^^^^^^^^^ help: remove this - | -note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:218:20 - | -LL | require_c_str(&CString::from_vec_with_nul(vec![0]).unwrap().to_owned()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - = note: `-D clippy::redundant-clone` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::redundant_clone)]` - -error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:220:40 - | -LL | require_os_str(&OsString::from("x").to_os_string()); - | ^^^^^^^^^^^^^^^ help: remove this - | -note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:220:21 - | -LL | require_os_str(&OsString::from("x").to_os_string()); - | ^^^^^^^^^^^^^^^^^^^ - -error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:222:48 - | -LL | require_path(&std::path::PathBuf::from("x").to_path_buf()); - | ^^^^^^^^^^^^^^ help: remove this - | -note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:222:19 - | -LL | 
require_path(&std::path::PathBuf::from("x").to_path_buf()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:224:35 - | -LL | require_str(&String::from("x").to_string()); - | ^^^^^^^^^^^^ help: remove this - | -note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:224:18 - | -LL | require_str(&String::from("x").to_string()); - | ^^^^^^^^^^^^^^^^^ - -error: redundant clone - --> tests/ui/unnecessary_to_owned.rs:226:39 - | -LL | require_slice(&[String::from("x")].to_owned()); - | ^^^^^^^^^^^ help: remove this - | -note: this value is dropped without further use - --> tests/ui/unnecessary_to_owned.rs:226:20 - | -LL | require_slice(&[String::from("x")].to_owned()); - | ^^^^^^^^^^^^^^^^^^^ - error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:66:36 + --> tests/ui/unnecessary_to_owned.rs:67:36 | LL | require_c_str(&Cow::from(c_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this @@ -70,391 +8,391 @@ LL | require_c_str(&Cow::from(c_str).into_owned()); = help: to override `-D warnings` add `#[allow(clippy::unnecessary_to_owned)]` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:68:19 + --> tests/ui/unnecessary_to_owned.rs:69:19 | LL | require_c_str(&c_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_os_string` - --> tests/ui/unnecessary_to_owned.rs:71:20 + --> tests/ui/unnecessary_to_owned.rs:72:20 | LL | require_os_str(&os_str.to_os_string()); | ^^^^^^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:73:38 + --> tests/ui/unnecessary_to_owned.rs:74:38 | LL | require_os_str(&Cow::from(os_str).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:75:20 + --> tests/ui/unnecessary_to_owned.rs:76:20 | LL | require_os_str(&os_str.to_owned()); | ^^^^^^^^^^^^^^^^^^ help: use: 
`os_str` error: unnecessary use of `to_path_buf` - --> tests/ui/unnecessary_to_owned.rs:78:18 + --> tests/ui/unnecessary_to_owned.rs:79:18 | LL | require_path(&path.to_path_buf()); | ^^^^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:80:34 + --> tests/ui/unnecessary_to_owned.rs:81:34 | LL | require_path(&Cow::from(path).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:82:18 + --> tests/ui/unnecessary_to_owned.rs:83:18 | LL | require_path(&path.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:85:17 + --> tests/ui/unnecessary_to_owned.rs:86:17 | LL | require_str(&s.to_string()); | ^^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:87:30 + --> tests/ui/unnecessary_to_owned.rs:88:30 | LL | require_str(&Cow::from(s).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:89:17 + --> tests/ui/unnecessary_to_owned.rs:90:17 | LL | require_str(&s.to_owned()); | ^^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:91:17 + --> tests/ui/unnecessary_to_owned.rs:92:17 | LL | require_str(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref.as_ref()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:94:19 + --> tests/ui/unnecessary_to_owned.rs:95:19 | LL | require_slice(&slice.to_vec()); | ^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:96:36 + --> tests/ui/unnecessary_to_owned.rs:97:36 | LL | require_slice(&Cow::from(slice).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:98:19 + --> 
tests/ui/unnecessary_to_owned.rs:99:19 | LL | require_slice(&array.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `array.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:100:19 + --> tests/ui/unnecessary_to_owned.rs:101:19 | LL | require_slice(&array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref.as_ref()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:102:19 + --> tests/ui/unnecessary_to_owned.rs:103:19 | LL | require_slice(&slice.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `into_owned` - --> tests/ui/unnecessary_to_owned.rs:106:42 + --> tests/ui/unnecessary_to_owned.rs:107:42 | LL | require_x(&Cow::::Owned(x.clone()).into_owned()); | ^^^^^^^^^^^^^ help: remove this error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:110:25 + --> tests/ui/unnecessary_to_owned.rs:111:25 | LL | require_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:112:26 + --> tests/ui/unnecessary_to_owned.rs:113:26 | LL | require_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:114:24 + --> tests/ui/unnecessary_to_owned.rs:115:24 | LL | require_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:116:23 + --> tests/ui/unnecessary_to_owned.rs:117:23 | LL | require_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:118:25 + --> tests/ui/unnecessary_to_owned.rs:119:25 | LL | require_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:121:30 + --> tests/ui/unnecessary_to_owned.rs:122:30 | LL | 
require_impl_deref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:123:31 + --> tests/ui/unnecessary_to_owned.rs:124:31 | LL | require_impl_deref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:125:29 + --> tests/ui/unnecessary_to_owned.rs:126:29 | LL | require_impl_deref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:127:28 + --> tests/ui/unnecessary_to_owned.rs:128:28 | LL | require_impl_deref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:129:30 + --> tests/ui/unnecessary_to_owned.rs:130:30 | LL | require_impl_deref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:132:29 + --> tests/ui/unnecessary_to_owned.rs:133:29 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:132:43 + --> tests/ui/unnecessary_to_owned.rs:133:43 | LL | require_deref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:135:29 + --> tests/ui/unnecessary_to_owned.rs:136:29 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:135:47 + --> tests/ui/unnecessary_to_owned.rs:136:47 | LL | require_deref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:139:26 + --> tests/ui/unnecessary_to_owned.rs:140:26 | LL | 
require_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:141:27 + --> tests/ui/unnecessary_to_owned.rs:142:27 | LL | require_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: `os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:143:25 + --> tests/ui/unnecessary_to_owned.rs:144:25 | LL | require_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:145:24 + --> tests/ui/unnecessary_to_owned.rs:146:24 | LL | require_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:147:24 + --> tests/ui/unnecessary_to_owned.rs:148:24 | LL | require_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:149:26 + --> tests/ui/unnecessary_to_owned.rs:150:26 | LL | require_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:151:26 + --> tests/ui/unnecessary_to_owned.rs:152:26 | LL | require_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:153:26 + --> tests/ui/unnecessary_to_owned.rs:154:26 | LL | require_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:156:31 + --> tests/ui/unnecessary_to_owned.rs:157:31 | LL | require_impl_as_ref_c_str(c_str.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `c_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:158:32 + --> tests/ui/unnecessary_to_owned.rs:159:32 | LL | require_impl_as_ref_os_str(os_str.to_owned()); | ^^^^^^^^^^^^^^^^^ help: use: 
`os_str` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:160:30 + --> tests/ui/unnecessary_to_owned.rs:161:30 | LL | require_impl_as_ref_path(path.to_owned()); | ^^^^^^^^^^^^^^^ help: use: `path` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:162:29 + --> tests/ui/unnecessary_to_owned.rs:163:29 | LL | require_impl_as_ref_str(s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:164:29 + --> tests/ui/unnecessary_to_owned.rs:165:29 | LL | require_impl_as_ref_str(x.to_owned()); | ^^^^^^^^^^^^ help: use: `&x` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:166:31 + --> tests/ui/unnecessary_to_owned.rs:167:31 | LL | require_impl_as_ref_slice(array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:168:31 + --> tests/ui/unnecessary_to_owned.rs:169:31 | LL | require_impl_as_ref_slice(array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:170:31 + --> tests/ui/unnecessary_to_owned.rs:171:31 | LL | require_impl_as_ref_slice(slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:173:30 + --> tests/ui/unnecessary_to_owned.rs:174:30 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:173:44 + --> tests/ui/unnecessary_to_owned.rs:174:44 | LL | require_as_ref_str_slice(s.to_owned(), array.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:176:30 + --> tests/ui/unnecessary_to_owned.rs:177:30 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: 
unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:176:44 + --> tests/ui/unnecessary_to_owned.rs:177:44 | LL | require_as_ref_str_slice(s.to_owned(), array_ref.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:179:30 + --> tests/ui/unnecessary_to_owned.rs:180:30 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:179:44 + --> tests/ui/unnecessary_to_owned.rs:180:44 | LL | require_as_ref_str_slice(s.to_owned(), slice.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:182:30 + --> tests/ui/unnecessary_to_owned.rs:183:30 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `array` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:182:48 + --> tests/ui/unnecessary_to_owned.rs:183:48 | LL | require_as_ref_slice_str(array.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:185:30 + --> tests/ui/unnecessary_to_owned.rs:186:30 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^^^^^ help: use: `array_ref` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:185:52 + --> tests/ui/unnecessary_to_owned.rs:186:52 | LL | require_as_ref_slice_str(array_ref.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:188:30 + --> tests/ui/unnecessary_to_owned.rs:189:30 | LL | require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^^^^^ help: use: `slice` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:188:48 + --> tests/ui/unnecessary_to_owned.rs:189:48 | LL | 
require_as_ref_slice_str(slice.to_owned(), s.to_owned()); | ^^^^^^^^^^^^ help: use: `s` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:192:20 + --> tests/ui/unnecessary_to_owned.rs:193:20 | LL | let _ = x.join(&x_ref.to_string()); | ^^^^^^^^^^^^^^^^^^ help: use: `x_ref` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:195:13 + --> tests/ui/unnecessary_to_owned.rs:196:13 | LL | let _ = slice.to_vec().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:197:13 + --> tests/ui/unnecessary_to_owned.rs:198:13 | LL | let _ = slice.to_owned().into_iter(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:200:13 + --> tests/ui/unnecessary_to_owned.rs:201:13 | LL | let _ = IntoIterator::into_iter(slice.to_vec()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:202:13 + --> tests/ui/unnecessary_to_owned.rs:203:13 | LL | let _ = IntoIterator::into_iter(slice.to_owned()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use: `slice.iter().copied()` error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:230:26 + --> tests/ui/unnecessary_to_owned.rs:226:26 | LL | let _ref_str: &str = &String::from_utf8(slice.to_vec()).expect("not UTF-8"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -466,7 +404,7 @@ LL + let _ref_str: &str = core::str::from_utf8(&slice).expect("not UTF-8"); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:232:26 + --> tests/ui/unnecessary_to_owned.rs:228:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".to_vec()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ 
-478,7 +416,7 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo").unwrap(); | error: allocating a new `String` only to create a temporary `&str` from it - --> tests/ui/unnecessary_to_owned.rs:234:26 + --> tests/ui/unnecessary_to_owned.rs:230:26 | LL | let _ref_str: &str = &String::from_utf8(b"foo".as_slice().to_owned()).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -490,7 +428,7 @@ LL + let _ref_str: &str = core::str::from_utf8(b"foo".as_slice()).unwrap(); | error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:292:14 + --> tests/ui/unnecessary_to_owned.rs:288:14 | LL | for t in file_types.to_vec() { | ^^^^^^^^^^^^^^^^^^^ @@ -503,52 +441,52 @@ LL ~ let path = match get_file_path(t) { | error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:358:24 + --> tests/ui/unnecessary_to_owned.rs:354:24 | LL | Box::new(build(y.to_string())) | ^^^^^^^^^^^^^ help: use: `y` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:468:12 + --> tests/ui/unnecessary_to_owned.rs:464:12 | LL | id("abc".to_string()) | ^^^^^^^^^^^^^^^^^ help: use: `"abc"` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:612:37 + --> tests/ui/unnecessary_to_owned.rs:608:37 | LL | IntoFuture::into_future(foo([].to_vec(), &0)); | ^^^^^^^^^^^ help: use: `[]` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:623:18 + --> tests/ui/unnecessary_to_owned.rs:619:18 | LL | s.remove(&a.to_vec()); | ^^^^^^^^^^^ help: replace it with: `a` error: unnecessary use of `to_owned` - --> tests/ui/unnecessary_to_owned.rs:628:14 + --> tests/ui/unnecessary_to_owned.rs:624:14 | LL | s.remove(&"b".to_owned()); | ^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use of `to_string` - --> tests/ui/unnecessary_to_owned.rs:630:14 + --> tests/ui/unnecessary_to_owned.rs:626:14 | LL | s.remove(&"b".to_string()); | ^^^^^^^^^^^^^^^^ help: replace it with: `"b"` error: unnecessary use 
of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:636:14 + --> tests/ui/unnecessary_to_owned.rs:632:14 | LL | s.remove(&["b"].to_vec()); | ^^^^^^^^^^^^^^^ help: replace it with: `["b"].as_slice()` error: unnecessary use of `to_vec` - --> tests/ui/unnecessary_to_owned.rs:638:14 + --> tests/ui/unnecessary_to_owned.rs:634:14 | LL | s.remove(&(&["b"]).to_vec()); | ^^^^^^^^^^^^^^^^^^ help: replace it with: `(&["b"]).as_slice()` -error: aborting due to 82 previous errors +error: aborting due to 77 previous errors