diff --git a/.gitmodules b/.gitmodules index 8f4d3768c21e8..d460b6508f620 100644 --- a/.gitmodules +++ b/.gitmodules @@ -37,7 +37,7 @@ [submodule "src/llvm-project"] path = src/llvm-project url = https://github.com/rust-lang/llvm-project.git - branch = rustc/11.0-2020-08-20 + branch = rustc/11.0-2020-09-22 [submodule "src/doc/embedded-book"] path = src/doc/embedded-book url = https://github.com/rust-embedded/book.git diff --git a/Cargo.lock b/Cargo.lock index 1bbae2cbd80c9..d5466d57e771a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3424,6 +3424,7 @@ dependencies = [ name = "rustc_data_structures" version = "0.0.0" dependencies = [ + "arrayvec", "bitflags", "cfg-if", "crossbeam-utils 0.7.2", @@ -3599,6 +3600,7 @@ dependencies = [ name = "rustc_infer" version = "0.0.0" dependencies = [ + "arrayvec", "rustc_ast", "rustc_data_structures", "rustc_errors", @@ -3738,6 +3740,7 @@ dependencies = [ name = "rustc_middle" version = "0.0.0" dependencies = [ + "arrayvec", "bitflags", "byteorder", "chalk-ir", diff --git a/RELEASES.md b/RELEASES.md index b3d8c2f65f633..10c4994f2da3f 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,139 @@ +Version 1.47.0 (2020-10-08) +========================== + +Language +-------- +- [Closures will now warn when not used.][74869] + +Compiler +-------- +- [Stabilized the `-C control-flow-guard` codegen option][73893], which enables + [Control Flow Guard][1.47.0-cfg] for Windows platforms, and is ignored on other + platforms. +- [Upgraded to LLVM 11.][73526] +- [Added tier 3\* support for the `thumbv4t-none-eabi` target.][74419] +- [Upgraded the FreeBSD toolchain to version 11.4.][75204] +- [`RUST_BACKTRACE`'s output is now more compact.][75048] + +\* Refer to Rust's [platform support page][forge-platform-support] for more +information on Rust's tiered platform support. + +Libraries +--------- +- [`CStr` now implements `Index<RangeFrom<usize>>`.][74021] +- [Traits in `std`/`core` are now implemented for arrays of any length, not just + those of length less than 33.][74060] +- [`ops::RangeFull` and `ops::Range` now implement `Default`.][73197] +- [`panic::Location` now implements `Copy`, `Clone`, `Eq`, `Hash`, `Ord`, + `PartialEq`, and `PartialOrd`.][73583] + +Stabilized APIs +--------------- +- [`Ident::new_raw`] +- [`Range::is_empty`] +- [`RangeInclusive::is_empty`] +- [`Result::as_deref`] +- [`Result::as_deref_mut`] +- [`Vec::leak`] +- [`pointer::offset_from`] +- [`f32::TAU`] +- [`f64::TAU`] + +The following previously stable APIs have now been made const. + +- [The `new` method for all `NonZero` integers.][73858] +- [The `checked_add`, `checked_sub`, `checked_mul`, `checked_neg`, `checked_shl`, + `checked_shr`, `saturating_add`, `saturating_sub`, and `saturating_mul` + methods for all integers.][73858] +- [The `checked_abs`, `saturating_abs`, `saturating_neg`, and `signum` methods for all + signed integers.][73858] +- [The `is_ascii_alphabetic`, `is_ascii_uppercase`, `is_ascii_lowercase`, + `is_ascii_alphanumeric`, `is_ascii_digit`, `is_ascii_hexdigit`, + `is_ascii_punctuation`, `is_ascii_graphic`, `is_ascii_whitespace`, and + `is_ascii_control` methods for `char` and `u8`.][73858] + +Cargo +----- +- [`build-dependencies` are now built with opt-level 0 by default.][cargo/8500] + You can override this by setting the following in your `Cargo.toml`. 
+ ```toml + [profile.release.build-override] + opt-level = 3 + ``` +- [`cargo-help` will now display man pages for commands rather than just the + `--help` text.][cargo/8456] +- [`cargo-metadata` now emits a `test` field indicating if a target has + tests enabled.][cargo/8478] +- [`workspace.default-members` now respects `workspace.exclude`.][cargo/8485] +- [`cargo-publish` will now use an alternative registry by default if it's the + only registry specified in `package.publish`.][cargo/8571] + +Misc +---- +- [Added a help button beside Rustdoc's searchbar that explains rustdoc's + type-based search.][75366] +- [Added the Ayu theme to rustdoc.][71237] + +Compatibility Notes +------------------- +- [Bumped the minimum supported Emscripten version to 1.39.20.][75716] +- [Fixed a regression parsing `{} && false` in tail expressions.][74650] +- [Added changes to how proc-macros are expanded in `macro_rules!` that should + help to preserve more span information.][73084] These changes may cause + compilation errors if your macro was unhygienic or didn't correctly handle + `Delimiter::None`. +- [Moved support for the CloudABI target to tier 3.][75568] +- [`linux-gnu` targets now require minimum kernel 2.6.32 and glibc 2.11.][74163] + +Internal Only +------------- +- [Improved default settings for bootstrapping in `x.py`.][73964] You can read details about this change in the ["Changes to `x.py` defaults"](https://blog.rust-lang.org/inside-rust/2020/08/30/changes-to-x-py-defaults.html) post on the Inside Rust blog. +- [Added the `rustc-docs` component.][75560] This allows you to install + and read the documentation for the compiler internal APIs. (Currently only + available for `x86_64-unknown-linux-gnu`.) + +[1.47.0-cfg]: https://docs.microsoft.com/en-us/windows/win32/secbp/control-flow-guard +[76980]: https://github.com/rust-lang/rust/issues/76980 +[75048]: https://github.com/rust-lang/rust/pull/75048/ +[74163]: https://github.com/rust-lang/rust/pull/74163/ +[71237]: https://github.com/rust-lang/rust/pull/71237/ +[74869]: https://github.com/rust-lang/rust/pull/74869/ +[73858]: https://github.com/rust-lang/rust/pull/73858/ +[75716]: https://github.com/rust-lang/rust/pull/75716/ +[75908]: https://github.com/rust-lang/rust/pull/75908/ +[75516]: https://github.com/rust-lang/rust/pull/75516/ +[75560]: https://github.com/rust-lang/rust/pull/75560/ +[75568]: https://github.com/rust-lang/rust/pull/75568/ +[75366]: https://github.com/rust-lang/rust/pull/75366/ +[75204]: https://github.com/rust-lang/rust/pull/75204/ +[74650]: https://github.com/rust-lang/rust/pull/74650/ +[74419]: https://github.com/rust-lang/rust/pull/74419/ +[73964]: https://github.com/rust-lang/rust/pull/73964/ +[74021]: https://github.com/rust-lang/rust/pull/74021/ +[74060]: https://github.com/rust-lang/rust/pull/74060/ +[73893]: https://github.com/rust-lang/rust/pull/73893/ +[73526]: https://github.com/rust-lang/rust/pull/73526/ +[73583]: https://github.com/rust-lang/rust/pull/73583/ +[73084]: https://github.com/rust-lang/rust/pull/73084/ +[73197]: https://github.com/rust-lang/rust/pull/73197/ +[72488]: https://github.com/rust-lang/rust/pull/72488/ +[cargo/8456]: https://github.com/rust-lang/cargo/pull/8456/ +[cargo/8478]: https://github.com/rust-lang/cargo/pull/8478/ +[cargo/8485]: https://github.com/rust-lang/cargo/pull/8485/ +[cargo/8500]: https://github.com/rust-lang/cargo/pull/8500/ +[cargo/8571]: https://github.com/rust-lang/cargo/pull/8571/ +[`Ident::new_raw`]: https://doc.rust-lang.org/nightly/proc_macro/struct.Ident.html#method.new_raw 
+[`Range::is_empty`]: https://doc.rust-lang.org/nightly/std/ops/struct.Range.html#method.is_empty +[`RangeInclusive::is_empty`]: https://doc.rust-lang.org/nightly/std/ops/struct.RangeInclusive.html#method.is_empty +[`Result::as_deref_mut`]: https://doc.rust-lang.org/nightly/std/result/enum.Result.html#method.as_deref_mut +[`Result::as_deref`]: https://doc.rust-lang.org/nightly/std/result/enum.Result.html#method.as_deref +[`TypeId::of`]: https://doc.rust-lang.org/nightly/std/any/struct.TypeId.html#method.of +[`Vec::leak`]: https://doc.rust-lang.org/nightly/std/vec/struct.Vec.html#method.leak +[`f32::TAU`]: https://doc.rust-lang.org/nightly/std/f32/consts/constant.TAU.html +[`f64::TAU`]: https://doc.rust-lang.org/nightly/std/f64/consts/constant.TAU.html +[`pointer::offset_from`]: https://doc.rust-lang.org/nightly/std/primitive.pointer.html#method.offset_from + + Version 1.46.0 (2020-08-27) ========================== @@ -10,7 +146,7 @@ Language function's caller's location information for panic messages.][72445] - [Recursively indexing into tuples no longer needs parentheses.][71322] E.g. `x.0.0` over `(x.0).0`. -- [`mem::transmute` can now be used in static and constants.][72920] **Note** +- [`mem::transmute` can now be used in statics and constants.][72920] **Note** You currently can't use `mem::transmute` in constant functions. Compiler @@ -51,7 +187,7 @@ Compatibility Notes ------------------- - [The target configuration option `abi_blacklist` has been renamed to `unsupported_abis`.][74150] The old name will still continue to work. -- [Rustc will now warn if you have a C-like enum that implements `Drop`.][72331] +- [Rustc will now warn if you cast a C-like enum that implements `Drop`.][72331] This was previously accepted but will become a hard error in a future release. - [Rustc will fail to compile if you have a struct with `#[repr(i128)]` or `#[repr(u128)]`.][74109] This representation is currently only @@ -68,7 +204,19 @@ Compatibility Notes - [Rustc now correctly relates the lifetime of an existential associated type.][71896] This fixes some edge cases where `rustc` would erroneously allow you to pass a shorter lifetime than expected. - +- [Rustc now dynamically links to `libz` (also called `zlib`) on Linux.][74420] + The library will need to be installed for `rustc` to work, even though we + expect it to be already available on most systems. +- [Tests annotated with `#[should_panic]` are broken on ARMv7 while running + under QEMU.][74820] +- [Pretty printing of some tokens in procedural macros changed.][75453] The + exact output returned by rustc's pretty printing is an unstable + implementation detail: we recommend any macro relying on it to switch to a + more robust parsing system. 
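To make the pretty-printing caveat above concrete, here is a brief, hypothetical sketch (illustrative only; it is not part of this patch or of the release notes, and it assumes a crate configured as a proc-macro crate) of the fragile pattern that note warns about:

```rust
// Illustrative sketch only (not part of this patch): a procedural macro that
// string-matches the pretty-printed input. The exact output of `to_string()`
// is an unstable implementation detail, so this can break across releases.
use proc_macro::TokenStream;

#[proc_macro]
pub fn takes_sum(input: TokenStream) -> TokenStream {
    // Fragile: depends on the exact spacing produced by the pretty printer.
    if input.to_string() == "1 + 1" {
        "2".parse().unwrap()
    } else {
        input
    }
}
```

Parsing the input structurally (for example with a dedicated parsing crate such as `syn`) avoids depending on the exact rendering, which is what the note means by a more robust parsing system.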
+ +[75453]: https://github.com/rust-lang/rust/issues/75453/ +[74820]: https://github.com/rust-lang/rust/issues/74820/ +[74420]: https://github.com/rust-lang/rust/issues/74420/ [74109]: https://github.com/rust-lang/rust/pull/74109/ [74150]: https://github.com/rust-lang/rust/pull/74150/ [73862]: https://github.com/rust-lang/rust/pull/73862/ diff --git a/library/core/src/any.rs b/library/core/src/any.rs index d79b9a33b5aa8..d1951fbbf103a 100644 --- a/library/core/src/any.rs +++ b/library/core/src/any.rs @@ -435,7 +435,7 @@ impl TypeId { /// assert_eq!(is_string(&"cookie monster".to_string()), true); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_type_id", since = "1.46.0")] + #[rustc_const_unstable(feature = "const_type_id", issue = "77125")] pub const fn of() -> TypeId { TypeId { t: intrinsics::type_id::() } } diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 25951e2f58235..d0b12d6982127 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -807,7 +807,7 @@ extern "rust-intrinsic" { /// crate it is invoked in. /// /// The stabilized version of this intrinsic is [`crate::any::TypeId::of`]. - #[rustc_const_stable(feature = "const_type_id", since = "1.46.0")] + #[rustc_const_unstable(feature = "const_type_id", issue = "77125")] pub fn type_id() -> u64; /// A guard for unsafe functions that cannot ever be executed if `T` is uninhabited: diff --git a/library/core/src/iter/adapters/fuse.rs b/library/core/src/iter/adapters/fuse.rs index 94ba6f56476ae..ee5fbe9a84027 100644 --- a/library/core/src/iter/adapters/fuse.rs +++ b/library/core/src/iter/adapters/fuse.rs @@ -116,7 +116,7 @@ where } #[inline] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccess, { diff --git a/library/core/src/iter/adapters/mod.rs b/library/core/src/iter/adapters/mod.rs index 9fcd137e1a634..ce90607e7613e 100644 --- a/library/core/src/iter/adapters/mod.rs +++ b/library/core/src/iter/adapters/mod.rs @@ -215,7 +215,7 @@ where self.it.count() } - unsafe fn get_unchecked(&mut self, idx: usize) -> T + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> T where Self: TrustedRandomAccess, { @@ -350,7 +350,7 @@ where self.it.map(T::clone).fold(init, f) } - unsafe fn get_unchecked(&mut self, idx: usize) -> T + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> T where Self: TrustedRandomAccess, { @@ -865,7 +865,7 @@ where self.iter.fold(init, map_fold(self.f, g)) } - unsafe fn get_unchecked(&mut self, idx: usize) -> B + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> B where Self: TrustedRandomAccess, { @@ -1304,7 +1304,7 @@ where self.iter.fold(init, enumerate(self.count, fold)) } - unsafe fn get_unchecked(&mut self, idx: usize) -> ::Item + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> ::Item where Self: TrustedRandomAccess, { diff --git a/library/core/src/iter/adapters/zip.rs b/library/core/src/iter/adapters/zip.rs index 6cb618964830e..581ac6e0d82f4 100644 --- a/library/core/src/iter/adapters/zip.rs +++ b/library/core/src/iter/adapters/zip.rs @@ -59,7 +59,7 @@ where } #[inline] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item where Self: TrustedRandomAccess, { @@ -197,12 +197,14 @@ where let i = self.index; self.index += 1; // SAFETY: `i` is smaller than `self.len`, thus smaller 
than `self.a.len()` and `self.b.len()` - unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } + unsafe { + Some((self.a.__iterator_get_unchecked(i), self.b.__iterator_get_unchecked(i))) + } } else if A::may_have_side_effect() && self.index < self.a.size() { // match the base implementation's potential side effects // SAFETY: we just checked that `self.index` < `self.a.len()` unsafe { - self.a.get_unchecked(self.index); + self.a.__iterator_get_unchecked(self.index); } self.index += 1; None @@ -229,13 +231,13 @@ where // ensures that `end` is smaller than or equal to `self.len`, // so `i` is also smaller than `self.len`. unsafe { - self.a.get_unchecked(i); + self.a.__iterator_get_unchecked(i); } } if B::may_have_side_effect() { // SAFETY: same as above. unsafe { - self.b.get_unchecked(i); + self.b.__iterator_get_unchecked(i); } } } @@ -277,7 +279,9 @@ where let i = self.len; // SAFETY: `i` is smaller than the previous value of `self.len`, // which is also smaller than or equal to `self.a.len()` and `self.b.len()` - unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } + unsafe { + Some((self.a.__iterator_get_unchecked(i), self.b.__iterator_get_unchecked(i))) + } } else { None } @@ -287,7 +291,7 @@ where unsafe fn get_unchecked(&mut self, idx: usize) -> ::Item { // SAFETY: the caller must uphold the contract for // `Iterator::get_unchecked`. - unsafe { (self.a.get_unchecked(idx), self.b.get_unchecked(idx)) } + unsafe { (self.a.__iterator_get_unchecked(idx), self.b.__iterator_get_unchecked(idx)) } } } @@ -430,6 +434,6 @@ unsafe impl SpecTrustedRandomAccess for I { unsafe fn try_get_unchecked(&mut self, index: usize) -> Self::Item { // SAFETY: the caller must uphold the contract for // `Iterator::get_unchecked`. - unsafe { self.get_unchecked(index) } + unsafe { self.__iterator_get_unchecked(index) } } } diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 32e43ed42f385..10498f94c2115 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -3251,7 +3251,7 @@ pub trait Iterator { #[inline] #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] - unsafe fn get_unchecked(&mut self, _idx: usize) -> Self::Item + unsafe fn __iterator_get_unchecked(&mut self, _idx: usize) -> Self::Item where Self: TrustedRandomAccess, { diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 99f8cc66638f3..8270b732032fc 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -92,6 +92,7 @@ #![feature(const_slice_ptr_len)] #![feature(const_size_of_val)] #![feature(const_align_of_val)] +#![feature(const_type_id)] #![feature(const_type_name)] #![feature(const_likely)] #![feature(const_unreachable_unchecked)] diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 0d97ddb29af79..e7eed4382451b 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -3922,7 +3922,7 @@ macro_rules! 
iterator { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: the caller must guarantee that `i` is in bounds of // the underlying slice, so `i` cannot overflow an `isize`, and // the returned references is guaranteed to refer to an element @@ -5022,7 +5022,7 @@ impl<'a, T> Iterator for Windows<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: since the caller guarantees that `i` is in bounds, // which means that `i` cannot overflow an `isize`, and the // slice created by `from_raw_parts` is a subslice of `self.v` @@ -5161,7 +5161,7 @@ impl<'a, T> Iterator for Chunks<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; let end = match start.checked_add(self.chunk_size) { None => self.v.len(), @@ -5310,7 +5310,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; let end = match start.checked_add(self.chunk_size) { None => self.v.len(), @@ -5463,7 +5463,7 @@ impl<'a, T> Iterator for ChunksExact<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; // SAFETY: mostly identical to `Chunks::get_unchecked`. unsafe { from_raw_parts(self.v.as_ptr().add(start), self.chunk_size) } @@ -5597,7 +5597,7 @@ impl<'a, T> Iterator for ChunksExactMut<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let start = idx * self.chunk_size; // SAFETY: see comments for `ChunksMut::get_unchecked`. unsafe { from_raw_parts_mut(self.v.as_mut_ptr().add(start), self.chunk_size) } @@ -5723,10 +5723,10 @@ impl<'a, T, const N: usize> Iterator for ArrayChunks<'a, T, N> { self.iter.last() } - unsafe fn get_unchecked(&mut self, i: usize) -> &'a [T; N] { + unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> &'a [T; N] { // SAFETY: The safety guarantees of `get_unchecked` are transferred to // the caller. 
- unsafe { self.iter.get_unchecked(i) } + unsafe { self.iter.__iterator_get_unchecked(i) } } } @@ -5853,7 +5853,7 @@ impl<'a, T> Iterator for RChunks<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { None => 0, @@ -5999,7 +5999,7 @@ impl<'a, T> Iterator for RChunksMut<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { None => 0, @@ -6145,7 +6145,7 @@ impl<'a, T> Iterator for RChunksExact<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; // SAFETY: @@ -6286,7 +6286,7 @@ impl<'a, T> Iterator for RChunksExactMut<'a, T> { } #[doc(hidden)] - unsafe fn get_unchecked(&mut self, idx: usize) -> Self::Item { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; // SAFETY: see comments for `RChunksMut::get_unchecked`. diff --git a/library/core/src/str/mod.rs b/library/core/src/str/mod.rs index ab9afeb25e0ce..d7af5dd8602a8 100644 --- a/library/core/src/str/mod.rs +++ b/library/core/src/str/mod.rs @@ -824,10 +824,10 @@ impl Iterator for Bytes<'_> { } #[inline] - unsafe fn get_unchecked(&mut self, idx: usize) -> u8 { + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> u8 { // SAFETY: the caller must uphold the safety contract // for `Iterator::get_unchecked`. - unsafe { self.0.get_unchecked(idx) } + unsafe { self.0.__iterator_get_unchecked(idx) } } } diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index f0b2254be9ee9..d021feafbe416 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -647,6 +647,7 @@ impl Step for DebuggerScripts { cp_debugger_script("lldb_lookup.py"); cp_debugger_script("lldb_providers.py"); + cp_debugger_script("lldb_commands") } } } diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile index 1f0978c0082c5..7681eaef60f60 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile @@ -95,7 +95,7 @@ ENV RUST_CONFIGURE_ARGS \ --set target.x86_64-unknown-linux-gnu.linker=clang \ --set target.x86_64-unknown-linux-gnu.ar=/rustroot/bin/llvm-ar \ --set target.x86_64-unknown-linux-gnu.ranlib=/rustroot/bin/llvm-ranlib \ - --set llvm.thin-lto=true \ + --set llvm.thin-lto=false \ --set rust.jemalloc ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang diff --git a/src/ci/run.sh b/src/ci/run.sh index 5231aa2e76619..59510f5945b0b 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -63,7 +63,7 @@ fi # # FIXME: need a scheme for changing this `nightly` value to `beta` and `stable` # either automatically or manually. 
-export RUST_RELEASE_CHANNEL=nightly +export RUST_RELEASE_CHANNEL=stable # Always set the release channel for bootstrap; this is normally not important (i.e., only dist # builds would seem to matter) but in practice bootstrap wants to know whether we're targeting diff --git a/src/ci/shared.sh b/src/ci/shared.sh index 8222758ed6dc4..c93d4774e3992 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -104,7 +104,7 @@ function ciCommandAddPath { if isAzurePipelines; then echo "##vso[task.prependpath]${path}" elif isGitHubActions; then - echo "::add-path::${path}" + echo "${path}" >> "${GITHUB_PATH}" else echo "ciCommandAddPath only works inside CI!" exit 1 @@ -122,7 +122,7 @@ function ciCommandSetEnv { if isAzurePipelines; then echo "##vso[task.setvariable variable=${name}]${value}" elif isGitHubActions; then - echo "::set-env name=${name}::${value}" + echo "${name}=${value}" >> "${GITHUB_ENV}" else echo "ciCommandSetEnv only works inside CI!" exit 1 diff --git a/src/doc/rustc/src/lints/listing/deny-by-default.md b/src/doc/rustc/src/lints/listing/deny-by-default.md index 55714f8f4548b..dc5a9e44acfa2 100644 --- a/src/doc/rustc/src/lints/listing/deny-by-default.md +++ b/src/doc/rustc/src/lints/listing/deny-by-default.md @@ -45,6 +45,15 @@ error: defaults for type parameters are only allowed in `struct`, `enum`, `type` = note: for more information, see issue #36887 ``` +## missing-fragment-specifier + +The missing_fragment_specifier warning is issued when an unused pattern in a +`macro_rules!` macro definition has a meta-variable (e.g. `$e`) that is not +followed by a fragment specifier (e.g. `:expr`). + +This warning can always be fixed by removing the unused pattern in the +`macro_rules!` macro definition. + ## mutable-transmutes This lint catches transmuting from `&T` to `&mut T` because it is undefined diff --git a/src/etc/lldb_commands b/src/etc/lldb_commands index f470c62d89927..979f2fa7ae828 100644 --- a/src/etc/lldb_commands +++ b/src/etc/lldb_commands @@ -1,4 +1,3 @@ -command script import \"$RUSTC_SYSROOT/lib/rustlib/etc/lldb_lookup.py\" type synthetic add -l lldb_lookup.synthetic_lookup -x \".*\" --category Rust type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)String$\" --category Rust type summary add -F lldb_lookup.summary_lookup -e -x -h \"^&str$\" --category Rust diff --git a/src/etc/natvis/libstd.natvis b/src/etc/natvis/libstd.natvis index 4e81173d3d0b8..f791979800f19 100644 --- a/src/etc/natvis/libstd.natvis +++ b/src/etc/natvis/libstd.natvis @@ -41,7 +41,7 @@ n-- - static_cast<tuple<$T1, $T2>*>(base.table.ctrl.pointer)[-(i + 1)].__1 + ((tuple<$T1, $T2>*)base.table.ctrl.pointer)[-(i + 1)].__1 i++ @@ -65,7 +65,7 @@ n-- - static_cast<$T1*>(map.base.table.ctrl.pointer)[-(i + 1)] + (($T1*)map.base.table.ctrl.pointer)[-(i + 1)] i++ diff --git a/src/etc/rust-lldb b/src/etc/rust-lldb index 28b32ef1ad532..bce72f1bad698 100755 --- a/src/etc/rust-lldb +++ b/src/etc/rust-lldb @@ -30,5 +30,8 @@ EOF fi fi +script_import="command script import \"$RUSTC_SYSROOT/lib/rustlib/etc/lldb_lookup.py\"" +commands_file="$RUSTC_SYSROOT/lib/rustlib/etc/lldb_commands" + # Call LLDB with the commands added to the argument list -exec "$lldb" --source-before-file ./lldb_commands "$@" +exec "$lldb" --one-line-before-file "$script_import" --source-before-file "$commands_file" "$@" diff --git a/src/librustc_ast/token.rs b/src/librustc_ast/token.rs index 4a8bf6b4f19b6..7e58aab5a7a24 100644 --- a/src/librustc_ast/token.rs +++ b/src/librustc_ast/token.rs @@ -821,9 +821,19 @@ impl 
Nonterminal { if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind { let filename = source_map.span_to_filename(orig_span); if let FileName::Real(RealFileName::Named(path)) = filename { - if (path.ends_with("time-macros-impl/src/lib.rs") - && macro_name == sym::impl_macros) - || (path.ends_with("js-sys/src/lib.rs") && macro_name == sym::arrays) + let matches_prefix = |prefix| { + // Check for a path that ends with 'prefix*/src/lib.rs' + let mut iter = path.components().rev(); + iter.next().and_then(|p| p.as_os_str().to_str()) == Some("lib.rs") + && iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src") + && iter + .next() + .and_then(|p| p.as_os_str().to_str()) + .map_or(false, |p| p.starts_with(prefix)) + }; + + if (macro_name == sym::impl_macros && matches_prefix("time-macros-impl")) + || (macro_name == sym::arrays && matches_prefix("js-sys")) { let snippet = source_map.span_to_snippet(orig_span); if snippet.as_deref() == Ok("$name") { diff --git a/src/librustc_codegen_ssa/back/command.rs b/src/librustc_codegen_ssa/back/command.rs index 0208bb73abdbe..503c51d24b682 100644 --- a/src/librustc_codegen_ssa/back/command.rs +++ b/src/librustc_codegen_ssa/back/command.rs @@ -111,6 +111,12 @@ impl Command { LldFlavor::Link => "link", LldFlavor::Ld64 => "darwin", }); + if let LldFlavor::Wasm = flavor { + // LLVM expects host-specific formatting for @file + // arguments, but we always generate posix formatted files + // at this time. Indicate as such. + c.arg("--rsp-quoting=posix"); + } c } }; diff --git a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index 988bb733f9fb7..590506512021b 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -10,6 +10,7 @@ path = "lib.rs" doctest = false [dependencies] +arrayvec = { version = "0.5.1", default-features = false } ena = "0.14" indexmap = "1.5.1" tracing = "0.1" diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index af4a7bd18813e..cd25ba2ac6910 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -85,24 +85,26 @@ pub mod sorted_map; pub mod stable_set; #[macro_use] pub mod stable_hasher; +mod atomic_ref; +pub mod fingerprint; +pub mod profiling; pub mod sharded; pub mod stack; pub mod sync; pub mod thin_vec; pub mod tiny_list; pub mod transitive_relation; -pub use ena::undo_log; -pub use ena::unify; -mod atomic_ref; -pub mod fingerprint; -pub mod profiling; pub mod vec_linked_list; pub mod work_queue; pub use atomic_ref::AtomicRef; pub mod frozen; +pub mod mini_map; pub mod tagged_ptr; pub mod temp_dir; +pub use ena::undo_log; +pub use ena::unify; + pub struct OnDrop(pub F); impl OnDrop { diff --git a/src/librustc_data_structures/mini_map.rs b/src/librustc_data_structures/mini_map.rs new file mode 100644 index 0000000000000..cd3e949d3831a --- /dev/null +++ b/src/librustc_data_structures/mini_map.rs @@ -0,0 +1,61 @@ +use crate::fx::FxHashMap; +use arrayvec::ArrayVec; + +use std::hash::Hash; + +/// Small-storage-optimized implementation of a map +/// made specifically for caching results. +/// +/// Stores elements in a small array up to a certain length +/// and switches to `HashMap` when that length is exceeded. +pub enum MiniMap { + Array(ArrayVec<[(K, V); 8]>), + Map(FxHashMap), +} + +impl MiniMap { + /// Creates an empty `MiniMap`. + pub fn new() -> Self { + MiniMap::Array(ArrayVec::new()) + } + + /// Inserts or updates value in the map. 
+ pub fn insert(&mut self, key: K, value: V) { + match self { + MiniMap::Array(array) => { + for pair in array.iter_mut() { + if pair.0 == key { + pair.1 = value; + return; + } + } + if let Err(error) = array.try_push((key, value)) { + let mut map: FxHashMap = array.drain(..).collect(); + let (key, value) = error.element(); + map.insert(key, value); + *self = MiniMap::Map(map); + } + } + MiniMap::Map(map) => { + map.insert(key, value); + } + } + } + + /// Return value by key if any. + pub fn get(&self, key: &K) -> Option<&V> { + match self { + MiniMap::Array(array) => { + for pair in array { + if pair.0 == *key { + return Some(&pair.1); + } + } + return None; + } + MiniMap::Map(map) => { + return map.get(key); + } + } + } +} diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 5a654e83aed8e..4555168af0ab5 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -959,15 +959,15 @@ impl EmitterWriter { '_', line_offset + pos, width_offset + depth, - code_offset + annotation.start_col - left, + (code_offset + annotation.start_col).saturating_sub(left), style, ); } _ if self.teach => { buffer.set_style_range( line_offset, - code_offset + annotation.start_col - left, - code_offset + annotation.end_col - left, + (code_offset + annotation.start_col).saturating_sub(left), + (code_offset + annotation.end_col).saturating_sub(left), style, annotation.is_primary, ); diff --git a/src/librustc_expand/mbe.rs b/src/librustc_expand/mbe.rs index 9aed307ec93ae..da69b3260f694 100644 --- a/src/librustc_expand/mbe.rs +++ b/src/librustc_expand/mbe.rs @@ -84,7 +84,7 @@ enum TokenTree { /// e.g., `$var` MetaVar(Span, Ident), /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros. - MetaVarDecl(Span, Ident /* name to bind */, NonterminalKind), + MetaVarDecl(Span, Ident /* name to bind */, Option), } impl TokenTree { diff --git a/src/librustc_expand/mbe/macro_parser.rs b/src/librustc_expand/mbe/macro_parser.rs index 92a8f23112679..d2fe7fe10a830 100644 --- a/src/librustc_expand/mbe/macro_parser.rs +++ b/src/librustc_expand/mbe/macro_parser.rs @@ -378,6 +378,11 @@ fn nameize>( n_rec(sess, next_m, res.by_ref(), ret_val)?; } } + TokenTree::MetaVarDecl(span, _, None) => { + if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() { + return Err((span, "missing fragment specifier".to_string())); + } + } TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val .entry(MacroRulesNormalizedIdent::new(bind_name)) { @@ -437,6 +442,7 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool { /// /// A `ParseResult`. Note that matches are kept track of through the items generated. fn inner_parse_loop<'root, 'tt>( + sess: &ParseSess, cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, next_items: &mut Vec>, eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, @@ -554,9 +560,16 @@ fn inner_parse_loop<'root, 'tt>( }))); } + // We need to match a metavar (but the identifier is invalid)... this is an error + TokenTree::MetaVarDecl(span, _, None) => { + if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() { + return Error(span, "missing fragment specifier".to_string()); + } + } + // We need to match a metavar with a valid ident... call out to the black-box // parser by adding an item to `bb_items`. - TokenTree::MetaVarDecl(_, _, kind) => { + TokenTree::MetaVarDecl(_, _, Some(kind)) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. 
if Parser::nonterminal_may_begin_with(kind, token) { @@ -627,6 +640,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na // parsing from the black-box parser done. The result is that `next_items` will contain a // bunch of possible next matcher positions in `next_items`. match inner_parse_loop( + parser.sess, &mut cur_items, &mut next_items, &mut eof_items, @@ -688,7 +702,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na let nts = bb_items .iter() .map(|item| match item.top_elts.get_tt(item.idx) { - TokenTree::MetaVarDecl(_, bind, kind) => format!("{} ('{}')", kind, bind), + TokenTree::MetaVarDecl(_, bind, Some(kind)) => format!("{} ('{}')", kind, bind), _ => panic!(), }) .collect::>() @@ -718,7 +732,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na assert_eq!(bb_items.len(), 1); let mut item = bb_items.pop().unwrap(); - if let TokenTree::MetaVarDecl(span, _, kind) = item.top_elts.get_tt(item.idx) { + if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx) { let match_cur = item.match_cur; let nt = match parser.to_mut().parse_nonterminal(kind) { Err(mut err) => { diff --git a/src/librustc_expand/mbe/macro_rules.rs b/src/librustc_expand/mbe/macro_rules.rs index f0e6fe39a3c7f..48a622d13efcb 100644 --- a/src/librustc_expand/mbe/macro_rules.rs +++ b/src/librustc_expand/mbe/macro_rules.rs @@ -400,7 +400,7 @@ pub fn compile_declarative_macro( let diag = &sess.parse_sess.span_diagnostic; let lhs_nm = Ident::new(sym::lhs, def.span); let rhs_nm = Ident::new(sym::rhs, def.span); - let tt_spec = NonterminalKind::TT; + let tt_spec = Some(NonterminalKind::TT); // Parse the macro_rules! invocation let (macro_rules, body) = match &def.kind { @@ -577,7 +577,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool { TokenTree::Sequence(span, ref seq) => { if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| match *seq_tt { - TokenTree::MetaVarDecl(_, _, NonterminalKind::Vis) => true, + TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true, TokenTree::Sequence(_, ref sub_seq) => { sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne @@ -960,7 +960,7 @@ fn check_matcher_core( // Now `last` holds the complete set of NT tokens that could // end the sequence before SUFFIX. Check that every one works with `suffix`. for token in &last.tokens { - if let TokenTree::MetaVarDecl(_, name, kind) = *token { + if let TokenTree::MetaVarDecl(_, name, Some(kind)) = *token { for next_token in &suffix_first.tokens { match is_in_follow(next_token, kind) { IsInFollow::Yes => {} @@ -1018,7 +1018,7 @@ fn check_matcher_core( } fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool { - if let mbe::TokenTree::MetaVarDecl(_, _, kind) = *tok { + if let mbe::TokenTree::MetaVarDecl(_, _, Some(kind)) = *tok { frag_can_be_followed_by_any(kind) } else { // (Non NT's can always be followed by anything in matchers.) 
@@ -1123,7 +1123,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { } _ => IsInFollow::No(TOKENS), }, - TokenTree::MetaVarDecl(_, _, NonterminalKind::Block) => IsInFollow::Yes, + TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Block)) => IsInFollow::Yes, _ => IsInFollow::No(TOKENS), } } @@ -1158,7 +1158,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { TokenTree::MetaVarDecl( _, _, - NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path, + Some(NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path), ) => IsInFollow::Yes, _ => IsInFollow::No(TOKENS), } @@ -1171,7 +1171,8 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String { match *tt { mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token), mbe::TokenTree::MetaVar(_, name) => format!("${}", name), - mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), + mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind), + mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name), _ => panic!( "unexpected mbe::TokenTree::{{Sequence or Delimited}} \ in follow set checker" diff --git a/src/librustc_expand/mbe/quoted.rs b/src/librustc_expand/mbe/quoted.rs index 48db532c78f30..01b11bb979d68 100644 --- a/src/librustc_expand/mbe/quoted.rs +++ b/src/librustc_expand/mbe/quoted.rs @@ -3,7 +3,7 @@ use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree use rustc_ast::token::{self, Token}; use rustc_ast::tokenstream; -use rustc_ast::NodeId; +use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; use rustc_session::parse::ParseSess; use rustc_span::symbol::{kw, Ident}; @@ -73,7 +73,7 @@ pub(super) fn parse( .emit(); token::NonterminalKind::Ident }); - result.push(TokenTree::MetaVarDecl(span, ident, kind)); + result.push(TokenTree::MetaVarDecl(span, ident, Some(kind))); continue; } _ => token.span, @@ -83,8 +83,11 @@ pub(super) fn parse( } tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), }; - sess.span_diagnostic.struct_span_err(span, "missing fragment specifier").emit(); - continue; + if node_id != DUMMY_NODE_ID { + // Macros loaded from other crates have dummy node ids. 
+ sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id); + } + result.push(TokenTree::MetaVarDecl(span, ident, None)); } // Not a metavar or no matchers allowed, so just return the tree diff --git a/src/librustc_index/Cargo.toml b/src/librustc_index/Cargo.toml index 8aaf1cb9cbc58..061f440ef16e1 100644 --- a/src/librustc_index/Cargo.toml +++ b/src/librustc_index/Cargo.toml @@ -10,6 +10,6 @@ path = "lib.rs" doctest = false [dependencies] -arrayvec = "0.5.1" +arrayvec = { version = "0.5.1", default-features = false } rustc_serialize = { path = "../librustc_serialize" } rustc_macros = { path = "../librustc_macros" } diff --git a/src/librustc_infer/Cargo.toml b/src/librustc_infer/Cargo.toml index e1698d66323c3..a0fcc881e4b84 100644 --- a/src/librustc_infer/Cargo.toml +++ b/src/librustc_infer/Cargo.toml @@ -23,4 +23,5 @@ rustc_serialize = { path = "../librustc_serialize" } rustc_span = { path = "../librustc_span" } rustc_target = { path = "../librustc_target" } smallvec = { version = "1.0", features = ["union", "may_dangle"] } +arrayvec = { version = "0.5.1", default-features = false } rustc_ast = { path = "../librustc_ast" } diff --git a/src/librustc_infer/infer/combine.rs b/src/librustc_infer/infer/combine.rs index 3a54a6475301e..c89d8ced50db7 100644 --- a/src/librustc_infer/infer/combine.rs +++ b/src/librustc_infer/infer/combine.rs @@ -35,6 +35,7 @@ use super::{InferCtxt, MiscVariable, TypeTrace}; use crate::traits::{Obligation, PredicateObligations}; use rustc_ast as ast; +use rustc_data_structures::mini_map::MiniMap; use rustc_hir::def_id::DefId; use rustc_middle::traits::ObligationCause; use rustc_middle::ty::error::TypeError; @@ -379,6 +380,7 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> { needs_wf: false, root_ty: ty, param_env: self.param_env, + cache: MiniMap::new(), }; let ty = match generalize.relate(ty, ty) { @@ -438,6 +440,8 @@ struct Generalizer<'cx, 'tcx> { root_ty: Ty<'tcx>, param_env: ty::ParamEnv<'tcx>, + + cache: MiniMap, RelateResult<'tcx, Ty<'tcx>>>, } /// Result from a generalization operation. This includes @@ -535,13 +539,16 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { fn tys(&mut self, t: Ty<'tcx>, t2: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { assert_eq!(t, t2); // we are abusing TypeRelation here; both LHS and RHS ought to be == + if let Some(result) = self.cache.get(&t) { + return result.clone(); + } debug!("generalize: t={:?}", t); // Check to see whether the type we are generalizing references // any other type variable related to `vid` via // subtyping. This is basically our "occurs check", preventing // us from creating infinitely sized types. 
- match t.kind { + let result = match t.kind { ty::Infer(ty::TyVar(vid)) => { let vid = self.infcx.inner.borrow_mut().type_variables().root_var(vid); let sub_vid = self.infcx.inner.borrow_mut().type_variables().sub_root_var(vid); @@ -598,7 +605,10 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> { Ok(t) } _ => relate::super_relate_tys(self, t, t), - } + }; + + self.cache.insert(t, result.clone()); + return result; } fn regions( diff --git a/src/librustc_infer/infer/outlives/verify.rs b/src/librustc_infer/infer/outlives/verify.rs index 8f20b5743df4f..5a8368700b7c4 100644 --- a/src/librustc_infer/infer/outlives/verify.rs +++ b/src/librustc_infer/infer/outlives/verify.rs @@ -3,6 +3,7 @@ use crate::infer::{GenericKind, VerifyBound}; use rustc_data_structures::captures::Captures; use rustc_hir::def_id::DefId; use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst}; +use rustc_middle::ty::walk::MiniSet; use rustc_middle::ty::{self, Ty, TyCtxt}; /// The `TypeOutlives` struct has the job of "lowering" a `T: 'a` @@ -31,16 +32,23 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { /// Returns a "verify bound" that encodes what we know about /// `generic` and the regions it outlives. pub fn generic_bound(&self, generic: GenericKind<'tcx>) -> VerifyBound<'tcx> { + let mut visited = MiniSet::new(); match generic { GenericKind::Param(param_ty) => self.param_bound(param_ty), - GenericKind::Projection(projection_ty) => self.projection_bound(projection_ty), + GenericKind::Projection(projection_ty) => { + self.projection_bound(projection_ty, &mut visited) + } } } - fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> { + fn type_bound( + &self, + ty: Ty<'tcx>, + visited: &mut MiniSet>, + ) -> VerifyBound<'tcx> { match ty.kind { ty::Param(p) => self.param_bound(p), - ty::Projection(data) => self.projection_bound(data), + ty::Projection(data) => self.projection_bound(data, visited), ty::FnDef(_, substs) => { // HACK(eddyb) ignore lifetimes found shallowly in `substs`. // This is inconsistent with `ty::Adt` (including all substs), @@ -50,9 +58,9 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { let mut bounds = substs .iter() .filter_map(|child| match child.unpack() { - GenericArgKind::Type(ty) => Some(self.type_bound(ty)), + GenericArgKind::Type(ty) => Some(self.type_bound(ty, visited)), GenericArgKind::Lifetime(_) => None, - GenericArgKind::Const(_) => Some(self.recursive_bound(child)), + GenericArgKind::Const(_) => Some(self.recursive_bound(child, visited)), }) .filter(|bound| { // Remove bounds that must hold, since they are not interesting. 
@@ -66,7 +74,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { ), } } - _ => self.recursive_bound(ty.into()), + _ => self.recursive_bound(ty.into(), visited), } } @@ -137,7 +145,11 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { self.declared_projection_bounds_from_trait(projection_ty) } - pub fn projection_bound(&self, projection_ty: ty::ProjectionTy<'tcx>) -> VerifyBound<'tcx> { + pub fn projection_bound( + &self, + projection_ty: ty::ProjectionTy<'tcx>, + visited: &mut MiniSet>, + ) -> VerifyBound<'tcx> { debug!("projection_bound(projection_ty={:?})", projection_ty); let projection_ty_as_ty = @@ -166,21 +178,25 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { // see the extensive comment in projection_must_outlive let ty = self.tcx.mk_projection(projection_ty.item_def_id, projection_ty.substs); - let recursive_bound = self.recursive_bound(ty.into()); + let recursive_bound = self.recursive_bound(ty.into(), visited); VerifyBound::AnyBound(env_bounds.chain(trait_bounds).collect()).or(recursive_bound) } - fn recursive_bound(&self, parent: GenericArg<'tcx>) -> VerifyBound<'tcx> { + fn recursive_bound( + &self, + parent: GenericArg<'tcx>, + visited: &mut MiniSet>, + ) -> VerifyBound<'tcx> { let mut bounds = parent - .walk_shallow() + .walk_shallow(visited) .filter_map(|child| match child.unpack() { - GenericArgKind::Type(ty) => Some(self.type_bound(ty)), + GenericArgKind::Type(ty) => Some(self.type_bound(ty, visited)), GenericArgKind::Lifetime(lt) => { // Ignore late-bound regions. if !lt.is_late_bound() { Some(VerifyBound::OutlivedBy(lt)) } else { None } } - GenericArgKind::Const(_) => Some(self.recursive_bound(child)), + GenericArgKind::Const(_) => Some(self.recursive_bound(child, visited)), }) .filter(|bound| { // Remove bounds that must hold, since they are not interesting. 
diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs index 403aea8b304eb..007dbfb9fdd58 100644 --- a/src/librustc_interface/passes.rs +++ b/src/librustc_interface/passes.rs @@ -30,6 +30,7 @@ use rustc_passes::{self, hir_stats, layout_test}; use rustc_plugin_impl as plugin; use rustc_resolve::{Resolver, ResolverArenas}; use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType, PpMode, PpSourceMode}; +use rustc_session::lint; use rustc_session::output::{filename_for_input, filename_for_metadata}; use rustc_session::search_paths::PathKind; use rustc_session::Session; @@ -306,11 +307,27 @@ fn configure_and_expand_inner<'a>( ecx.check_unused_macros(); }); + let mut missing_fragment_specifiers: Vec<_> = ecx + .sess + .parse_sess + .missing_fragment_specifiers + .borrow() + .iter() + .map(|(span, node_id)| (*span, *node_id)) + .collect(); + missing_fragment_specifiers.sort_unstable_by_key(|(span, _)| *span); + + let recursion_limit_hit = ecx.reduced_recursion_limit.is_some(); + + for (span, node_id) in missing_fragment_specifiers { + let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER; + let msg = "missing fragment specifier"; + resolver.lint_buffer().buffer_lint(lint, node_id, span, msg); + } if cfg!(windows) { env::set_var("PATH", &old_path); } - let recursion_limit_hit = ecx.reduced_recursion_limit.is_some(); if recursion_limit_hit { // If we hit a recursion limit, exit early to avoid later passes getting overwhelmed // with a large AST diff --git a/src/librustc_middle/Cargo.toml b/src/librustc_middle/Cargo.toml index 311126361bc5b..ae09420d4a303 100644 --- a/src/librustc_middle/Cargo.toml +++ b/src/librustc_middle/Cargo.toml @@ -31,5 +31,6 @@ rustc_span = { path = "../librustc_span" } byteorder = { version = "1.3" } chalk-ir = "0.14.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } +arrayvec = { version = "0.5.1", default-features = false } measureme = "0.7.1" rustc_session = { path = "../librustc_session" } diff --git a/src/librustc_middle/ty/outlives.rs b/src/librustc_middle/ty/outlives.rs index 1a8693b8df711..07a0bcc0c4cf9 100644 --- a/src/librustc_middle/ty/outlives.rs +++ b/src/librustc_middle/ty/outlives.rs @@ -3,6 +3,7 @@ // RFC for reference. use crate::ty::subst::{GenericArg, GenericArgKind}; +use crate::ty::walk::MiniSet; use crate::ty::{self, Ty, TyCtxt, TypeFoldable}; use smallvec::SmallVec; @@ -50,12 +51,18 @@ impl<'tcx> TyCtxt<'tcx> { /// Push onto `out` all the things that must outlive `'a` for the condition /// `ty0: 'a` to hold. Note that `ty0` must be a **fully resolved type**. pub fn push_outlives_components(self, ty0: Ty<'tcx>, out: &mut SmallVec<[Component<'tcx>; 4]>) { - compute_components(self, ty0, out); + let mut visited = MiniSet::new(); + compute_components(self, ty0, out, &mut visited); debug!("components({:?}) = {:?}", ty0, out); } } -fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Component<'tcx>; 4]>) { +fn compute_components( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + out: &mut SmallVec<[Component<'tcx>; 4]>, + visited: &mut MiniSet>, +) { // Descend through the types, looking for the various "base" // components and collecting them into `out`. 
This is not written // with `collect()` because of the need to sometimes skip subtrees @@ -73,11 +80,11 @@ fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Compo for child in substs { match child.unpack() { GenericArgKind::Type(ty) => { - compute_components(tcx, ty, out); + compute_components(tcx, ty, out, visited); } GenericArgKind::Lifetime(_) => {} GenericArgKind::Const(_) => { - compute_components_recursive(tcx, child, out); + compute_components_recursive(tcx, child, out, visited); } } } @@ -85,19 +92,19 @@ fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Compo ty::Array(element, _) => { // Don't look into the len const as it doesn't affect regions - compute_components(tcx, element, out); + compute_components(tcx, element, out, visited); } ty::Closure(_, ref substs) => { for upvar_ty in substs.as_closure().upvar_tys() { - compute_components(tcx, upvar_ty, out); + compute_components(tcx, upvar_ty, out, visited); } } ty::Generator(_, ref substs, _) => { // Same as the closure case for upvar_ty in substs.as_generator().upvar_tys() { - compute_components(tcx, upvar_ty, out); + compute_components(tcx, upvar_ty, out, visited); } // We ignore regions in the generator interior as we don't @@ -135,7 +142,8 @@ fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Compo // OutlivesProjectionComponents. Continue walking // through and constrain Pi. let mut subcomponents = smallvec![]; - compute_components_recursive(tcx, ty.into(), &mut subcomponents); + let mut subvisited = MiniSet::new(); + compute_components_recursive(tcx, ty.into(), &mut subcomponents, &mut subvisited); out.push(Component::EscapingProjection(subcomponents.into_iter().collect())); } } @@ -177,7 +185,7 @@ fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Compo // the "bound regions list". In our representation, no such // list is maintained explicitly, because bound regions // themselves can be readily identified. - compute_components_recursive(tcx, ty.into(), out); + compute_components_recursive(tcx, ty.into(), out, visited); } } } @@ -186,11 +194,12 @@ fn compute_components_recursive( tcx: TyCtxt<'tcx>, parent: GenericArg<'tcx>, out: &mut SmallVec<[Component<'tcx>; 4]>, + visited: &mut MiniSet>, ) { - for child in parent.walk_shallow() { + for child in parent.walk_shallow(visited) { match child.unpack() { GenericArgKind::Type(ty) => { - compute_components(tcx, ty, out); + compute_components(tcx, ty, out, visited); } GenericArgKind::Lifetime(lt) => { // Ignore late-bound regions. @@ -199,7 +208,7 @@ fn compute_components_recursive( } } GenericArgKind::Const(_) => { - compute_components_recursive(tcx, child, out); + compute_components_recursive(tcx, child, out, visited); } } } diff --git a/src/librustc_middle/ty/print/mod.rs b/src/librustc_middle/ty/print/mod.rs index 6c8f23c139f6e..981e013683ba3 100644 --- a/src/librustc_middle/ty/print/mod.rs +++ b/src/librustc_middle/ty/print/mod.rs @@ -4,6 +4,7 @@ use crate::ty::{self, DefIdTree, Ty, TyCtxt}; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; +use rustc_middle::ty::walk::MiniSet; // `pretty` is a separate module only for organization. mod pretty; @@ -265,21 +266,33 @@ pub trait Printer<'tcx>: Sized { /// function tries to find a "characteristic `DefId`" for a /// type. It's just a heuristic so it makes some questionable /// decisions and we may want to adjust it later. 
-pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { +/// +/// Visited set is needed to avoid full iteration over +/// deeply nested tuples that have no DefId. +fn characteristic_def_id_of_type_cached<'a>( + ty: Ty<'a>, + visited: &mut MiniSet>, +) -> Option { match ty.kind { ty::Adt(adt_def, _) => Some(adt_def.did), ty::Dynamic(data, ..) => data.principal_def_id(), - ty::Array(subty, _) | ty::Slice(subty) => characteristic_def_id_of_type(subty), + ty::Array(subty, _) | ty::Slice(subty) => { + characteristic_def_id_of_type_cached(subty, visited) + } - ty::RawPtr(mt) => characteristic_def_id_of_type(mt.ty), + ty::RawPtr(mt) => characteristic_def_id_of_type_cached(mt.ty, visited), - ty::Ref(_, ty, _) => characteristic_def_id_of_type(ty), + ty::Ref(_, ty, _) => characteristic_def_id_of_type_cached(ty, visited), - ty::Tuple(ref tys) => { - tys.iter().find_map(|ty| characteristic_def_id_of_type(ty.expect_ty())) - } + ty::Tuple(ref tys) => tys.iter().find_map(|ty| { + let ty = ty.expect_ty(); + if visited.insert(ty) { + return characteristic_def_id_of_type_cached(ty, visited); + } + return None; + }), ty::FnDef(def_id, _) | ty::Closure(def_id, _) @@ -304,6 +317,9 @@ pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { | ty::Float(_) => None, } } +pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { + characteristic_def_id_of_type_cached(ty, &mut MiniSet::new()) +} impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::RegionKind { type Output = P::Region; diff --git a/src/librustc_middle/ty/print/pretty.rs b/src/librustc_middle/ty/print/pretty.rs index 999a1d52a26a2..a29e0b0000130 100644 --- a/src/librustc_middle/ty/print/pretty.rs +++ b/src/librustc_middle/ty/print/pretty.rs @@ -1226,6 +1226,7 @@ pub struct FmtPrinterData<'a, 'tcx, F> { used_region_names: FxHashSet, region_index: usize, binder_depth: usize, + printed_type_count: usize, pub region_highlight_mode: RegionHighlightMode, @@ -1256,6 +1257,7 @@ impl FmtPrinter<'a, 'tcx, F> { used_region_names: Default::default(), region_index: 0, binder_depth: 0, + printed_type_count: 0, region_highlight_mode: RegionHighlightMode::default(), name_resolver: None, })) @@ -1368,8 +1370,14 @@ impl Printer<'tcx> for FmtPrinter<'_, 'tcx, F> { self.pretty_print_region(region) } - fn print_type(self, ty: Ty<'tcx>) -> Result { - self.pretty_print_type(ty) + fn print_type(mut self, ty: Ty<'tcx>) -> Result { + if self.tcx.sess.type_length_limit().value_within_limit(self.printed_type_count) { + self.printed_type_count += 1; + self.pretty_print_type(ty) + } else { + write!(self, "...")?; + Ok(self) + } } fn print_dyn_existential( diff --git a/src/librustc_middle/ty/walk.rs b/src/librustc_middle/ty/walk.rs index 82c649b8f543b..024f655eb6522 100644 --- a/src/librustc_middle/ty/walk.rs +++ b/src/librustc_middle/ty/walk.rs @@ -3,7 +3,50 @@ use crate::ty; use crate::ty::subst::{GenericArg, GenericArgKind}; +use arrayvec::ArrayVec; +use rustc_data_structures::fx::FxHashSet; use smallvec::{self, SmallVec}; +use std::hash::Hash; + +/// Small-storage-optimized implementation of a set +/// made specifically for walking type tree. +/// +/// Stores elements in a small array up to a certain length +/// and switches to `HashSet` when that length is exceeded. +pub enum MiniSet { + Array(ArrayVec<[T; 8]>), + Set(FxHashSet), +} + +impl MiniSet { + /// Creates an empty `MiniSet`. + pub fn new() -> Self { + MiniSet::Array(ArrayVec::new()) + } + + /// Adds a value to the set. + /// + /// If the set did not have this value present, true is returned. 
+ /// + /// If the set did have this value present, false is returned. + pub fn insert(&mut self, elem: T) -> bool { + match self { + MiniSet::Array(array) => { + if array.iter().any(|e| *e == elem) { + false + } else { + if array.try_push(elem).is_err() { + let mut set: FxHashSet = array.iter().copied().collect(); + set.insert(elem); + *self = MiniSet::Set(set); + } + true + } + } + MiniSet::Set(set) => set.insert(elem), + } + } +} // The TypeWalker's stack is hot enough that it's worth going to some effort to // avoid heap allocations. @@ -12,11 +55,20 @@ type TypeWalkerStack<'tcx> = SmallVec<[GenericArg<'tcx>; 8]>; pub struct TypeWalker<'tcx> { stack: TypeWalkerStack<'tcx>, last_subtree: usize, + visited: MiniSet>, } +/// An iterator for walking the type tree. +/// +/// It's very easy to produce a deeply +/// nested type tree with a lot of +/// identical subtrees. In order to work efficiently +/// in this situation walker only visits each type once. +/// It maintains a set of visited types and +/// skips any types that are already there. impl<'tcx> TypeWalker<'tcx> { - pub fn new(root: GenericArg<'tcx>) -> TypeWalker<'tcx> { - TypeWalker { stack: smallvec![root], last_subtree: 1 } + pub fn new(root: GenericArg<'tcx>) -> Self { + Self { stack: smallvec![root], last_subtree: 1, visited: MiniSet::new() } } /// Skips the subtree corresponding to the last type @@ -41,11 +93,15 @@ impl<'tcx> Iterator for TypeWalker<'tcx> { fn next(&mut self) -> Option> { debug!("next(): stack={:?}", self.stack); - let next = self.stack.pop()?; - self.last_subtree = self.stack.len(); - push_inner(&mut self.stack, next); - debug!("next: stack={:?}", self.stack); - Some(next) + loop { + let next = self.stack.pop()?; + self.last_subtree = self.stack.len(); + if self.visited.insert(next) { + push_inner(&mut self.stack, next); + debug!("next: stack={:?}", self.stack); + return Some(next); + } + } } } @@ -67,9 +123,17 @@ impl GenericArg<'tcx> { /// Iterator that walks the immediate children of `self`. Hence /// `Foo, u32>` yields the sequence `[Bar, u32]` /// (but not `i32`, like `walk`). - pub fn walk_shallow(self) -> impl Iterator> { + /// + /// Iterator only walks items once. + /// It accepts visited set, updates it with all visited types + /// and skips any types that are already there. + pub fn walk_shallow( + self, + visited: &mut MiniSet>, + ) -> impl Iterator> { let mut stack = SmallVec::new(); push_inner(&mut stack, self); + stack.retain(|a| visited.insert(*a)); stack.into_iter() } } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index d379f4ef428a6..1afa720f69e77 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -419,6 +419,29 @@ fn record_accesses<'a, 'tcx: 'a>( inlining_map.lock_mut().record_accesses(caller, &accesses); } +// Shrinks string by keeping prefix and suffix of given sizes. +fn shrink(s: String, before: usize, after: usize) -> String { + // An iterator of all byte positions including the end of the string. + let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len())); + + let shrunk = format!( + "{before}...{after}", + before = &s[..positions().nth(before).unwrap_or(s.len())], + after = &s[positions().rev().nth(after).unwrap_or(0)..], + ); + + // Only use the shrunk version if it's really shorter. + // This also avoids the case where before and after slices overlap. 
+ if shrunk.len() < s.len() { shrunk } else { s } +} + +// Format instance name that is already known to be too long for rustc. +// Show only the first and last 32 characters to avoid blasting +// the user's terminal with thousands of lines of type-name. +fn shrunk_instance_name(instance: &Instance<'tcx>) -> String { + shrink(instance.to_string(), 32, 32) +} + fn check_recursion_limit<'tcx>( tcx: TyCtxt<'tcx>, instance: Instance<'tcx>, @@ -441,7 +464,10 @@ fn check_recursion_limit<'tcx>( // more than the recursion limit is assumed to be causing an // infinite expansion. if !tcx.sess.recursion_limit().value_within_limit(adjusted_recursion_depth) { - let error = format!("reached the recursion limit while instantiating `{}`", instance); + let error = format!( + "reached the recursion limit while instantiating `{}`", + shrunk_instance_name(&instance), + ); let mut err = tcx.sess.struct_span_fatal(span, &error); err.span_note( tcx.def_span(def_id), @@ -475,26 +501,9 @@ fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) { // // Bail out in these cases to avoid that bad user experience. if !tcx.sess.type_length_limit().value_within_limit(type_length) { - // The instance name is already known to be too long for rustc. - // Show only the first and last 32 characters to avoid blasting - // the user's terminal with thousands of lines of type-name. - let shrink = |s: String, before: usize, after: usize| { - // An iterator of all byte positions including the end of the string. - let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len())); - - let shrunk = format!( - "{before}...{after}", - before = &s[..positions().nth(before).unwrap_or(s.len())], - after = &s[positions().rev().nth(after).unwrap_or(0)..], - ); - - // Only use the shrunk version if it's really shorter. - // This also avoids the case where before and after slices overlap. - if shrunk.len() < s.len() { shrunk } else { s } - }; let msg = format!( "reached the type-length limit while instantiating `{}`", - shrink(instance.to_string(), 32, 32) + shrunk_instance_name(&instance), ); let mut diag = tcx.sess.struct_span_fatal(tcx.def_span(instance.def_id()), &msg); diag.note(&format!( diff --git a/src/librustc_mir/transform/simplify_try.rs b/src/librustc_mir/transform/simplify_try.rs index 06829cc2f14d5..9a80f0818c373 100644 --- a/src/librustc_mir/transform/simplify_try.rs +++ b/src/librustc_mir/transform/simplify_try.rs @@ -613,10 +613,11 @@ impl<'a, 'tcx> SimplifyBranchSameOptimizationFinder<'a, 'tcx> { // All successor basic blocks must be equal or contain statements that are pairwise considered equal. 
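The collector change above hoists the string-shrinking logic into a shared helper: both the recursion-limit error and the pre-existing type-length-limit error now print enormous instance names as a fixed-size prefix, `...`, and a fixed-size suffix. Here is a standalone, std-only sketch of that `shrink` helper with a small usage example; the demo string in `main` is made up for illustration.

```rust
use std::iter;

// Shrinks a string by keeping `before` leading and `after` trailing characters
// (cut on char boundaries). Falls back to the original if that is not shorter.
fn shrink(s: String, before: usize, after: usize) -> String {
    // An iterator of all byte positions at char boundaries, including the end.
    let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len()));

    let shrunk = format!(
        "{before}...{after}",
        before = &s[..positions().nth(before).unwrap_or(s.len())],
        after = &s[positions().rev().nth(after).unwrap_or(0)..],
    );

    // Only use the shrunk version if it is really shorter; this also avoids
    // the case where the prefix and suffix would overlap.
    if shrunk.len() < s.len() { shrunk } else { s }
}

fn main() {
    let long = format!("function::<{}u32{}>", "Option<".repeat(40), ">".repeat(40));
    // Keep 32 chars on each side, like the compiler's shrunk_instance_name.
    println!("{}", shrink(long, 32, 32));

    // Short names are returned unchanged.
    assert_eq!(shrink("short".to_string(), 32, 32), "short");
}
```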
for ((bb_l_idx,bb_l), (bb_r_idx,bb_r)) in iter_bbs_reachable.tuple_windows() { let trivial_checks = bb_l.is_cleanup == bb_r.is_cleanup - && bb_l.terminator().kind == bb_r.terminator().kind; + && bb_l.terminator().kind == bb_r.terminator().kind + && bb_l.statements.len() == bb_r.statements.len(); let statement_check = || { bb_l.statements.iter().zip(&bb_r.statements).try_fold(StatementEquality::TrivialEqual, |acc,(l,r)| { - let stmt_equality = self.statement_equality(*adt_matched_on, &l, bb_l_idx, &r, bb_r_idx); + let stmt_equality = self.statement_equality(*adt_matched_on, &l, bb_l_idx, &r, bb_r_idx, self.tcx.sess.opts.debugging_opts.mir_opt_level); if matches!(stmt_equality, StatementEquality::NotEqual) { // short circuit None @@ -676,6 +677,7 @@ impl<'a, 'tcx> SimplifyBranchSameOptimizationFinder<'a, 'tcx> { x_bb_idx: BasicBlock, y: &Statement<'tcx>, y_bb_idx: BasicBlock, + mir_opt_level: usize, ) -> StatementEquality { let helper = |rhs: &Rvalue<'tcx>, place: &Box>, @@ -694,7 +696,13 @@ impl<'a, 'tcx> SimplifyBranchSameOptimizationFinder<'a, 'tcx> { match rhs { Rvalue::Use(operand) if operand.place() == Some(adt_matched_on) => { - StatementEquality::ConsideredEqual(side_to_choose) + // FIXME(76803): This logic is currently broken because it does not take into + // account the current discriminant value. + if mir_opt_level > 2 { + StatementEquality::ConsideredEqual(side_to_choose) + } else { + StatementEquality::NotEqual + } } _ => { trace!( diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs index d67ed74bc9976..8803e3add46a8 100644 --- a/src/librustc_parse/parser/mod.rs +++ b/src/librustc_parse/parser/mod.rs @@ -694,9 +694,13 @@ impl<'a> Parser<'a> { Ok(t) => { // Parsed successfully, therefore most probably the code only // misses a separator. + let mut exp_span = self.sess.source_map().next_point(sp); + if self.sess.source_map().is_multiline(exp_span) { + exp_span = sp; + } expect_err .span_suggestion_short( - self.sess.source_map().next_point(sp), + exp_span, &format!("missing `{}`", token_str), token_str, Applicability::MaybeIncorrect, diff --git a/src/librustc_resolve/late/diagnostics.rs b/src/librustc_resolve/late/diagnostics.rs index 2549aee52ad4b..d392967af3856 100644 --- a/src/librustc_resolve/late/diagnostics.rs +++ b/src/librustc_resolve/late/diagnostics.rs @@ -1222,6 +1222,9 @@ impl<'tcx> LifetimeContext<'_, 'tcx> { synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), .. } => false, + hir::GenericParamKind::Lifetime { + kind: hir::LifetimeParamKind::Elided, + } => false, _ => true, }) { (param.span.shrink_to_lo(), format!("{}, ", lifetime_ref)) diff --git a/src/librustc_session/lint/builtin.rs b/src/librustc_session/lint/builtin.rs index 2db4d2a7f51d9..144a06a4916bd 100644 --- a/src/librustc_session/lint/builtin.rs +++ b/src/librustc_session/lint/builtin.rs @@ -252,6 +252,16 @@ declare_lint! { }; } +declare_lint! { + pub MISSING_FRAGMENT_SPECIFIER, + Deny, + "detects missing fragment specifiers in unused `macro_rules!` patterns", + @future_incompatible = FutureIncompatibleInfo { + reference: "issue #40107 ", + edition: None, + }; +} + declare_lint! { pub LATE_BOUND_LIFETIME_ARGUMENTS, Warn, @@ -574,6 +584,7 @@ declare_lint_pass! 
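The `QueryNormalizer` change above gives `fold_ty` a `MiniMap` cache, so a type that occurs many times in a tree is normalized once and every later occurrence is served from the cache. The snippet below shows that memoize-a-recursive-fold pattern on a deliberately toy function; the function, names, and numbers are invented for illustration, and only the caching shape mirrors the normalizer.

```rust
use std::collections::HashMap;

// Toy recursive "fold" with memoization: each distinct input is computed once,
// mirroring how the normalizer now consults its cache before super_fold_with.
fn fold(n: u64, cache: &mut HashMap<u64, u64>, folds: &mut u64) -> u64 {
    if let Some(&res) = cache.get(&n) {
        return res; // already folded: reuse the result
    }
    *folds += 1;
    let res = if n <= 1 {
        n
    } else {
        // "Fold the children" before producing this node's result.
        fold(n - 1, cache, folds) + fold(n - 2, cache, folds)
    };
    cache.insert(n, res);
    res
}

fn main() {
    let mut cache = HashMap::new();
    let mut folds = 0;
    let res = fold(30, &mut cache, &mut folds);
    println!("result = {}, distinct folds = {}", res, folds);
    // Without the cache this recursion would take well over a million calls.
    assert_eq!(folds, 31);
}
```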
{ UNALIGNED_REFERENCES, SAFE_PACKED_BORROWS, PATTERNS_IN_FNS_WITHOUT_BODY, + MISSING_FRAGMENT_SPECIFIER, LATE_BOUND_LIFETIME_ARGUMENTS, ORDER_DEPENDENT_TRAIT_OBJECTS, COHERENCE_LEAK_CHECK, diff --git a/src/librustc_session/parse.rs b/src/librustc_session/parse.rs index 6f10d0c4b89ea..a2bb8c4f91ff4 100644 --- a/src/librustc_session/parse.rs +++ b/src/librustc_session/parse.rs @@ -119,6 +119,7 @@ pub struct ParseSess { pub unstable_features: UnstableFeatures, pub config: CrateConfig, pub edition: Edition, + pub missing_fragment_specifiers: Lock>, /// Places where raw identifiers were used. This is used for feature-gating raw identifiers. pub raw_identifier_spans: Lock>, /// Used to determine and report recursive module inclusions. @@ -153,6 +154,7 @@ impl ParseSess { unstable_features: UnstableFeatures::from_environment(), config: FxHashSet::default(), edition: ExpnId::root().expn_data().edition, + missing_fragment_specifiers: Default::default(), raw_identifier_spans: Lock::new(Vec::new()), included_mod_stack: Lock::new(vec![]), source_map, diff --git a/src/librustc_trait_selection/traits/query/normalize.rs b/src/librustc_trait_selection/traits/query/normalize.rs index 93652329305a5..17963a6c8290f 100644 --- a/src/librustc_trait_selection/traits/query/normalize.rs +++ b/src/librustc_trait_selection/traits/query/normalize.rs @@ -7,6 +7,7 @@ use crate::infer::canonical::OriginalQueryValues; use crate::infer::{InferCtxt, InferOk}; use crate::traits::error_reporting::InferCtxtExt; use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal}; +use rustc_data_structures::mini_map::MiniMap; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_infer::traits::Normalized; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; @@ -57,6 +58,7 @@ impl<'cx, 'tcx> AtExt<'tcx> for At<'cx, 'tcx> { param_env: self.param_env, obligations: vec![], error: false, + cache: MiniMap::new(), anon_depth: 0, }; @@ -85,6 +87,7 @@ struct QueryNormalizer<'cx, 'tcx> { cause: &'cx ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, obligations: Vec>, + cache: MiniMap, Ty<'tcx>>, error: bool, anon_depth: usize, } @@ -99,8 +102,12 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { return ty; } + if let Some(ty) = self.cache.get(&ty) { + return ty; + } + let ty = ty.super_fold_with(self); - match ty.kind { + let res = (|| match ty.kind { ty::Opaque(def_id, substs) => { // Only normalize `impl Trait` after type-checking, usually in codegen. match self.param_env.reveal() { @@ -197,7 +204,9 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { } _ => ty, - } + })(); + self.cache.insert(ty, res); + res } fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 38fa8a402c483..f67b689bb0e12 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -337,18 +337,13 @@ pub fn build_impl( // reachable in rustdoc generated documentation if !did.is_local() { if let Some(traitref) = associated_trait { - if !cx.renderinfo.borrow().access_levels.is_public(traitref.def_id) { + let did = traitref.def_id; + if !cx.renderinfo.borrow().access_levels.is_public(did) { return; } - } - // Skip foreign unstable traits from lists of trait implementations and - // such. This helps prevent dependencies of the standard library, for - // example, from getting documented as "traits `u32` implements" which - // isn't really too helpful. 
- if let Some(trait_did) = associated_trait { - if let Some(stab) = cx.tcx.lookup_stability(trait_did.def_id) { - if stab.level.is_unstable() { + if let Some(stab) = tcx.lookup_stability(did) { + if stab.level.is_unstable() && stab.feature == sym::rustc_private { return; } } @@ -372,6 +367,12 @@ pub fn build_impl( if !cx.renderinfo.borrow().access_levels.is_public(did) { return; } + + if let Some(stab) = tcx.lookup_stability(did) { + if stab.level.is_unstable() && stab.feature == sym::rustc_private { + return; + } + } } } diff --git a/src/librustdoc/passes/doc_test_lints.rs b/src/librustdoc/passes/doc_test_lints.rs index a465a5f681f5c..367f93cfd3893 100644 --- a/src/librustdoc/passes/doc_test_lints.rs +++ b/src/librustdoc/passes/doc_test_lints.rs @@ -1,7 +1,7 @@ //! This pass is overloaded and runs two different lints. //! -//! - MISSING_DOC_CODE_EXAMPLES: this looks for public items missing doc-tests -//! - PRIVATE_DOC_TESTS: this looks for private items with doc-tests. +//! - MISSING_DOC_CODE_EXAMPLES: this lint is **UNSTABLE** and looks for public items missing doc-tests +//! - PRIVATE_DOC_TESTS: this lint is **STABLE** and looks for private items with doc-tests. use super::{span_of_attrs, Pass}; use crate::clean; @@ -89,7 +89,9 @@ pub fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) { find_testable_code(&dox, &mut tests, ErrorCodes::No, false, None); - if tests.found_tests == 0 { + if tests.found_tests == 0 + && rustc_feature::UnstableFeatures::from_environment().is_nightly_build() + { if should_have_doc_example(&item.inner) { debug!("reporting error for {:?} (hir_id={:?})", item, hir_id); let sp = span_of_attrs(&item.attrs).unwrap_or(item.source.span()); @@ -100,9 +102,7 @@ pub fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) { |lint| lint.build("missing code example in this documentation").emit(), ); } - } else if rustc_feature::UnstableFeatures::from_environment().is_nightly_build() - && tests.found_tests > 0 - && !cx.renderinfo.borrow().access_levels.is_public(item.def_id) + } else if tests.found_tests > 0 && !cx.renderinfo.borrow().access_levels.is_public(item.def_id) { cx.tcx.struct_span_lint_hir( lint::builtin::PRIVATE_DOC_TESTS, diff --git a/src/llvm-project b/src/llvm-project index 45790d79496be..7075196da1aa3 160000 --- a/src/llvm-project +++ b/src/llvm-project @@ -1 +1 @@ -Subproject commit 45790d79496be37fbce6ec57abad5af8fa7a34d7 +Subproject commit 7075196da1aa3527f7c87943607e25f3cf24997a diff --git a/src/stage0.txt b/src/stage0.txt index 4234ce4bac07d..67c19a3380c97 100644 --- a/src/stage0.txt +++ b/src/stage0.txt @@ -12,15 +12,15 @@ # source tarball for a stable release you'll likely see `1.x.0` for rustc and # `0.(x+1).0` for Cargo where they were released on `date`. -date: 2020-07-16 -rustc: beta -cargo: beta +date: 2020-08-27 +rustc: 1.46.0 +cargo: 0.47.0 # We use a nightly rustfmt to format the source because it solves some # bootstrapping issues with use of new syntax in this repo. If you're looking at # the beta/stable branch, this key should be omitted, as we don't want to depend # on rustfmt from nightly there. 
-rustfmt: nightly-2020-07-12 +#rustfmt: nightly-2020-07-12 # When making a stable release the process currently looks like: # diff --git a/src/test/mir-opt/simplify_arm.id.SimplifyBranchSame.diff b/src/test/mir-opt/simplify_arm.id.SimplifyBranchSame.diff index eb1d6f656f497..fb75eb5603a92 100644 --- a/src/test/mir-opt/simplify_arm.id.SimplifyBranchSame.diff +++ b/src/test/mir-opt/simplify_arm.id.SimplifyBranchSame.diff @@ -13,27 +13,24 @@ bb0: { _2 = discriminant(_1); // scope 0 at $DIR/simplify-arm.rs:11:9: 11:16 -- switchInt(move _2) -> [0_isize: bb1, 1_isize: bb3, otherwise: bb2]; // scope 0 at $DIR/simplify-arm.rs:11:9: 11:16 -+ goto -> bb1; // scope 0 at $DIR/simplify-arm.rs:11:9: 11:16 + switchInt(move _2) -> [0_isize: bb1, 1_isize: bb3, otherwise: bb2]; // scope 0 at $DIR/simplify-arm.rs:11:9: 11:16 } bb1: { -- discriminant(_0) = 0; // scope 0 at $DIR/simplify-arm.rs:12:17: 12:21 -- goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 -- } -- -- bb2: { -- unreachable; // scope 0 at $DIR/simplify-arm.rs:10:11: 10:12 -- } -- -- bb3: { + discriminant(_0) = 0; // scope 0 at $DIR/simplify-arm.rs:12:17: 12:21 + goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 + } + + bb2: { + unreachable; // scope 0 at $DIR/simplify-arm.rs:10:11: 10:12 + } + + bb3: { _0 = move _1; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27 -- goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 -+ goto -> bb2; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 + goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 } -- bb4: { -+ bb2: { + bb4: { return; // scope 0 at $DIR/simplify-arm.rs:14:2: 14:2 } } diff --git a/src/test/rustdoc/auxiliary/real_gimli.rs b/src/test/rustdoc/auxiliary/real_gimli.rs new file mode 100644 index 0000000000000..80d5c4ba8bb09 --- /dev/null +++ b/src/test/rustdoc/auxiliary/real_gimli.rs @@ -0,0 +1,13 @@ +// aux-build:realcore.rs + +#![crate_name = "real_gimli"] +#![feature(staged_api, extremely_unstable)] +#![unstable(feature = "rustc_private", issue = "none")] + +extern crate realcore; + +#[unstable(feature = "rustc_private", issue = "none")] +pub struct EndianSlice; + +#[unstable(feature = "rustc_private", issue = "none")] +impl realcore::Deref for EndianSlice {} diff --git a/src/test/rustdoc/auxiliary/realcore.rs b/src/test/rustdoc/auxiliary/realcore.rs new file mode 100644 index 0000000000000..e0a906df002da --- /dev/null +++ b/src/test/rustdoc/auxiliary/realcore.rs @@ -0,0 +1,15 @@ +#![crate_name = "realcore"] +#![feature(staged_api)] +#![unstable(feature = "extremely_unstable", issue = "none")] + +#[unstable(feature = "extremely_unstable_foo", issue = "none")] +pub struct Foo {} + +#[unstable(feature = "extremely_unstable_foo", issue = "none")] +pub trait Join {} + +#[unstable(feature = "extremely_unstable_foo", issue = "none")] +impl Join for Foo {} + +#[stable(feature = "faked_deref", since = "1.47.0")] +pub trait Deref {} diff --git a/src/test/rustdoc/issue-75588.rs b/src/test/rustdoc/issue-75588.rs new file mode 100644 index 0000000000000..835ed02ac00db --- /dev/null +++ b/src/test/rustdoc/issue-75588.rs @@ -0,0 +1,18 @@ +// ignore-tidy-linelength +// aux-build:realcore.rs +// aux-build:real_gimli.rs + +// Ensure unstably exported traits have their Implementors sections. 
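The new `issue-72408-nested-closures-exponential.rs` test above is the motivating case for the visited-set walker: each `dup` layer mentions the captured closure's type twice, so an exhaustive walk of the final closure type behaves roughly like a full binary tree 30 levels deep. The arithmetic below is a back-of-the-envelope estimate for illustration, not measured compiler data.

```rust
fn main() {
    // 30 nested `dup` layers, each doubling the subtree below it.
    let layers: u32 = 30;

    // A full binary tree of this depth has 2^(layers + 1) - 1 nodes,
    // which is roughly what a naive exhaustive walk would touch.
    let naive_nodes: u64 = 2u64.pow(layers + 1) - 1;

    // The deduplicating walker visits each distinct subtree once.
    let dedup_nodes: u64 = u64::from(layers) + 1;

    println!("naive walk: ~{} nodes, visited-set walk: ~{} nodes", naive_nodes, dedup_nodes);
}
```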
+ +#![crate_name = "foo"] +#![feature(extremely_unstable_foo)] + +extern crate realcore; +extern crate real_gimli; + +// issue #74672 +// @!has foo/trait.Deref.html '//*[@id="impl-Deref-for-EndianSlice"]//code' 'impl Deref for EndianSlice' +pub use realcore::Deref; + +// @has foo/trait.Join.html '//*[@id="impl-Join-for-Foo"]//code' 'impl Join for Foo' +pub use realcore::Join; diff --git a/src/test/ui/closures/issue-72408-nested-closures-exponential.rs b/src/test/ui/closures/issue-72408-nested-closures-exponential.rs new file mode 100644 index 0000000000000..2d6ba936572d5 --- /dev/null +++ b/src/test/ui/closures/issue-72408-nested-closures-exponential.rs @@ -0,0 +1,59 @@ +// build-pass + +// Closures include captured types twice in a type tree. +// +// Wrapping one closure with another leads to doubling +// the amount of types in the type tree. +// +// This test ensures that rust can handle +// deeply nested type trees with a lot +// of duplicated subtrees. + +fn dup(f: impl Fn(i32) -> i32) -> impl Fn(i32) -> i32 { + move |a| f(a * 2) +} + +fn main() { + let f = |a| a; + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + // Compiler dies around here if it tries + // to walk the tree exhaustively. + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + let f = dup(f); + + println!("Type size was at least {}", f(1)); +} diff --git a/src/test/ui/consts/const-typeid-of-rpass.rs b/src/test/ui/consts/const-typeid-of-rpass.rs index c49141050b20f..89d57ae4f98e6 100644 --- a/src/test/ui/consts/const-typeid-of-rpass.rs +++ b/src/test/ui/consts/const-typeid-of-rpass.rs @@ -1,4 +1,5 @@ // run-pass +#![feature(const_type_id)] #![feature(core_intrinsics)] use std::any::TypeId; diff --git a/src/test/ui/consts/issue-73976-monomorphic.rs b/src/test/ui/consts/issue-73976-monomorphic.rs index 1db0fdc87c37e..7706a97f23b48 100644 --- a/src/test/ui/consts/issue-73976-monomorphic.rs +++ b/src/test/ui/consts/issue-73976-monomorphic.rs @@ -5,6 +5,7 @@ // will be properly rejected. This test will ensure that monomorphic use of these // would not be wrongly rejected in patterns. +#![feature(const_type_id)] #![feature(const_type_name)] use std::any::{self, TypeId}; diff --git a/src/test/ui/consts/issue-73976-polymorphic.rs b/src/test/ui/consts/issue-73976-polymorphic.rs index b3d8610ff5173..787462da9f960 100644 --- a/src/test/ui/consts/issue-73976-polymorphic.rs +++ b/src/test/ui/consts/issue-73976-polymorphic.rs @@ -5,6 +5,7 @@ // This test case should either run-pass or be rejected at compile time. // Currently we just disallow this usage and require pattern is monomorphic. 
+#![feature(const_type_id)] #![feature(const_type_name)] use std::any::{self, TypeId}; diff --git a/src/test/ui/consts/issue-73976-polymorphic.stderr b/src/test/ui/consts/issue-73976-polymorphic.stderr index 250f1536d85fc..442ad23f2cc42 100644 --- a/src/test/ui/consts/issue-73976-polymorphic.stderr +++ b/src/test/ui/consts/issue-73976-polymorphic.stderr @@ -1,23 +1,23 @@ error: constant pattern depends on a generic parameter - --> $DIR/issue-73976-polymorphic.rs:19:37 + --> $DIR/issue-73976-polymorphic.rs:20:37 | LL | matches!(GetTypeId::::VALUE, GetTypeId::::VALUE) | ^^^^^^^^^^^^^^^^^^^^^ error: constant pattern depends on a generic parameter - --> $DIR/issue-73976-polymorphic.rs:31:42 + --> $DIR/issue-73976-polymorphic.rs:32:42 | LL | matches!(GetTypeNameLen::::VALUE, GetTypeNameLen::::VALUE) | ^^^^^^^^^^^^^^^^^^^^^^^^^^ error: constant pattern depends on a generic parameter - --> $DIR/issue-73976-polymorphic.rs:19:37 + --> $DIR/issue-73976-polymorphic.rs:20:37 | LL | matches!(GetTypeId::::VALUE, GetTypeId::::VALUE) | ^^^^^^^^^^^^^^^^^^^^^ error: constant pattern depends on a generic parameter - --> $DIR/issue-73976-polymorphic.rs:31:42 + --> $DIR/issue-73976-polymorphic.rs:32:42 | LL | matches!(GetTypeNameLen::::VALUE, GetTypeNameLen::::VALUE) | ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/test/ui/infinite/infinite-instantiation.stderr b/src/test/ui/infinite/infinite-instantiation.stderr index eb07d8905d609..72683629694f3 100644 --- a/src/test/ui/infinite/infinite-instantiation.stderr +++ b/src/test/ui/infinite/infinite-instantiation.stderr @@ -1,4 +1,4 @@ -error: reached the recursion limit while instantiating `function::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` +error: reached the recursion limit while instantiating `function::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` --> $DIR/infinite-instantiation.rs:21:9 | LL | function(counter - 1, t.to_option()); diff --git a/src/test/ui/issue-76597.fixed b/src/test/ui/issue-76597.fixed new file mode 100644 index 0000000000000..2d7a30b8361ad --- /dev/null +++ b/src/test/ui/issue-76597.fixed @@ -0,0 +1,11 @@ +// run-rustfix + +#![allow(dead_code)] +#![allow(unused_variables)] +fn f( + x: u8, + y: u8, +) {} +//~^^ ERROR: expected one of `!`, `(`, `)`, `+`, `,`, `::`, or `<`, found `y` + +fn main() {} diff --git a/src/test/ui/issue-76597.rs b/src/test/ui/issue-76597.rs new file mode 100644 index 0000000000000..521b9c64b1c57 --- /dev/null +++ b/src/test/ui/issue-76597.rs @@ -0,0 +1,11 @@ +// run-rustfix + +#![allow(dead_code)] +#![allow(unused_variables)] +fn f( + x: u8 + y: u8, +) {} +//~^^ ERROR: expected one of `!`, `(`, `)`, `+`, `,`, `::`, or `<`, found `y` + +fn main() {} diff --git a/src/test/ui/issue-76597.stderr b/src/test/ui/issue-76597.stderr new file mode 100644 index 0000000000000..50b23329f0ceb --- /dev/null +++ b/src/test/ui/issue-76597.stderr @@ -0,0 +1,13 @@ +error: expected one of `!`, `(`, `)`, `+`, `,`, `::`, or `<`, found `y` + --> $DIR/issue-76597.rs:7:38 + | +LL | ... x: u8 + | - + | | + | expected one of 7 possible tokens + | help: missing `,` +LL | ... 
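Several of the updated tests switch from type-length-limit errors to recursion-limit errors, and the remaining type-length diagnostic still suggests raising the limit via a crate attribute. Both knobs remain ordinary crate attributes; the values in this sketch are arbitrary placeholders, not recommendations.

```rust
// Crate-level limits referenced by the updated diagnostics. The exact values
// here are placeholders; pick whatever your crate actually needs.
#![recursion_limit = "256"]
#![type_length_limit = "1048576"]

fn main() {
    println!("limits only matter once monomorphization gets deep enough");
}
```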
y: u8, + | ^ unexpected token + +error: aborting due to previous error + diff --git a/src/test/ui/issues/issue-22638.rs b/src/test/ui/issues/issue-22638.rs index 72c16fddb4b12..89137538425bf 100644 --- a/src/test/ui/issues/issue-22638.rs +++ b/src/test/ui/issues/issue-22638.rs @@ -51,9 +51,9 @@ struct D (Box); impl D { pub fn matches(&self, f: &F) { - //~^ ERROR reached the type-length limit while instantiating `D::matches::<[closure let &D(ref a) = self; a.matches(f) + //~^ ERROR reached the recursion limit while instantiating `A::matches::<[closure } } diff --git a/src/test/ui/issues/issue-22638.stderr b/src/test/ui/issues/issue-22638.stderr index b0df46b11fadb..c4255b95b704e 100644 --- a/src/test/ui/issues/issue-22638.stderr +++ b/src/test/ui/issues/issue-22638.stderr @@ -1,10 +1,14 @@ -error: reached the type-length limit while instantiating `D::matches::$CLOSURE` - --> $DIR/issue-22638.rs:53:5 +error: reached the recursion limit while instantiating `A::matches::$CLOSURE` + --> $DIR/issue-22638.rs:55:9 + | +LL | a.matches(f) + | ^^^^^^^^^^^^ + | +note: `A::matches` defined here + --> $DIR/issue-22638.rs:14:5 | LL | pub fn matches(&self, f: &F) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: consider adding a `#![type_length_limit="30408681"]` attribute to your crate error: aborting due to previous error diff --git a/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.rs b/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.rs index fec4b17153609..d3d5863ddb3c9 100644 --- a/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.rs +++ b/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.rs @@ -12,8 +12,8 @@ trait Foo { impl Foo for T { #[allow(unconditional_recursion)] - fn recurse(&self) { //~ ERROR reached the type-length limit - (self, self).recurse(); + fn recurse(&self) { + (self, self).recurse(); //~ ERROR reached the recursion limit } } diff --git a/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.stderr b/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.stderr index 6229d90d4b477..a94f190d6b25d 100644 --- a/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.stderr +++ b/src/test/ui/issues/issue-37311-type-length-limit/issue-37311.stderr @@ -1,10 +1,14 @@ -error: reached the type-length limit while instantiating `<(&(&(&(&(&(&(&(&(&(&(&(&(&(&(&(...))))))))))))))) as Foo>::recurse` +error: reached the recursion limit while instantiating `<(&(&(&(&(&(&(&(&(&(&(&(&(&(&(&(.....), ...), ...) as Foo>::recurse` + --> $DIR/issue-37311.rs:16:9 + | +LL | (self, self).recurse(); + | ^^^^^^^^^^^^^^^^^^^^^^ + | +note: `::recurse` defined here --> $DIR/issue-37311.rs:15:5 | LL | fn recurse(&self) { | ^^^^^^^^^^^^^^^^^ - | - = note: consider adding a `#![type_length_limit="2097149"]` attribute to your crate error: aborting due to previous error diff --git a/src/test/ui/macros/issue-39404.rs b/src/test/ui/issues/issue-39404.rs similarity index 77% rename from src/test/ui/macros/issue-39404.rs rename to src/test/ui/issues/issue-39404.rs index 054958ba00b8d..2229f2c3900c3 100644 --- a/src/test/ui/macros/issue-39404.rs +++ b/src/test/ui/issues/issue-39404.rs @@ -2,5 +2,6 @@ macro_rules! 
m { ($i) => {} } //~^ ERROR missing fragment specifier +//~| WARN previously accepted fn main() {} diff --git a/src/test/ui/issues/issue-39404.stderr b/src/test/ui/issues/issue-39404.stderr new file mode 100644 index 0000000000000..d2f2a823c2a6b --- /dev/null +++ b/src/test/ui/issues/issue-39404.stderr @@ -0,0 +1,12 @@ +error: missing fragment specifier + --> $DIR/issue-39404.rs:3:19 + | +LL | macro_rules! m { ($i) => {} } + | ^^ + | + = note: `#[deny(missing_fragment_specifier)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #40107 + +error: aborting due to previous error + diff --git a/src/test/ui/issues/issue-67552.stderr b/src/test/ui/issues/issue-67552.stderr index 8243e52039d48..f3e73399b57ce 100644 --- a/src/test/ui/issues/issue-67552.stderr +++ b/src/test/ui/issues/issue-67552.stderr @@ -1,4 +1,4 @@ -error: reached the recursion limit while instantiating `rec::<&mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut &mut Empty>` +error: reached the recursion limit while instantiating `rec::<&mut &mut &mut &mut &mut &... &mut &mut &mut &mut &mut Empty>` --> $DIR/issue-67552.rs:27:9 | LL | rec(identity(&mut it)) diff --git a/src/test/ui/issues/issue-8727.stderr b/src/test/ui/issues/issue-8727.stderr index 59008151f1a5b..8d26c566d4193 100644 --- a/src/test/ui/issues/issue-8727.stderr +++ b/src/test/ui/issues/issue-8727.stderr @@ -9,7 +9,7 @@ LL | generic::>(); = note: `#[warn(unconditional_recursion)]` on by default = help: a `loop` may express intention better if this is on purpose -error: reached the recursion limit while instantiating `generic::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` +error: reached the recursion limit while instantiating `generic::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` --> $DIR/issue-8727.rs:7:5 | LL | generic::>(); diff --git a/src/test/ui/lint/expansion-time.rs b/src/test/ui/lint/expansion-time.rs index c98ecc980dd3d..6e420c51f0a7f 100644 --- a/src/test/ui/lint/expansion-time.rs +++ b/src/test/ui/lint/expansion-time.rs @@ -5,6 +5,10 @@ macro_rules! foo { ( $($i:ident)* ) => { $($i)+ }; //~ WARN meta-variable repeats with different Kleene operator } +#[warn(missing_fragment_specifier)] +macro_rules! 
m { ($i) => {} } //~ WARN missing fragment specifier + //~| WARN this was previously accepted + #[warn(soft_unstable)] mod benches { #[bench] //~ WARN use of unstable library feature 'test' diff --git a/src/test/ui/lint/expansion-time.stderr b/src/test/ui/lint/expansion-time.stderr index bc48d64e7e6b7..e6b5cf67e3904 100644 --- a/src/test/ui/lint/expansion-time.stderr +++ b/src/test/ui/lint/expansion-time.stderr @@ -12,14 +12,28 @@ note: the lint level is defined here LL | #[warn(meta_variable_misuse)] | ^^^^^^^^^^^^^^^^^^^^ +warning: missing fragment specifier + --> $DIR/expansion-time.rs:9:19 + | +LL | macro_rules! m { ($i) => {} } + | ^^ + | +note: the lint level is defined here + --> $DIR/expansion-time.rs:8:8 + | +LL | #[warn(missing_fragment_specifier)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #40107 + warning: use of unstable library feature 'test': `bench` is a part of custom test frameworks which are unstable - --> $DIR/expansion-time.rs:10:7 + --> $DIR/expansion-time.rs:14:7 | LL | #[bench] | ^^^^^ | note: the lint level is defined here - --> $DIR/expansion-time.rs:8:8 + --> $DIR/expansion-time.rs:12:8 | LL | #[warn(soft_unstable)] | ^^^^^^^^^^^^^ @@ -33,10 +47,10 @@ LL | 2 | ^ | note: the lint level is defined here - --> $DIR/expansion-time.rs:15:8 + --> $DIR/expansion-time.rs:19:8 | LL | #[warn(incomplete_include)] | ^^^^^^^^^^^^^^^^^^ -warning: 3 warnings emitted +warning: 4 warnings emitted diff --git a/src/test/ui/macros/issue-39404.stderr b/src/test/ui/macros/issue-39404.stderr deleted file mode 100644 index 645f06e59d817..0000000000000 --- a/src/test/ui/macros/issue-39404.stderr +++ /dev/null @@ -1,8 +0,0 @@ -error: missing fragment specifier - --> $DIR/issue-39404.rs:3:19 - | -LL | macro_rules! m { ($i) => {} } - | ^^ - -error: aborting due to previous error - diff --git a/src/test/ui/macros/macro-match-nonterminal.rs b/src/test/ui/macros/macro-match-nonterminal.rs index 6b023e4137274..b23e5c71c03f0 100644 --- a/src/test/ui/macros/macro-match-nonterminal.rs +++ b/src/test/ui/macros/macro-match-nonterminal.rs @@ -2,6 +2,7 @@ macro_rules! test { ($a, $b) => { //~^ ERROR missing fragment //~| ERROR missing fragment + //~| WARN this was previously accepted () }; } diff --git a/src/test/ui/macros/macro-match-nonterminal.stderr b/src/test/ui/macros/macro-match-nonterminal.stderr index 334d62812cdab..674ce3434aac6 100644 --- a/src/test/ui/macros/macro-match-nonterminal.stderr +++ b/src/test/ui/macros/macro-match-nonterminal.stderr @@ -9,6 +9,10 @@ error: missing fragment specifier | LL | ($a, $b) => { | ^^ + | + = note: `#[deny(missing_fragment_specifier)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
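For the new deny-by-default `missing_fragment_specifier` future-compatibility lint exercised by these tests, the fix is to give the macro metavariable a fragment specifier. A minimal before/after sketch follows; the macro name `m` matches the tests, and `:expr` is just one valid choice of specifier.

```rust
// Before (now linted, and slated to become a hard error):
//     macro_rules! m { ($i) => {} }
//
// After: annotate the metavariable with a fragment specifier such as `:expr`.
macro_rules! m {
    ($i:expr) => {
        $i
    };
}

fn main() {
    assert_eq!(m!(2 + 2), 4);
}
```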
+ = note: for more information, see issue #40107 error: aborting due to 2 previous errors diff --git a/src/test/ui/mir/issue-76803-branches-not-same.rs b/src/test/ui/mir/issue-76803-branches-not-same.rs new file mode 100644 index 0000000000000..a6a5762200548 --- /dev/null +++ b/src/test/ui/mir/issue-76803-branches-not-same.rs @@ -0,0 +1,19 @@ +// run-pass + +#[derive(Debug, Eq, PartialEq)] +pub enum Type { + A, + B, +} + + +pub fn encode(v: Type) -> Type { + match v { + Type::A => Type::B, + _ => v, + } +} + +fn main() { + assert_eq!(Type::B, encode(Type::A)); +} diff --git a/src/test/ui/mir/simplify-branch-same.rs b/src/test/ui/mir/simplify-branch-same.rs new file mode 100644 index 0000000000000..d631c33d61f84 --- /dev/null +++ b/src/test/ui/mir/simplify-branch-same.rs @@ -0,0 +1,21 @@ +// Regression test for SimplifyBranchSame miscompilation. +// run-pass + +macro_rules! m { + ($a:expr, $b:expr, $c:block) => { + match $a { + Lto::Fat | Lto::Thin => { $b; (); $c } + Lto::No => { $b; () } + } + } +} + +pub enum Lto { No, Thin, Fat } + +fn f(mut cookie: u32, lto: Lto) -> u32 { + let mut _a = false; + m!(lto, _a = true, {cookie = 0}); + cookie +} + +fn main() { assert_eq!(f(42, Lto::Thin), 0) } diff --git a/src/test/ui/parser/macro/issue-33569.rs b/src/test/ui/parser/macro/issue-33569.rs index cf81f0480a2a7..80e2d7c6545ba 100644 --- a/src/test/ui/parser/macro/issue-33569.rs +++ b/src/test/ui/parser/macro/issue-33569.rs @@ -2,7 +2,6 @@ macro_rules! foo { { $+ } => { //~ ERROR expected identifier, found `+` //~^ ERROR missing fragment specifier $(x)(y) //~ ERROR expected one of: `*`, `+`, or `?` - //~^ ERROR attempted to repeat an expression containing no syntax variables } } diff --git a/src/test/ui/parser/macro/issue-33569.stderr b/src/test/ui/parser/macro/issue-33569.stderr index f54efaa6996f2..b4d38d3ce4806 100644 --- a/src/test/ui/parser/macro/issue-33569.stderr +++ b/src/test/ui/parser/macro/issue-33569.stderr @@ -4,23 +4,17 @@ error: expected identifier, found `+` LL | { $+ } => { | ^ -error: missing fragment specifier - --> $DIR/issue-33569.rs:2:8 - | -LL | { $+ } => { - | ^ - error: expected one of: `*`, `+`, or `?` --> $DIR/issue-33569.rs:4:13 | LL | $(x)(y) | ^^^ -error: attempted to repeat an expression containing no syntax variables matched as repeating at this depth - --> $DIR/issue-33569.rs:4:10 +error: missing fragment specifier + --> $DIR/issue-33569.rs:2:8 | -LL | $(x)(y) - | ^^^ +LL | { $+ } => { + | ^ -error: aborting due to 4 previous errors +error: aborting due to 3 previous errors diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs index 35c101587de05..bc82a2ff196d2 100644 --- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs +++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs @@ -13,18 +13,37 @@ extern crate std; // place of a `None`-delimited group. This allows us to maintain // backwards compatibility for older versions of these crates. -include!("js-sys/src/lib.rs"); -include!("time-macros-impl/src/lib.rs"); +mod no_version { + include!("js-sys/src/lib.rs"); + include!("time-macros-impl/src/lib.rs"); -macro_rules! other { - ($name:ident) => { - #[my_macro] struct Three($name); + macro_rules! 
other { + ($name:ident) => { + #[my_macro] struct Three($name); + } } + + struct Foo; + impl_macros!(Foo); + arrays!(Foo); + other!(Foo); } -fn main() { +mod with_version { + include!("js-sys-0.3.17/src/lib.rs"); + include!("time-macros-impl-0.1.0/src/lib.rs"); + + macro_rules! other { + ($name:ident) => { + #[my_macro] struct Three($name); + } + } + struct Foo; impl_macros!(Foo); arrays!(Foo); other!(Foo); } + + +fn main() {} diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout index d519daab1f287..e7645280a7509 100644 --- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout +++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout @@ -1,3 +1,6 @@ Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/time-macros-impl/src/lib.rs:5:21: 5:27 (#5) }, Ident { ident: "One", span: $DIR/time-macros-impl/src/lib.rs:5:28: 5:31 (#5) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:27:18: 27:21 (#0) }], span: $DIR/time-macros-impl/src/lib.rs:5:31: 5:38 (#5) }, Punct { ch: ';', spacing: Alone, span: $DIR/time-macros-impl/src/lib.rs:5:38: 5:39 (#5) }] Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/js-sys/src/lib.rs:5:21: 5:27 (#9) }, Ident { ident: "Two", span: $DIR/js-sys/src/lib.rs:5:28: 5:31 (#9) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:28:13: 28:16 (#0) }], span: $DIR/js-sys/src/lib.rs:5:31: 5:38 (#9) }, Punct { ch: ';', spacing: Alone, span: $DIR/js-sys/src/lib.rs:5:38: 5:39 (#9) }] -Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:21:21: 21:27 (#13) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:21:28: 21:33 (#13) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:29:12: 29:15 (#0) }], span: $DIR/group-compat-hack.rs:21:34: 21:39 (#13) }], span: $DIR/group-compat-hack.rs:21:33: 21:40 (#13) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:21:40: 21:41 (#13) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:22:25: 22:31 (#13) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:22:32: 22:37 (#13) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:29:12: 29:15 (#0) }], span: $DIR/group-compat-hack.rs:22:38: 22:43 (#13) }], span: $DIR/group-compat-hack.rs:22:37: 22:44 (#13) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:22:44: 22:45 (#13) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:21: 5:27 (#19) }, Ident { ident: "One", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:28: 5:31 (#19) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:43:18: 43:21 (#0) }], span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:31: 5:38 (#19) }, Punct { ch: ';', spacing: Alone, span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:38: 5:39 (#19) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/js-sys-0.3.17/src/lib.rs:5:21: 5:27 (#23) }, Ident { ident: "Two", span: $DIR/js-sys-0.3.17/src/lib.rs:5:28: 5:31 (#23) 
}, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:44:13: 44:16 (#0) }], span: $DIR/js-sys-0.3.17/src/lib.rs:5:31: 5:38 (#23) }, Punct { ch: ';', spacing: Alone, span: $DIR/js-sys-0.3.17/src/lib.rs:5:38: 5:39 (#23) }] +Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:38:25: 38:31 (#27) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:38:32: 38:37 (#27) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:45:12: 45:15 (#0) }], span: $DIR/group-compat-hack.rs:38:38: 38:43 (#27) }], span: $DIR/group-compat-hack.rs:38:37: 38:44 (#27) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:38:44: 38:45 (#27) }] diff --git a/src/test/ui/proc-macro/group-compat-hack/js-sys-0.3.17/src/lib.rs b/src/test/ui/proc-macro/group-compat-hack/js-sys-0.3.17/src/lib.rs new file mode 100644 index 0000000000000..d1a66940ebf3c --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/js-sys-0.3.17/src/lib.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! arrays { + ($name:ident) => { + #[my_macro] struct Two($name); + } +} diff --git a/src/test/ui/proc-macro/group-compat-hack/time-macros-impl-0.1.0/src/lib.rs b/src/test/ui/proc-macro/group-compat-hack/time-macros-impl-0.1.0/src/lib.rs new file mode 100644 index 0000000000000..c94c357920974 --- /dev/null +++ b/src/test/ui/proc-macro/group-compat-hack/time-macros-impl-0.1.0/src/lib.rs @@ -0,0 +1,7 @@ +// ignore-test this is not a test + +macro_rules! impl_macros { + ($name:ident) => { + #[my_macro] struct One($name); + } +} diff --git a/src/test/ui/recursion/issue-38591-non-regular-dropck-recursion.stderr b/src/test/ui/recursion/issue-38591-non-regular-dropck-recursion.stderr index ba5e8a9e39f72..72547fe79fdee 100644 --- a/src/test/ui/recursion/issue-38591-non-regular-dropck-recursion.stderr +++ b/src/test/ui/recursion/issue-38591-non-regular-dropck-recursion.stderr @@ -1,4 +1,4 @@ -error: reached the recursion limit while instantiating `std::intrinsics::drop_in_place::> - shim(Some(S))` +error: reached the recursion limit while instantiating `std::intrinsics::drop_in_place::...)))))))))))))))))))))))))))))>))` --> $SRC_DIR/core/src/ptr/mod.rs:LL:COL | LL | / pub unsafe fn drop_in_place(to_drop: *mut T) { diff --git a/src/test/ui/recursion/recursion.stderr b/src/test/ui/recursion/recursion.stderr index db4c99eeb8b16..085bf82ef8b93 100644 --- a/src/test/ui/recursion/recursion.stderr +++ b/src/test/ui/recursion/recursion.stderr @@ -1,4 +1,4 @@ -error: reached the recursion limit while instantiating `test::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` +error: reached the recursion limit while instantiating `test::>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>` --> $DIR/recursion.rs:17:11 | LL | _ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})} diff --git a/src/test/ui/regions/regions-name-undeclared.rs b/src/test/ui/regions/regions-name-undeclared.rs index 044c688977262..b8f50a40c4523 100644 --- a/src/test/ui/regions/regions-name-undeclared.rs +++ b/src/test/ui/regions/regions-name-undeclared.rs @@ -1,3 +1,4 @@ +// edition:2018 // Check that lifetime resolver enforces the lifetime name scoping // rules correctly in various scenarios. 
@@ -47,4 +48,11 @@ fn fn_types(a: &'a isize, //~ ERROR undeclared lifetime { } +struct Bug {} +impl Bug { + async fn buggy(&self) -> &'a str { //~ ERROR use of undeclared lifetime name `'a` + todo!() + } +} + pub fn main() {} diff --git a/src/test/ui/regions/regions-name-undeclared.stderr b/src/test/ui/regions/regions-name-undeclared.stderr index 57d39d59c8b04..ad0e7bd5afbc0 100644 --- a/src/test/ui/regions/regions-name-undeclared.stderr +++ b/src/test/ui/regions/regions-name-undeclared.stderr @@ -1,5 +1,5 @@ error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/regions-name-undeclared.rs:15:24 + --> $DIR/regions-name-undeclared.rs:16:24 | LL | fn m4(&self, arg: &'b isize) { } | ^^ undeclared lifetime @@ -15,7 +15,7 @@ LL | fn m4<'b>(&self, arg: &'b isize) { } | ^^^^ error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/regions-name-undeclared.rs:16:12 + --> $DIR/regions-name-undeclared.rs:17:12 | LL | fn m5(&'b self) { } | ^^ undeclared lifetime @@ -31,7 +31,7 @@ LL | fn m5<'b>(&'b self) { } | ^^^^ error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/regions-name-undeclared.rs:17:27 + --> $DIR/regions-name-undeclared.rs:18:27 | LL | fn m6(&self, arg: Foo<'b>) { } | ^^ undeclared lifetime @@ -47,7 +47,7 @@ LL | fn m6<'b>(&self, arg: Foo<'b>) { } | ^^^^ error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:25:22 + --> $DIR/regions-name-undeclared.rs:26:22 | LL | type X = Option<&'a isize>; | - ^^ undeclared lifetime @@ -57,7 +57,7 @@ LL | type X = Option<&'a isize>; = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:27:13 + --> $DIR/regions-name-undeclared.rs:28:13 | LL | enum E { | - help: consider introducing lifetime `'a` here: `<'a>` @@ -67,7 +67,7 @@ LL | E1(&'a isize) = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:30:13 + --> $DIR/regions-name-undeclared.rs:31:13 | LL | struct S { | - help: consider introducing lifetime `'a` here: `<'a>` @@ -77,7 +77,7 @@ LL | f: &'a isize = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:32:14 + --> $DIR/regions-name-undeclared.rs:33:14 | LL | fn f(a: &'a isize) { } | - ^^ undeclared lifetime @@ -87,7 +87,7 @@ LL | fn f(a: &'a isize) { } = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:40:17 + --> $DIR/regions-name-undeclared.rs:41:17 | LL | fn fn_types(a: &'a isize, | - ^^ undeclared lifetime @@ -97,7 +97,7 @@ LL | fn fn_types(a: &'a isize, = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/regions-name-undeclared.rs:42:36 + --> $DIR/regions-name-undeclared.rs:43:36 | LL | ... 
&'b isize, | ^^ undeclared lifetime @@ -114,7 +114,7 @@ LL | b: Box FnOnce(&'a isize, | ^^^^ error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/regions-name-undeclared.rs:45:36 + --> $DIR/regions-name-undeclared.rs:46:36 | LL | ... &'b isize)>, | ^^ undeclared lifetime @@ -131,7 +131,7 @@ LL | b: Box FnOnce(&'a isize, | ^^^^ error[E0261]: use of undeclared lifetime name `'a` - --> $DIR/regions-name-undeclared.rs:46:17 + --> $DIR/regions-name-undeclared.rs:47:17 | LL | fn fn_types(a: &'a isize, | - help: consider introducing lifetime `'a` here: `<'a>` @@ -141,6 +141,22 @@ LL | c: &'a isize) | = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes -error: aborting due to 11 previous errors +error[E0261]: use of undeclared lifetime name `'a` + --> $DIR/regions-name-undeclared.rs:53:31 + | +LL | async fn buggy(&self) -> &'a str { + | ^^ undeclared lifetime + | + = help: if you want to experiment with in-band lifetime bindings, add `#![feature(in_band_lifetimes)]` to the crate attributes +help: consider introducing lifetime `'a` here + | +LL | impl<'a> Bug { + | ^^^^ +help: consider introducing lifetime `'a` here + | +LL | async fn buggy<'a>(&self) -> &'a str { + | ^^^^ + +error: aborting due to 12 previous errors For more information about this error, try `rustc --explain E0261`. diff --git a/src/test/ui/type_length_limit.rs b/src/test/ui/type_length_limit.rs index 1f1c8ad962690..921cded5037b6 100644 --- a/src/test/ui/type_length_limit.rs +++ b/src/test/ui/type_length_limit.rs @@ -4,7 +4,7 @@ // Test that the type length limit can be changed. #![allow(dead_code)] -#![type_length_limit="256"] +#![type_length_limit="4"] macro_rules! link { ($id:ident, $t:ty) => { diff --git a/src/test/ui/type_length_limit.stderr b/src/test/ui/type_length_limit.stderr index 0d90f06076ab2..83da193eb04d2 100644 --- a/src/test/ui/type_length_limit.stderr +++ b/src/test/ui/type_length_limit.stderr @@ -1,10 +1,10 @@ -error: reached the type-length limit while instantiating `std::mem::drop::>` +error: reached the type-length limit while instantiating `std::mem::drop::>` --> $SRC_DIR/core/src/mem/mod.rs:LL:COL | LL | pub fn drop(_x: T) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: consider adding a `#![type_length_limit="1094"]` attribute to your crate + = note: consider adding a `#![type_length_limit="8"]` attribute to your crate error: aborting due to previous error diff --git a/src/tools/cargo b/src/tools/cargo index 51b66125ba97d..f3c7e066ad66e 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 51b66125ba97d2906f461b3f4e0408f206299bb6 +Subproject commit f3c7e066ad66e05439cf8eab165a2de580b41aaf diff --git a/src/tools/cargotest/main.rs b/src/tools/cargotest/main.rs index b65163a3bc9f5..0f56dbba1d409 100644 --- a/src/tools/cargotest/main.rs +++ b/src/tools/cargotest/main.rs @@ -29,7 +29,14 @@ const TEST_REPOS: &[Test] = &[ Test { name: "tokei", repo: "https://github.com/XAMPPRocky/tokei", - sha: "a950ff128d5a435a8083b1c7577c0431f98360ca", + sha: "5e11c4852fe4aa086b0e4fe5885822fbe57ba928", + lock: None, + packages: &[], + }, + Test { + name: "treeify", + repo: "https://github.com/dzamlo/treeify", + sha: "999001b223152441198f117a68fb81f57bc086dd", lock: None, packages: &[], }, diff --git a/src/tools/clippy/clippy_lints/src/loops.rs b/src/tools/clippy/clippy_lints/src/loops.rs index 8ffcd417d1df1..294f0449281ab 100644 --- a/src/tools/clippy/clippy_lints/src/loops.rs +++ b/src/tools/clippy/clippy_lints/src/loops.rs 
@@ -1131,6 +1131,27 @@ fn detect_same_item_push<'tcx>( body: &'tcx Expr<'_>, _: &'tcx Expr<'_>, ) { + fn emit_lint(cx: &LateContext<'_>, vec: &Expr<'_>, pushed_item: &Expr<'_>) { + let vec_str = snippet_with_macro_callsite(cx, vec.span, ""); + let item_str = snippet_with_macro_callsite(cx, pushed_item.span, ""); + + span_lint_and_help( + cx, + SAME_ITEM_PUSH, + vec.span, + "it looks like the same item is being pushed into this Vec", + None, + &format!( + "try using vec![{};SIZE] or {}.resize(NEW_SIZE, {})", + item_str, vec_str, item_str + ), + ) + } + + if !matches!(pat.kind, PatKind::Wild) { + return; + } + // Determine whether it is safe to lint the body let mut same_item_push_visitor = SameItemPushVisitor { should_lint: true, @@ -1140,23 +1161,50 @@ fn detect_same_item_push<'tcx>( walk_expr(&mut same_item_push_visitor, body); if same_item_push_visitor.should_lint { if let Some((vec, pushed_item)) = same_item_push_visitor.vec_push { - // Make sure that the push does not involve possibly mutating values - if mutated_variables(pushed_item, cx).map_or(false, |mutvars| mutvars.is_empty()) { - if let PatKind::Wild = pat.kind { - let vec_str = snippet_with_macro_callsite(cx, vec.span, ""); - let item_str = snippet_with_macro_callsite(cx, pushed_item.span, ""); - - span_lint_and_help( - cx, - SAME_ITEM_PUSH, - vec.span, - "it looks like the same item is being pushed into this Vec", - None, - &format!( - "try using vec![{};SIZE] or {}.resize(NEW_SIZE, {})", - item_str, vec_str, item_str - ), - ) + let vec_ty = cx.typeck_results().expr_ty(vec); + let ty = vec_ty.walk().nth(1).unwrap().expect_ty(); + if cx + .tcx + .lang_items() + .clone_trait() + .map_or(false, |id| implements_trait(cx, ty, id, &[])) + { + // Make sure that the push does not involve possibly mutating values + match pushed_item.kind { + ExprKind::Path(ref qpath) => { + match qpath_res(cx, qpath, pushed_item.hir_id) { + // immutable bindings that are initialized with literal or constant + Res::Local(hir_id) => { + if_chain! { + let node = cx.tcx.hir().get(hir_id); + if let Node::Binding(pat) = node; + if let PatKind::Binding(bind_ann, ..) = pat.kind; + if !matches!(bind_ann, BindingAnnotation::RefMut | BindingAnnotation::Mutable); + let parent_node = cx.tcx.hir().get_parent_node(hir_id); + if let Some(Node::Local(parent_let_expr)) = cx.tcx.hir().find(parent_node); + if let Some(init) = parent_let_expr.init; + then { + match init.kind { + // immutable bindings that are initialized with literal + ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item), + // immutable bindings that are initialized with constant + ExprKind::Path(ref path) => { + if let Res::Def(DefKind::Const, ..) = qpath_res(cx, path, init.hir_id) { + emit_lint(cx, vec, pushed_item); + } + } + _ => {}, + } + } + } + }, + // constant + Res::Def(DefKind::Const, ..) => emit_lint(cx, vec, pushed_item), + _ => {}, + } + }, + ExprKind::Lit(..) => emit_lint(cx, vec, pushed_item), + _ => {}, } } } diff --git a/src/tools/clippy/clippy_lints/src/repeat_once.rs b/src/tools/clippy/clippy_lints/src/repeat_once.rs index 77c206002ea79..c0890018d46ab 100644 --- a/src/tools/clippy/clippy_lints/src/repeat_once.rs +++ b/src/tools/clippy/clippy_lints/src/repeat_once.rs @@ -39,12 +39,12 @@ declare_lint_pass!(RepeatOnce => [REPEAT_ONCE]); impl<'tcx> LateLintPass<'tcx> for RepeatOnce { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &'tcx Expr<'_>) { if_chain! 
{ - if let ExprKind::MethodCall(ref path, _, ref args, _) = expr.kind; + if let ExprKind::MethodCall(path, _, [receiver, count], _) = &expr.kind; if path.ident.name == sym!(repeat); - if let Some(Constant::Int(1)) = constant_context(cx, cx.typeck_results()).expr(&args[1]); - if !in_macro(args[0].span); + if let Some(Constant::Int(1)) = constant_context(cx, cx.typeck_results()).expr(&count); + if !in_macro(receiver.span); then { - let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0])); + let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&receiver)); if ty.is_str() { span_lint_and_sugg( cx, @@ -52,7 +52,7 @@ impl<'tcx> LateLintPass<'tcx> for RepeatOnce { expr.span, "calling `repeat(1)` on str", "consider using `.to_string()` instead", - format!("{}.to_string()", snippet(cx, args[0].span, r#""...""#)), + format!("{}.to_string()", snippet(cx, receiver.span, r#""...""#)), Applicability::MachineApplicable, ); } else if ty.builtin_index().is_some() { @@ -62,7 +62,7 @@ impl<'tcx> LateLintPass<'tcx> for RepeatOnce { expr.span, "calling `repeat(1)` on slice", "consider using `.to_vec()` instead", - format!("{}.to_vec()", snippet(cx, args[0].span, r#""...""#)), + format!("{}.to_vec()", snippet(cx, receiver.span, r#""...""#)), Applicability::MachineApplicable, ); } else if is_type_diagnostic_item(cx, ty, sym!(string_type)) { @@ -72,7 +72,7 @@ impl<'tcx> LateLintPass<'tcx> for RepeatOnce { expr.span, "calling `repeat(1)` on a string literal", "consider using `.clone()` instead", - format!("{}.clone()", snippet(cx, args[0].span, r#""...""#)), + format!("{}.clone()", snippet(cx, receiver.span, r#""...""#)), Applicability::MachineApplicable, ); } diff --git a/src/tools/clippy/tests/ui/crashes/ice-5944.rs b/src/tools/clippy/tests/ui/crashes/ice-5944.rs new file mode 100644 index 0000000000000..5caf29c619735 --- /dev/null +++ b/src/tools/clippy/tests/ui/crashes/ice-5944.rs @@ -0,0 +1,13 @@ +#![warn(clippy::repeat_once)] + +trait Repeat { + fn repeat(&self) {} +} + +impl Repeat for usize { + fn repeat(&self) {} +} + +fn main() { + let _ = 42.repeat(); +} diff --git a/src/tools/clippy/tests/ui/same_item_push.rs b/src/tools/clippy/tests/ui/same_item_push.rs index ff1088f86f647..a37c8782ec330 100644 --- a/src/tools/clippy/tests/ui/same_item_push.rs +++ b/src/tools/clippy/tests/ui/same_item_push.rs @@ -1,5 +1,7 @@ #![warn(clippy::same_item_push)] +const VALUE: u8 = 7; + fn mutate_increment(x: &mut u8) -> u8 { *x += 1; *x @@ -9,65 +11,81 @@ fn increment(x: u8) -> u8 { x + 1 } -fn main() { - // Test for basic case - let mut spaces = Vec::with_capacity(10); - for _ in 0..10 { - spaces.push(vec![b' ']); - } +fn fun() -> usize { + 42 +} - let mut vec2: Vec = Vec::new(); +fn main() { + // ** linted cases ** + let mut vec: Vec = Vec::new(); let item = 2; for _ in 5..=20 { - vec2.push(item); + vec.push(item); } - let mut vec3: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for _ in 0..15 { let item = 2; - vec3.push(item); + vec.push(item); } - let mut vec4: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for _ in 0..15 { - vec4.push(13); + vec.push(13); + } + + let mut vec = Vec::new(); + for _ in 0..20 { + vec.push(VALUE); + } + + let mut vec = Vec::new(); + let item = VALUE; + for _ in 0..20 { + vec.push(item); + } + + // ** non-linted cases ** + let mut spaces = Vec::with_capacity(10); + for _ in 0..10 { + spaces.push(vec![b' ']); } // Suggestion should not be given as pushed variable can mutate - let mut vec5: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); let mut item: u8 = 2; for _ in 0..30 { - 
vec5.push(mutate_increment(&mut item)); + vec.push(mutate_increment(&mut item)); } - let mut vec6: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); let mut item: u8 = 2; let mut item2 = &mut mutate_increment(&mut item); for _ in 0..30 { - vec6.push(mutate_increment(item2)); + vec.push(mutate_increment(item2)); } - let mut vec7: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for (a, b) in [0, 1, 4, 9, 16].iter().enumerate() { - vec7.push(a); + vec.push(a); } - let mut vec8: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for i in 0..30 { - vec8.push(increment(i)); + vec.push(increment(i)); } - let mut vec9: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for i in 0..30 { - vec9.push(i + i * i); + vec.push(i + i * i); } // Suggestion should not be given as there are multiple pushes that are not the same - let mut vec10: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); let item: u8 = 2; for _ in 0..30 { - vec10.push(item); - vec10.push(item * 2); + vec.push(item); + vec.push(item * 2); } // Suggestion should not be given as Vec is not involved @@ -82,8 +100,52 @@ fn main() { for i in 0..30 { vec_a.push(A { kind: i }); } - let mut vec12: Vec = Vec::new(); + let mut vec: Vec = Vec::new(); for a in vec_a { - vec12.push(2u8.pow(a.kind)); + vec.push(2u8.pow(a.kind)); + } + + // Fix #5902 + let mut vec: Vec = Vec::new(); + let mut item = 0; + for _ in 0..10 { + vec.push(item); + item += 10; + } + + // Fix #5979 + let mut vec: Vec = Vec::new(); + for _ in 0..10 { + vec.push(std::fs::File::open("foobar").unwrap()); + } + // Fix #5979 + #[derive(Clone)] + struct S {} + + trait T {} + impl T for S {} + + let mut vec: Vec> = Vec::new(); + for _ in 0..10 { + vec.push(Box::new(S {})); + } + + // Fix #5985 + let mut vec = Vec::new(); + let item = 42; + let item = fun(); + for _ in 0..20 { + vec.push(item); + } + + // Fix #5985 + let mut vec = Vec::new(); + let key = 1; + for _ in 0..20 { + let item = match key { + 1 => 10, + _ => 0, + }; + vec.push(item); } } diff --git a/src/tools/clippy/tests/ui/same_item_push.stderr b/src/tools/clippy/tests/ui/same_item_push.stderr index ddc5d48cd4135..d9ffa15780ad0 100644 --- a/src/tools/clippy/tests/ui/same_item_push.stderr +++ b/src/tools/clippy/tests/ui/same_item_push.stderr @@ -1,35 +1,43 @@ error: it looks like the same item is being pushed into this Vec - --> $DIR/same_item_push.rs:16:9 + --> $DIR/same_item_push.rs:23:9 | -LL | spaces.push(vec![b' ']); - | ^^^^^^ +LL | vec.push(item); + | ^^^ | = note: `-D clippy::same-item-push` implied by `-D warnings` - = help: try using vec![vec![b' '];SIZE] or spaces.resize(NEW_SIZE, vec![b' ']) + = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item) error: it looks like the same item is being pushed into this Vec - --> $DIR/same_item_push.rs:22:9 + --> $DIR/same_item_push.rs:29:9 | -LL | vec2.push(item); - | ^^^^ +LL | vec.push(item); + | ^^^ | - = help: try using vec![item;SIZE] or vec2.resize(NEW_SIZE, item) + = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item) error: it looks like the same item is being pushed into this Vec - --> $DIR/same_item_push.rs:28:9 + --> $DIR/same_item_push.rs:34:9 | -LL | vec3.push(item); - | ^^^^ +LL | vec.push(13); + | ^^^ | - = help: try using vec![item;SIZE] or vec3.resize(NEW_SIZE, item) + = help: try using vec![13;SIZE] or vec.resize(NEW_SIZE, 13) error: it looks like the same item is being pushed into this Vec - --> $DIR/same_item_push.rs:33:9 + --> $DIR/same_item_push.rs:39:9 | -LL | vec4.push(13); - | ^^^^ +LL | vec.push(VALUE); + | ^^^ | - = help: 
try using vec![13;SIZE] or vec4.resize(NEW_SIZE, 13) + = help: try using vec![VALUE;SIZE] or vec.resize(NEW_SIZE, VALUE) -error: aborting due to 4 previous errors +error: it looks like the same item is being pushed into this Vec + --> $DIR/same_item_push.rs:45:9 + | +LL | vec.push(item); + | ^^^ + | + = help: try using vec![item;SIZE] or vec.resize(NEW_SIZE, item) + +error: aborting due to 5 previous errors diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index b7ceba1e28262..d83fd7b2922cf 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -141,6 +141,16 @@ fn is_exception(file: &Path, link: &str) -> bool { if let Some(entry) = LINKCHECK_EXCEPTIONS.iter().find(|&(f, _)| file.ends_with(f)) { entry.1.contains(&link) } else { + // FIXME(#63351): Concat trait in alloc/slice reexported in primitive page + // + // NOTE: This cannot be added to `LINKCHECK_EXCEPTIONS` because the resolved path + // calculated in `check` function is outside `build//doc` dir. + // So the `strip_prefix` method just returns the old absolute broken path. + if file.ends_with("std/primitive.slice.html") { + if link.ends_with("primitive.slice.html") { + return true; + } + } false } }
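The reworked clippy `same_item_push` test and stderr above show what the lint now targets: an immutable, literal- or const-initialized value of a `Clone` type pushed on every loop iteration. A minimal reproduction and the suggested rewrite, with illustrative sizes and names:

```rust
fn main() {
    const VALUE: u8 = 7;

    // Lint target: the same immutable item pushed on every iteration.
    let mut vec: Vec<u8> = Vec::new();
    for _ in 0..20 {
        vec.push(VALUE); // clippy: try `vec![VALUE; 20]` or `vec.resize(20, VALUE)`
    }

    // The suggested replacement produces the same contents.
    let direct = vec![VALUE; 20];
    assert_eq!(vec, direct);
}
```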