Auto merge of #72768 - JohnTitor:rollup-6kwokh6, r=JohnTitor

Rollup of 10 pull requests

Successful merges:

 - #72033 (Update RELEASES.md for 1.44.0)
 - #72162 (Add Extend::{extend_one,extend_reserve})
 - #72419 (Miri read_discriminant: return a scalar instead of raw underlying bytes)
 - #72621 (Don't bail out of trait selection when predicate references an error)
 - #72677 (Fix diagnostics for `@ ..` binding pattern in tuples and tuple structs)
 - #72710 (Add test to make sure -Wunused-crate-dependencies works with tests)
 - #72724 (Revert recursive `TokenKind::Interpolated` expansion for now)
 - #72741 (Remove unused mut from long-linker-command-lines test)
 - #72750 (Remove remaining calls to `as_local_node_id`)
 - #72752 (remove mk_bool)

Failed merges:

r? @ghost

commit 91fb72a8a9
@ -4144,7 +4144,6 @@ dependencies = [
 "rustc_lexer",
 "rustc_session",
 "rustc_span",
 "smallvec 1.4.0",
 "unicode-normalization",
]

 RELEASES.md | 160
@ -1,3 +1,163 @@
Version 1.44.0 (2020-06-04)
==========================

Language
--------
- [You can now use `async/.await` with `#[no_std]` enabled.][69033]
- [Added the `unused_braces` lint.][70081]

**Syntax-only changes**

- [Expansion-driven outline module parsing][69838]
```rust
#[cfg(FALSE)]
mod foo {
    mod bar {
        mod baz; // `foo/bar/baz.rs` doesn't exist, but no error!
    }
}
```

These are still rejected semantically, so you will likely receive an error, but
they can now be seen and parsed by macros and conditional compilation.

Compiler
--------
- [Rustc now respects the `-C codegen-units` flag in incremental mode.][70156]
  Additionally, when in incremental mode, rustc defaults to 256 codegen units.
- [Refactored `catch_unwind` to have zero cost unless unwinding is enabled and
  a panic is thrown.][67502]
- [Added tier 3\* support for the `aarch64-unknown-none` and
  `aarch64-unknown-none-softfloat` targets.][68334]
- [Added tier 3 support for the `arm64-apple-tvos` and
  `x86_64-apple-tvos` targets.][68191]


Libraries
---------
- [Special-cased `vec![]` to map directly to `Vec::new()`.][70632] This allows
  `vec![]` to be used in `const` contexts (see the sketch after this list).
- [`convert::Infallible` now implements `Hash`.][70281]
- [`OsString` now implements `DerefMut` and `IndexMut`, returning
  a `&mut OsStr`.][70048]
- [Unicode 13 is now supported.][69929]
- [`String` now implements `From<&mut str>`.][69661]
- [`IoSlice` now implements `Copy`.][69403]
- [`Vec<T>` now implements `From<[T; N]>`][68692], where `N` is less than 32.
- [`proc_macro::LexError` now implements `fmt::Display` and `Error`.][68899]
- [The `from_le_bytes`, `to_le_bytes`, `from_be_bytes`, `to_be_bytes`,
  `from_ne_bytes`, and `to_ne_bytes` methods are now `const` for all
  integer types.][69373]
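A quick illustration of a few of the library changes above; this is not part of
the release notes themselves, just a minimal sketch exercising the new
capabilities on a 1.44.0 (or later) toolchain:

```rust
// `vec![]` now expands to `Vec::new()`, so it is allowed in `const` items.
const EMPTY: Vec<u32> = vec![];

fn main() {
    assert!(EMPTY.is_empty());

    // `From<[T; N]>` for `Vec<T>`, for `N` less than 32.
    let v = Vec::from([1, 2, 3]);
    assert_eq!(v, [1, 2, 3]);

    // The byte-conversion methods are now `const fn`s.
    const BE: i32 = i32::from_be_bytes([0, 0, 1, 0]);
    assert_eq!(BE, 256);
}
```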

Stabilized APIs
---------------
- [`PathBuf::with_capacity`]
- [`PathBuf::capacity`]
- [`PathBuf::clear`]
- [`PathBuf::reserve`]
- [`PathBuf::reserve_exact`]
- [`PathBuf::shrink_to_fit`]
- [`f32::to_int_unchecked`]
- [`f64::to_int_unchecked`]
- [`Layout::align_to`]
- [`Layout::pad_to_align`]
- [`Layout::array`]
- [`Layout::extend`]

Cargo
-----
- [Added the `cargo tree` command, which prints a tree graph of
  your dependencies.][cargo/8062] E.g.
  ```
  mdbook v0.3.2 (/Users/src/rust/mdbook)
  ├── ammonia v3.0.0
  │   ├── html5ever v0.24.0
  │   │   ├── log v0.4.8
  │   │   │   └── cfg-if v0.1.9
  │   │   ├── mac v0.1.1
  │   │   └── markup5ever v0.9.0
  │   │       ├── log v0.4.8 (*)
  │   │       ├── phf v0.7.24
  │   │       │   └── phf_shared v0.7.24
  │   │       │       ├── siphasher v0.2.3
  │   │       │       └── unicase v1.4.2
  │   │       │           [build-dependencies]
  │   │       │           └── version_check v0.1.5
  ...
  ```

Misc
----
- [Rustdoc now allows you to specify `--crate-version` to have rustdoc include
  the version in the sidebar.][69494]

Compatibility Notes
-------------------
- [Rustc now correctly generates static libraries on Windows GNU targets with
  the `.a` extension, rather than the previous `.lib`.][70937]
- [Removed the `-C no_integrated_as` flag from rustc.][70345]
- [The `file_name` property in JSON output of macro errors now points to the
  actual source file rather than the previous format of `<NAME macros>`.][70969]
  **Note:** this may not point to a file that actually exists on the user's system.
- [The minimum required external LLVM version has been bumped to LLVM 8.][71147]
- [`mem::{zeroed, uninitialized, MaybeUninit}` will now panic when used with types
  that do not allow zero initialization, such as `NonZeroU8`.][66059] This was
  previously a warning.
- [In 1.45.0 (the next release) converting an `f64` to a `u32` using the `as`
  operator has been defined as a saturating operation.][71269] This was previously
  undefined behaviour; you can use the `{f64, f32}::to_int_unchecked` methods to
  continue using the current behaviour, which may be desirable in rare
  performance-sensitive situations (see the sketch below this list).
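A minimal sketch of the cast behaviour that last note describes, assuming a
1.45.0-or-later toolchain for the saturating asserts; the concrete values are
illustrative only:

```rust
fn main() {
    // From 1.45.0 onwards, out-of-range `as` casts saturate:
    let big = 1e12_f64;
    assert_eq!(big as u32, u32::MAX);
    assert_eq!(-1.0_f64 as u32, 0);

    // When the value is known to be in range and the cast is
    // performance-sensitive, the newly stabilized `to_int_unchecked`
    // keeps the unchecked conversion (UB if out of range, hence `unsafe`):
    let x = 123.4_f64;
    let y: u32 = unsafe { x.to_int_unchecked() };
    assert_eq!(y, 123);
}
```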

Internal Only
-------------
These changes provide no direct user-facing benefits, but represent significant
improvements to the internals and overall performance of rustc and
related tools.

- [dep_graph: Avoid allocating a set when the number of reads is small.][69778]
- [Replace big JS dict with JSON parsing.][71250]

[69373]: https://github.com/rust-lang/rust/pull/69373/
[66059]: https://github.com/rust-lang/rust/pull/66059/
[68191]: https://github.com/rust-lang/rust/pull/68191/
[68899]: https://github.com/rust-lang/rust/pull/68899/
[71147]: https://github.com/rust-lang/rust/pull/71147/
[71250]: https://github.com/rust-lang/rust/pull/71250/
[70937]: https://github.com/rust-lang/rust/pull/70937/
[70969]: https://github.com/rust-lang/rust/pull/70969/
[70632]: https://github.com/rust-lang/rust/pull/70632/
[70281]: https://github.com/rust-lang/rust/pull/70281/
[70345]: https://github.com/rust-lang/rust/pull/70345/
[70048]: https://github.com/rust-lang/rust/pull/70048/
[70081]: https://github.com/rust-lang/rust/pull/70081/
[70156]: https://github.com/rust-lang/rust/pull/70156/
[71269]: https://github.com/rust-lang/rust/pull/71269/
[69838]: https://github.com/rust-lang/rust/pull/69838/
[69929]: https://github.com/rust-lang/rust/pull/69929/
[69661]: https://github.com/rust-lang/rust/pull/69661/
[69778]: https://github.com/rust-lang/rust/pull/69778/
[69494]: https://github.com/rust-lang/rust/pull/69494/
[69403]: https://github.com/rust-lang/rust/pull/69403/
[69033]: https://github.com/rust-lang/rust/pull/69033/
[68692]: https://github.com/rust-lang/rust/pull/68692/
[68334]: https://github.com/rust-lang/rust/pull/68334/
[67502]: https://github.com/rust-lang/rust/pull/67502/
[cargo/8062]: https://github.com/rust-lang/cargo/pull/8062/
[`PathBuf::with_capacity`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.with_capacity
[`PathBuf::capacity`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.capacity
[`PathBuf::clear`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.clear
[`PathBuf::reserve`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.reserve
[`PathBuf::reserve_exact`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.reserve_exact
[`PathBuf::shrink_to_fit`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.shrink_to_fit
[`f32::to_int_unchecked`]: https://doc.rust-lang.org/std/primitive.f32.html#method.to_int_unchecked
[`f64::to_int_unchecked`]: https://doc.rust-lang.org/std/primitive.f64.html#method.to_int_unchecked
[`Layout::align_to`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.align_to
[`Layout::pad_to_align`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.pad_to_align
[`Layout::array`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.array
[`Layout::extend`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.extend


Version 1.43.1 (2020-05-07)
===========================
@ -1376,6 +1376,16 @@ impl<T: Ord> Extend<T> for BinaryHeap<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
@ -1406,4 +1416,14 @@ impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
@ -1901,6 +1901,11 @@ impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
            self.insert(k, v);
        });
    }

    #[inline]
    fn extend_one(&mut self, (k, v): (K, V)) {
        self.insert(k, v);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
@ -1908,6 +1913,11 @@ impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
    fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
    }

    #[inline]
    fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
        self.insert(k, v);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
@ -1152,6 +1152,11 @@ impl<T: Ord> Extend<T> for BTreeSet<T> {
            self.insert(elem);
        });
    }

    #[inline]
    fn extend_one(&mut self, elem: T) {
        self.insert(elem);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
@ -1159,6 +1164,11 @@ impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &elem: &'a T) {
        self.insert(elem);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
@ -1748,6 +1748,11 @@ impl<T> Extend<T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }

    #[inline]
    fn extend_one(&mut self, elem: T) {
        self.push_back(elem);
    }
}

impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
@ -1767,6 +1772,11 @@ impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &elem: &'a T) {
        self.push_back(elem);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
@ -2881,6 +2881,16 @@ impl<A> Extend<A> for VecDeque<A> {
            }
        }
    }

    #[inline]
    fn extend_one(&mut self, elem: A) {
        self.push_back(elem);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
@ -2888,6 +2898,16 @@ impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &elem: &T) {
        self.push_back(elem);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
@ -93,6 +93,7 @@
#![feature(container_error_extra)]
#![feature(dropck_eyepatch)]
#![feature(exact_size_is_empty)]
#![feature(extend_one)]
#![feature(fmt_internals)]
#![feature(fn_traits)]
#![feature(fundamental)]
@ -1799,6 +1799,16 @@ impl Extend<char> for String {
        self.reserve(lower_bound);
        iterator.for_each(move |c| self.push(c));
    }

    #[inline]
    fn extend_one(&mut self, c: char) {
        self.push(c);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "extend_ref", since = "1.2.0")]
@ -1806,6 +1816,16 @@ impl<'a> Extend<&'a char> for String {
    fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &c: &'a char) {
        self.push(c);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
@ -1813,6 +1833,11 @@ impl<'a> Extend<&'a str> for String {
    fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(s));
    }

    #[inline]
    fn extend_one(&mut self, s: &'a str) {
        self.push_str(s);
    }
}

#[stable(feature = "extend_string", since = "1.4.0")]
@ -1820,6 +1845,11 @@ impl Extend<String> for String {
    fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(&s));
    }

    #[inline]
    fn extend_one(&mut self, s: String) {
        self.push_str(&s);
    }
}

#[stable(feature = "herd_cows", since = "1.19.0")]
@ -1827,6 +1857,11 @@ impl<'a> Extend<Cow<'a, str>> for String {
    fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |s| self.push_str(&s));
    }

    #[inline]
    fn extend_one(&mut self, s: Cow<'a, str>) {
        self.push_str(&s);
    }
}

/// A convenience impl that delegates to the impl for `&str`.
@ -2045,6 +2045,16 @@ impl<T> Extend<T> for Vec<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

// Specialization trait used for Vec::from_iter and Vec::extend
@ -2316,6 +2326,16 @@ impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.spec_extend(iter.into_iter())
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}

macro_rules! __impl_slice_eq1 {
@ -322,7 +322,7 @@ impl<I: Iterator> IntoIterator for I {
pub trait Extend<A> {
    /// Extends a collection with the contents of an iterator.
    ///
    /// As this is the only method for this trait, the [trait-level] docs
    /// As this is the only required method for this trait, the [trait-level] docs
    /// contain more details.
    ///
    /// [trait-level]: trait.Extend.html
@ -341,6 +341,20 @@ pub trait Extend<A> {
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T);

    /// Extends a collection with exactly one element.
    #[unstable(feature = "extend_one", issue = "72631")]
    fn extend_one(&mut self, item: A) {
        self.extend(Some(item));
    }

    /// Reserves capacity in a collection for the given number of additional elements.
    ///
    /// The default implementation does nothing.
    #[unstable(feature = "extend_one", issue = "72631")]
    fn extend_reserve(&mut self, additional: usize) {
        let _ = additional;
    }
}

#[stable(feature = "extend_for_unit", since = "1.28.0")]
@ -348,4 +362,5 @@ impl Extend<()> for () {
    fn extend<T: IntoIterator<Item = ()>>(&mut self, iter: T) {
        iter.into_iter().for_each(drop)
    }
    fn extend_one(&mut self, _item: ()) {}
}
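To make the new hooks concrete, here is a minimal sketch of a third-party
"collection" overriding them; `Counter` is a made-up type, and `extend_one`
was still nightly-only under `feature(extend_one)` at the time of this merge:

```rust
#![feature(extend_one)] // unstable at the time of this change (issue #72631)

/// A toy "collection" that just sums what it is fed.
struct Counter {
    total: u64,
}

impl Extend<u64> for Counter {
    fn extend<T: IntoIterator<Item = u64>>(&mut self, iter: T) {
        for n in iter {
            self.total += n;
        }
    }

    // Lets callers such as `Iterator::partition` add a single element
    // without wrapping it in a one-shot iterator.
    fn extend_one(&mut self, item: u64) {
        self.total += item;
    }

    // Nothing to pre-allocate for a running sum, so mirror the no-op default.
    fn extend_reserve(&mut self, _additional: usize) {}
}

fn main() {
    let mut c = Counter { total: 0 };
    c.extend([1u64, 2, 3].iter().copied());
    c.extend_one(4);
    assert_eq!(c.total, 10);
}
```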

@ -1711,9 +1711,9 @@ pub trait Iterator {
        ) -> impl FnMut((), T) + 'a {
            move |(), x| {
                if f(&x) {
                    left.extend(Some(x));
                    left.extend_one(x);
                } else {
                    right.extend(Some(x));
                    right.extend_one(x);
                }
            }
        }
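Plain stable-Rust usage of `partition`, which after this change routes each
element through `extend_one` instead of `extend(Some(x))`, avoiding a
one-element iterator per item; the values here are illustrative:

```rust
fn main() {
    let (even, odd): (Vec<i32>, Vec<i32>) = (1..=6).partition(|n| n % 2 == 0);
    assert_eq!(even, [2, 4, 6]);
    assert_eq!(odd, [1, 3, 5]);
}
```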

@ -2686,14 +2686,20 @@ pub trait Iterator {
            us: &'a mut impl Extend<B>,
        ) -> impl FnMut((), (A, B)) + 'a {
            move |(), (t, u)| {
                ts.extend(Some(t));
                us.extend(Some(u));
                ts.extend_one(t);
                us.extend_one(u);
            }
        }

        let mut ts: FromA = Default::default();
        let mut us: FromB = Default::default();

        let (lower_bound, _) = self.size_hint();
        if lower_bound > 0 {
            ts.extend_reserve(lower_bound);
            us.extend_reserve(lower_bound);
        }

        self.fold((), extend(&mut ts, &mut us));

        (ts, us)

@ -673,6 +673,62 @@ impl Token {

        Some(Token::new(kind, self.span.to(joint.span)))
    }

    // See comments in `Nonterminal::to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality.
    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
        if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
            return false;
        }
        match (&self.kind, &other.kind) {
            (&Eq, &Eq)
            | (&Lt, &Lt)
            | (&Le, &Le)
            | (&EqEq, &EqEq)
            | (&Ne, &Ne)
            | (&Ge, &Ge)
            | (&Gt, &Gt)
            | (&AndAnd, &AndAnd)
            | (&OrOr, &OrOr)
            | (&Not, &Not)
            | (&Tilde, &Tilde)
            | (&At, &At)
            | (&Dot, &Dot)
            | (&DotDot, &DotDot)
            | (&DotDotDot, &DotDotDot)
            | (&DotDotEq, &DotDotEq)
            | (&Comma, &Comma)
            | (&Semi, &Semi)
            | (&Colon, &Colon)
            | (&ModSep, &ModSep)
            | (&RArrow, &RArrow)
            | (&LArrow, &LArrow)
            | (&FatArrow, &FatArrow)
            | (&Pound, &Pound)
            | (&Dollar, &Dollar)
            | (&Question, &Question)
            | (&Whitespace, &Whitespace)
            | (&Comment, &Comment)
            | (&Eof, &Eof) => true,

            (&BinOp(a), &BinOp(b)) | (&BinOpEq(a), &BinOpEq(b)) => a == b,

            (&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b,

            (&DocComment(a), &DocComment(b)) | (&Shebang(a), &Shebang(b)) => a == b,

            (&Literal(a), &Literal(b)) => a == b,

            (&Lifetime(a), &Lifetime(b)) => a == b,
            (&Ident(a, b), &Ident(c, d)) => {
                b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
            }

            (&Interpolated(_), &Interpolated(_)) => false,

            _ => panic!("forgot to add a token?"),
        }
    }
}

impl PartialEq<TokenKind> for Token {
@ -21,6 +21,8 @@ use rustc_macros::HashStable_Generic;
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};

use log::debug;

use std::{iter, mem};

/// When the main rust parser encounters a syntax-extension invocation, it
@ -66,6 +68,23 @@ impl TokenTree {
        }
    }

    // See comments in `Nonterminal::to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality.
    //
    // This is otherwise the same as `eq_unspanned`, only recursing with a
    // different method.
    pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
        match (self, other) {
            (TokenTree::Token(token), TokenTree::Token(token2)) => {
                token.probably_equal_for_proc_macro(token2)
            }
            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
            }
            _ => false,
        }
    }

    /// Retrieves the TokenTree's span.
    pub fn span(&self) -> Span {
        match self {
@ -288,6 +307,112 @@ impl TokenStream {
        t1.next().is_none() && t2.next().is_none()
    }

    // See comments in `Nonterminal::to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality.
    //
    // This is otherwise the same as `eq_unspanned`, only recursing with a
    // different method.
    pub fn probably_equal_for_proc_macro(&self, other: &TokenStream) -> bool {
        // When checking for `probably_eq`, we ignore certain tokens that aren't
        // preserved in the AST. Because they are not preserved, the pretty
        // printer arbitrarily adds or removes them when printing as token
        // streams, making a comparison between a token stream generated from an
        // AST and a token stream which was parsed into an AST more reliable.
        fn semantic_tree(tree: &TokenTree) -> bool {
            if let TokenTree::Token(token) = tree {
                if let
                    // The pretty printer tends to add trailing commas to
                    // everything, and in particular, after struct fields.
                    | token::Comma
                    // The pretty printer emits `NoDelim` as whitespace.
                    | token::OpenDelim(DelimToken::NoDelim)
                    | token::CloseDelim(DelimToken::NoDelim)
                    // The pretty printer collapses many semicolons into one.
                    | token::Semi
                    // The pretty printer collapses whitespace arbitrarily and can
                    // introduce whitespace from `NoDelim`.
                    | token::Whitespace
                    // The pretty printer can turn `$crate` into `::crate_name`
                    | token::ModSep = token.kind {
                    return false;
                }
            }
            true
        }

        // When comparing two `TokenStream`s, we ignore the `IsJoint` information.
        //
        // However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
        // use `Token.glue` on adjacent tokens with the proper `IsJoint`.
        // Since we are ignoring `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
        // and its 'split'/'unglued' components (e.g. `Gt, Gt`) are equivalent
        // when determining if two `TokenStream`s are 'probably equal'.
        //
        // Therefore, we use `break_two_token_op` to convert all tokens
        // to the 'unglued' form (if it exists). This ensures that two
        // `TokenStream`s which differ only in how their tokens are glued
        // will be considered 'probably equal', which allows us to keep spans.
        //
        // This is important when the original `TokenStream` contained
        // extra spaces (e.g. `f :: < Vec < _ > > ( ) ;`). These extra spaces
        // will be omitted when we pretty-print, which can cause the original
        // and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
        // leading to some tokens being 'glued' together in one stream but not
        // the other. See #68489 for more details.
        fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
            // In almost all cases, we should have either zero or one levels
            // of 'unglueing'. However, in some unusual cases, we may need
            // to iterate breaking tokens multiple times. For example:
            // '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
            let mut token_trees: SmallVec<[_; 2]>;
            if let TokenTree::Token(token) = &tree {
                let mut out = SmallVec::<[_; 2]>::new();
                out.push(token.clone());
                // Iterate to fixpoint:
                // * We start off with 'out' containing our initial token, and `temp` empty
                // * If we are able to break any tokens in `out`, then `out` will have
                //   at least one more element than 'temp', so we will try to break tokens
                //   again.
                // * If we cannot break any tokens in 'out', we are done
                loop {
                    let mut temp = SmallVec::<[_; 2]>::new();
                    let mut changed = false;

                    for token in out.into_iter() {
                        if let Some((first, second)) = token.kind.break_two_token_op() {
                            temp.push(Token::new(first, DUMMY_SP));
                            temp.push(Token::new(second, DUMMY_SP));
                            changed = true;
                        } else {
                            temp.push(token);
                        }
                    }
                    out = temp;
                    if !changed {
                        break;
                    }
                }
                token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
                if token_trees.len() != 1 {
                    debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
                }
            } else {
                token_trees = SmallVec::new();
                token_trees.push(tree);
            }
            token_trees.into_iter()
        }

        let mut t1 = self.trees().filter(semantic_tree).flat_map(break_tokens);
        let mut t2 = other.trees().filter(semantic_tree).flat_map(break_tokens);
        for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
            if !t1.probably_equal_for_proc_macro(&t2) {
                return false;
            }
        }
        t1.next().is_none() && t2.next().is_none()
    }
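The "iterate to fixpoint" unglueing above can be illustrated with a
self-contained toy token type (not rustc's real `TokenKind`); this is only a
sketch of the same loop shape under those simplifying assumptions:

```rust
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    ShrEq, // `>>=`
    Shr,   // `>>`
    Ge,    // `>=`
    Gt,    // `>`
    Eq,    // `=`
}

// Toy analogue of `TokenKind::break_two_token_op`.
fn break_two(t: &Tok) -> Option<(Tok, Tok)> {
    match t {
        Tok::ShrEq => Some((Tok::Gt, Tok::Ge)), // `>>=` -> `>`, `>=`
        Tok::Shr => Some((Tok::Gt, Tok::Gt)),   // `>>`  -> `>`, `>`
        Tok::Ge => Some((Tok::Gt, Tok::Eq)),    // `>=`  -> `>`, `=`
        _ => None,
    }
}

// Keep breaking until nothing changes, exactly like `break_tokens` above.
fn break_to_fixpoint(start: Tok) -> Vec<Tok> {
    let mut out = vec![start];
    loop {
        let mut temp = Vec::new();
        let mut changed = false;
        for t in out {
            match break_two(&t) {
                Some((a, b)) => {
                    temp.push(a);
                    temp.push(b);
                    changed = true;
                }
                None => temp.push(t),
            }
        }
        out = temp;
        if !changed {
            break;
        }
    }
    out
}

fn main() {
    // Two rounds needed: [ShrEq] -> [Gt, Ge] -> [Gt, Gt, Eq]
    assert_eq!(break_to_fixpoint(Tok::ShrEq), vec![Tok::Gt, Tok::Gt, Tok::Eq]);
}
```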

    pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
        TokenStream(Lrc::new(
            self.0

@ -1321,12 +1321,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
                        .get_partial_res(bound_pred.bounded_ty.id)
                        .map(|d| d.base_res())
                    {
                        if let Some(node_id) =
                            self.resolver.definitions().as_local_node_id(def_id)
                        {
                        if let Some(def_id) = def_id.as_local() {
                            for param in &generics.params {
                                if let GenericParamKind::Type { .. } = param.kind {
                                    if node_id == param.id {
                                    if def_id
                                        == self
                                            .resolver
                                            .definitions()
                                            .local_def_id(param.id)
                                    {
                                        add_bounds
                                            .entry(param.id)
                                            .or_default()
@ -3,6 +3,7 @@ use super::{ImplTraitContext, LoweringContext, ParamMode};
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_span::symbol::Ident;
@ -102,10 +103,36 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
            // Note that unlike for slice patterns,
            // where `xs @ ..` is a legal sub-slice pattern,
            // it is not a legal sub-tuple pattern.
            if pat.is_rest() {
                rest = Some((idx, pat.span));
                break;
            match pat.kind {
                // Found a sub-tuple rest pattern.
                PatKind::Rest => {
                    rest = Some((idx, pat.span));
                    break;
                }
                // Found a sub-tuple pattern `$binding_mode $ident @ ..`.
                // This is not allowed as a sub-tuple pattern.
                PatKind::Ident(ref _bm, ident, Some(ref sub)) if sub.is_rest() => {
                    rest = Some((idx, pat.span));
                    let sp = pat.span;
                    self.diagnostic()
                        .struct_span_err(
                            sp,
                            &format!("`{} @` is not allowed in a {}", ident.name, ctx),
                        )
                        .span_label(sp, "this is only allowed in slice patterns")
                        .help("remove this and bind each tuple field independently")
                        .span_suggestion_verbose(
                            sp,
                            &format!("if you don't need to use the contents of {}, discard the tuple's remaining fields", ident),
                            "..".to_string(),
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    break;
                }
                _ => {}
            }

            // It was not a sub-tuple pattern so lower it normally.
            elems.push(self.lower_pat(pat));
        }
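For reference, a tiny stable-Rust illustration of the pattern this diagnostic
targets and the fix it suggests (the rejected line is kept as a comment):

```rust
fn main() {
    let tup = (0, 1, 2);
    // Rejected: `rest @ ..` is only allowed in slice patterns, not tuples.
    // let (first, rest @ ..) = tup; // error: `rest @` is not allowed in a tuple pattern
    let (first, ..) = tup; // the suggested replacement
    assert_eq!(first, 0);
}
```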

@ -22,6 +22,7 @@
#![feature(test)]
#![feature(associated_type_bounds)]
#![feature(thread_id_value)]
#![feature(extend_one)]
#![allow(rustc::default_hash_types)]

#[macro_use]

@ -53,6 +53,20 @@ impl<T> Extend<T> for ThinVec<T> {
            ThinVec(None) => *self = iter.into_iter().collect::<Vec<_>>().into(),
        }
    }

    fn extend_one(&mut self, item: T) {
        match *self {
            ThinVec(Some(ref mut vec)) => vec.push(item),
            ThinVec(None) => *self = vec![item].into(),
        }
    }

    fn extend_reserve(&mut self, additional: usize) {
        match *self {
            ThinVec(Some(ref mut vec)) => vec.reserve(additional),
            ThinVec(None) => *self = Vec::with_capacity(additional).into(),
        }
    }
}

impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ThinVec<T> {

@ -332,17 +332,6 @@ impl Definitions {
        })
    }

    #[inline]
    pub fn as_local_node_id(&self, def_id: DefId) -> Option<ast::NodeId> {
        if let Some(def_id) = def_id.as_local() {
            let node_id = self.def_id_to_node_id[def_id];
            if node_id != ast::DUMMY_NODE_ID {
                return Some(node_id);
            }
        }
        None
    }

    #[inline]
    pub fn as_local_hir_id(&self, def_id: LocalDefId) -> hir::HirId {
        self.local_def_id_to_hir_id(def_id)
@ -2,6 +2,7 @@
#![feature(const_if_match)]
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(extend_one)]
#![feature(unboxed_closures)]
#![feature(test)]
#![feature(fn_traits)]

@ -736,6 +736,16 @@ impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
    fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
        self.raw.extend(iter);
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.raw.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.raw.reserve(additional);
    }
}

impl<I: Idx, T> FromIterator<T> for IndexVec<I, T> {
@ -16,6 +16,7 @@
#![feature(bool_to_option)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(extend_one)]
#![feature(never_type)]
#![feature(or_patterns)]
#![feature(range_is_empty)]

@ -81,6 +81,14 @@ impl Extend<ty::Predicate<'tcx>> for PredicateSet<'tcx> {
            self.insert(pred);
        }
    }

    fn extend_one(&mut self, pred: ty::Predicate<'tcx>) {
        self.insert(pred);
    }

    fn extend_reserve(&mut self, additional: usize) {
        Extend::<ty::Predicate<'tcx>>::extend_reserve(&mut self.set, additional);
    }
}

///////////////////////////////////////////////////////////////////////////
@ -1,4 +1,4 @@
use super::{AllocId, Pointer, RawConst, ScalarMaybeUninit};
use super::{AllocId, Pointer, RawConst, Scalar};

use crate::mir::interpret::ConstValue;
use crate::ty::layout::LayoutError;
@ -391,7 +391,7 @@ pub enum UndefinedBehaviorInfo<'tcx> {
    /// Using a non-character `u32` as character.
    InvalidChar(u32),
    /// An enum discriminant was set to a value which was outside the range of valid values.
    InvalidDiscriminant(ScalarMaybeUninit),
    InvalidDiscriminant(Scalar),
    /// Using a pointer-not-to-a-function as function pointer.
    InvalidFunctionPointer(Pointer),
    /// Using a string that is not valid UTF-8,
@ -5,7 +5,6 @@

use crate::mir::*;
use crate::ty::subst::Subst;
use crate::ty::util::IntTypeExt;
use crate::ty::{self, Ty, TyCtxt};
use rustc_hir as hir;
use rustc_target::abi::VariantIdx;
@ -174,17 +173,7 @@ impl<'tcx> Rvalue<'tcx> {
                tcx.intern_tup(&[ty, tcx.types.bool])
            }
            Rvalue::UnaryOp(UnOp::Not | UnOp::Neg, ref operand) => operand.ty(local_decls, tcx),
            Rvalue::Discriminant(ref place) => {
                let ty = place.ty(local_decls, tcx).ty;
                match ty.kind {
                    ty::Adt(adt_def, _) => adt_def.repr.discr_type().to_ty(tcx),
                    ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx),
                    _ => {
                        // This can only be `0`, for now, so `u8` will suffice.
                        tcx.types.u8
                    }
                }
            }
            Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
            Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),
            Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,
            Rvalue::Aggregate(ref ak, ref ops) => match **ak {
@ -2251,11 +2251,6 @@ impl<'tcx> TyCtxt<'tcx> {
        if self.features().never_type_fallback { self.types.never } else { self.types.unit }
    }

    #[inline]
    pub fn mk_bool(self) -> Ty<'tcx> {
        self.mk_ty(Bool)
    }

    #[inline]
    pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
        self.mk_ty(FnDef(def_id, substs))
@ -2037,6 +2037,8 @@ impl ReprOptions {
        self.flags.contains(ReprFlags::HIDE_NICHE)
    }

    /// Returns the discriminant type, given these `repr` options.
    /// This must only be called on enums!
    pub fn discr_type(&self) -> attr::IntType {
        self.int.unwrap_or(attr::SignedInt(ast::IntTy::Isize))
    }
@ -2269,6 +2271,7 @@ impl<'tcx> AdtDef {

    #[inline]
    pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option<Discr<'tcx>> {
        assert!(self.is_enum());
        let param_env = tcx.param_env(expr_did);
        let repr_type = self.repr.discr_type();
        match tcx.const_eval_poly(expr_did) {
@ -2305,6 +2308,7 @@ impl<'tcx> AdtDef {
        &'tcx self,
        tcx: TyCtxt<'tcx>,
    ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'tcx> {
        assert!(self.is_enum());
        let repr_type = self.repr.discr_type();
        let initial = repr_type.initial_discriminant(tcx);
        let mut prev_discr = None::<Discr<'tcx>>;
@ -2337,6 +2341,7 @@ impl<'tcx> AdtDef {
        tcx: TyCtxt<'tcx>,
        variant_index: VariantIdx,
    ) -> Discr<'tcx> {
        assert!(self.is_enum());
        let (val, offset) = self.discriminant_def_for_variant(variant_index);
        let explicit_value = val
            .and_then(|expr_did| self.eval_explicit_discr(tcx, expr_did))
@ -29,6 +29,7 @@ use std::borrow::Cow;
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::ops::Range;
use ty::util::IntTypeExt;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
#[derive(HashStable, TypeFoldable, Lift)]
@ -2096,7 +2097,9 @@ impl<'tcx> TyS<'tcx> {
        variant_index: VariantIdx,
    ) -> Option<Discr<'tcx>> {
        match self.kind {
            TyKind::Adt(adt, _) => Some(adt.discriminant_for_variant(tcx, variant_index)),
            TyKind::Adt(adt, _) if adt.is_enum() => {
                Some(adt.discriminant_for_variant(tcx, variant_index))
            }
            TyKind::Generator(def_id, substs, _) => {
                Some(substs.as_generator().discriminant_for_variant(def_id, tcx, variant_index))
            }
@ -2104,6 +2107,18 @@ impl<'tcx> TyS<'tcx> {
        }
    }

    /// Returns the type of the discriminant of this type.
    pub fn discriminant_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match self.kind {
            ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx),
            ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx),
            _ => {
                // This can only be `0`, for now, so `u8` will suffice.
                tcx.types.u8
            }
        }
    }
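A surface-level illustration of the "discriminant type" the new helper
computes; for an enum it is driven by the `repr` attribute. This snippet is
ordinary stable Rust, not compiler code:

```rust
// With an explicit `#[repr(i16)]`, the discriminant type is `i16`.
#[repr(i16)]
enum Status {
    Inactive = -1,
    Active = 7,
}

fn main() {
    assert_eq!(Status::Inactive as i16, -1);
    assert_eq!(Status::Active as i16, 7);
}
```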

    /// When we create a closure, we record its kind (i.e., what trait
    /// it implements) into its `ClosureSubsts` using a type
    /// parameter. This is kind of a phantom type, except that the

@ -220,15 +220,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
            sym::discriminant_value => {
                let place = self.deref_operand(args[0])?;
                let discr_val = self.read_discriminant(place.into())?.0;
                let scalar = match dest.layout.ty.kind {
                    ty::Int(_) => Scalar::from_int(
                        self.sign_extend(discr_val, dest.layout) as i128,
                        dest.layout.size,
                    ),
                    ty::Uint(_) => Scalar::from_uint(discr_val, dest.layout.size),
                    _ => bug!("invalid `discriminant_value` return layout: {:?}", dest.layout),
                };
                self.write_scalar(scalar, dest)?;
                self.write_scalar(discr_val, dest)?;
            }
            sym::unchecked_shl
            | sym::unchecked_shr
@ -7,15 +7,15 @@ use std::fmt::Write;
use rustc_errors::ErrorReported;
use rustc_hir::def::Namespace;
use rustc_macros::HashStable;
use rustc_middle::ty::layout::{IntegerExt, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::layout::{PrimitiveExt, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Printer};
use rustc_middle::ty::Ty;
use rustc_middle::{mir, ty};
use rustc_target::abi::{Abi, DiscriminantKind, HasDataLayout, Integer, LayoutOf, Size};
use rustc_target::abi::{Abi, DiscriminantKind, HasDataLayout, LayoutOf, Size};
use rustc_target::abi::{VariantIdx, Variants};

use super::{
    from_known_layout, sign_extend, truncate, ConstValue, GlobalId, InterpCx, InterpResult,
    from_known_layout, mir_assign_valid_types, ConstValue, GlobalId, InterpCx, InterpResult,
    MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
};
@ -469,6 +469,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
            .try_fold(base_op, |op, elem| self.operand_projection(op, elem))?;

        trace!("eval_place_to_op: got {:?}", *op);
        // Sanity-check the type we ended up with.
        debug_assert!(mir_assign_valid_types(
            *self.tcx,
            self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions(
                place.ty(&self.frame().body.local_decls, *self.tcx).ty
            ))?,
            op.layout,
        ));
        Ok(op)
    }

@ -576,98 +584,113 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Read discriminant, return the runtime value as well as the variant index.
    pub fn read_discriminant(
        &self,
        rval: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, (u128, VariantIdx)> {
        trace!("read_discriminant_value {:#?}", rval.layout);
        op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, (Scalar<M::PointerTag>, VariantIdx)> {
        trace!("read_discriminant_value {:#?}", op.layout);

        let (discr_layout, discr_kind, discr_index) = match rval.layout.variants {
        // Get type and layout of the discriminant.
        let discr_layout = self.layout_of(op.layout.ty.discriminant_ty(*self.tcx))?;
        trace!("discriminant type: {:?}", discr_layout.ty);

        // We use "discriminant" to refer to the value associated with a particular enum variant.
        // This is not to be confused with its "variant index", which is just determining its position in the
        // declared list of variants -- they can differ with explicitly assigned discriminants.
        // We use "tag" to refer to how the discriminant is encoded in memory, which can be either
        // straight-forward (`DiscriminantKind::Tag`) or with a niche (`DiscriminantKind::Niche`).
        // Unfortunately, the rest of the compiler calls the latter "discriminant", too, which makes things
        // rather confusing.
        let (tag_scalar_layout, tag_kind, tag_index) = match op.layout.variants {
            Variants::Single { index } => {
                let discr_val = rval
                    .layout
                    .ty
                    .discriminant_for_variant(*self.tcx, index)
                    .map_or(u128::from(index.as_u32()), |discr| discr.val);
                return Ok((discr_val, index));
                let discr = match op.layout.ty.discriminant_for_variant(*self.tcx, index) {
                    Some(discr) => {
                        // This type actually has discriminants.
                        assert_eq!(discr.ty, discr_layout.ty);
                        Scalar::from_uint(discr.val, discr_layout.size)
                    }
                    None => {
                        // On a type without actual discriminants, variant is 0.
                        assert_eq!(index.as_u32(), 0);
                        Scalar::from_uint(index.as_u32(), discr_layout.size)
                    }
                };
                return Ok((discr, index));
            }
            Variants::Multiple { discr: ref discr_layout, ref discr_kind, discr_index, .. } => {
                (discr_layout, discr_kind, discr_index)
            Variants::Multiple { ref discr, ref discr_kind, discr_index, .. } => {
                (discr, discr_kind, discr_index)
            }
        };

        // read raw discriminant value
        let discr_op = self.operand_field(rval, discr_index)?;
        let discr_val = self.read_immediate(discr_op)?;
        let raw_discr = discr_val.to_scalar_or_undef();
        trace!("discr value: {:?}", raw_discr);
        // post-process
        Ok(match *discr_kind {
        // There are *three* layouts that come into play here:
        // - The discriminant has a type for typechecking. This is `discr_layout`, and is used for
        //   the `Scalar` we return.
        // - The tag (encoded discriminant) has layout `tag_layout`. This is always an integer type,
        //   and used to interpret the value we read from the tag field.
        //   For the return value, a cast to `discr_layout` is performed.
        // - The field storing the tag has a layout, which is very similar to `tag_layout` but
        //   may be a pointer. This is `tag_val.layout`; we just use it for sanity checks.

        // Get layout for tag.
        let tag_layout = self.layout_of(tag_scalar_layout.value.to_int_ty(*self.tcx))?;

        // Read tag and sanity-check `tag_layout`.
        let tag_val = self.read_immediate(self.operand_field(op, tag_index)?)?;
        assert_eq!(tag_layout.size, tag_val.layout.size);
        assert_eq!(tag_layout.abi.is_signed(), tag_val.layout.abi.is_signed());
        let tag_val = tag_val.to_scalar()?;
        trace!("tag value: {:?}", tag_val);

        // Figure out which discriminant and variant this corresponds to.
        Ok(match *tag_kind {
            DiscriminantKind::Tag => {
                let bits_discr = raw_discr
                    .not_undef()
                    .and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size))
                    .map_err(|_| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
                let real_discr = if discr_val.layout.abi.is_signed() {
                    // going from layout tag type to typeck discriminant type
                    // requires first sign extending with the discriminant layout
                    let sexted = sign_extend(bits_discr, discr_val.layout.size);
                    // and then zeroing with the typeck discriminant type
                    let discr_ty = rval
                        .layout
                        .ty
                        .ty_adt_def()
                        .expect("tagged layout corresponds to adt")
                        .repr
                        .discr_type();
                    let size = Integer::from_attr(self, discr_ty).size();
                    truncate(sexted, size)
                } else {
                    bits_discr
                };
                // Make sure we catch invalid discriminants
                let index = match rval.layout.ty.kind {
                let tag_bits = self
                    .force_bits(tag_val, tag_layout.size)
                    .map_err(|_| err_ub!(InvalidDiscriminant(tag_val.erase_tag())))?;
                // Cast bits from tag layout to discriminant layout.
                let discr_val_cast = self.cast_from_scalar(tag_bits, tag_layout, discr_layout.ty);
                let discr_bits = discr_val_cast.assert_bits(discr_layout.size);
                // Convert discriminant to variant index, and catch invalid discriminants.
                let index = match op.layout.ty.kind {
                    ty::Adt(adt, _) => {
                        adt.discriminants(self.tcx.tcx).find(|(_, var)| var.val == real_discr)
                        adt.discriminants(self.tcx.tcx).find(|(_, var)| var.val == discr_bits)
                    }
                    ty::Generator(def_id, substs, _) => {
                        let substs = substs.as_generator();
                        substs
                            .discriminants(def_id, self.tcx.tcx)
                            .find(|(_, var)| var.val == real_discr)
                            .find(|(_, var)| var.val == discr_bits)
                    }
                    _ => bug!("tagged layout for non-adt non-generator"),
                }
                .ok_or_else(|| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
                (real_discr, index.0)
                .ok_or_else(|| err_ub!(InvalidDiscriminant(tag_val.erase_tag())))?;
                // Return the cast value, and the index.
                (discr_val_cast, index.0)
            }
            DiscriminantKind::Niche { dataful_variant, ref niche_variants, niche_start } => {
                // Compute the variant this niche value/"tag" corresponds to. With niche layout,
                // discriminant (encoded in niche/tag) and variant index are the same.
                let variants_start = niche_variants.start().as_u32();
                let variants_end = niche_variants.end().as_u32();
                let raw_discr = raw_discr
                    .not_undef()
                    .map_err(|_| err_ub!(InvalidDiscriminant(ScalarMaybeUninit::Uninit)))?;
                match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) {
                let variant = match tag_val.to_bits_or_ptr(tag_layout.size, self) {
                    Err(ptr) => {
                        // The niche must be just 0 (which an inbounds pointer value never is)
                        let ptr_valid = niche_start == 0
                            && variants_start == variants_end
                            && !self.memory.ptr_may_be_null(ptr);
                        if !ptr_valid {
                            throw_ub!(InvalidDiscriminant(raw_discr.erase_tag().into()))
                            throw_ub!(InvalidDiscriminant(tag_val.erase_tag()))
                        }
                        (u128::from(dataful_variant.as_u32()), dataful_variant)
                        dataful_variant
                    }
                    Ok(raw_discr) => {
                    Ok(tag_bits) => {
                        // We need to use machine arithmetic to get the relative variant idx:
                        // variant_index_relative = discr_val - niche_start_val
                        let discr_layout =
                            self.layout_of(discr_layout.value.to_int_ty(*self.tcx))?;
                        let discr_val = ImmTy::from_uint(raw_discr, discr_layout);
                        let niche_start_val = ImmTy::from_uint(niche_start, discr_layout);
                        // variant_index_relative = tag_val - niche_start_val
                        let tag_val = ImmTy::from_uint(tag_bits, tag_layout);
                        let niche_start_val = ImmTy::from_uint(niche_start, tag_layout);
                        let variant_index_relative_val =
                            self.binary_op(mir::BinOp::Sub, discr_val, niche_start_val)?;
                            self.binary_op(mir::BinOp::Sub, tag_val, niche_start_val)?;
                        let variant_index_relative = variant_index_relative_val
                            .to_scalar()?
                            .assert_bits(discr_val.layout.size);
                            .assert_bits(tag_val.layout.size);
                        // Check if this is in the range that indicates an actual discriminant.
                        if variant_index_relative <= u128::from(variants_end - variants_start) {
                            let variant_index_relative = u32::try_from(variant_index_relative)
@ -676,7 +699,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                            let variant_index = variants_start
                                .checked_add(variant_index_relative)
                                .expect("overflow computing absolute variant idx");
                            let variants_len = rval
                            let variants_len = op
                                .layout
                                .ty
                                .ty_adt_def()
@ -684,12 +707,16 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                                .variants
                                .len();
                            assert!(usize::try_from(variant_index).unwrap() < variants_len);
                            (u128::from(variant_index), VariantIdx::from_u32(variant_index))
                            VariantIdx::from_u32(variant_index)
                        } else {
                            (u128::from(dataful_variant.as_u32()), dataful_variant)
                            dataful_variant
                        }
                    }
                };
                // Compute the size of the scalar we need to return.
                // No need to cast, because the variant index directly serves as discriminant and is
                // encoded in the tag.
                (Scalar::from_uint(variant.as_u32(), discr_layout.size), variant)
            }
        })
    }
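The niche arithmetic above can be stated in isolation; the function below is a
hypothetical, simplified rendering (no pointer tags, no machine-int layouts)
of how a niche-encoded tag maps back to a variant index:

```rust
use std::convert::TryFrom;

// Illustrative only: decode a niche tag into a variant index.
fn variant_from_niche_tag(
    tag: u128,
    niche_start: u128,
    variants_start: u32,
    variants_end: u32,
    dataful_variant: u32,
) -> u32 {
    // variant_index_relative = tag - niche_start (machine arithmetic wraps).
    let relative = tag.wrapping_sub(niche_start);
    if relative <= u128::from(variants_end - variants_start) {
        // In range: the tag encodes one of the niched variants.
        variants_start
            .checked_add(u32::try_from(relative).unwrap())
            .expect("overflow computing absolute variant idx")
    } else {
        // Out of range: this must be the dataful variant.
        dataful_variant
    }
}

fn main() {
    // Say variants 1..=3 are niched, with the niche starting at tag 254:
    assert_eq!(variant_from_niche_tag(255, 254, 1, 3, 0), 2);
    // Any other tag value belongs to the dataful variant (index 0 here):
    assert_eq!(variant_from_niche_tag(7, 254, 1, 3, 0), 0);
}
```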

@ -638,6 +638,14 @@ where
        }

        self.dump_place(place_ty.place);
        // Sanity-check the type we ended up with.
        debug_assert!(mir_assign_valid_types(
            *self.tcx,
            self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions(
                place.ty(&self.frame().body.local_decls, *self.tcx).ty
            ))?,
            place_ty.layout,
        ));
        Ok(place_ty)
    }

@ -262,8 +262,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let discr_val = self.read_discriminant(op)?.0;
                let size = dest.layout.size;
                self.write_scalar(Scalar::from_uint(discr_val, size), dest)?;
                self.write_scalar(discr_val, dest)?;
            }
        }
@ -12,7 +12,6 @@ doctest = false
[dependencies]
bitflags = "1.0"
log = "0.4"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
rustc_ast_pretty = { path = "../librustc_ast_pretty" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_feature = { path = "../librustc_feature" }
@ -7,18 +7,14 @@
|
||||
#![feature(or_patterns)]
|
||||
|
||||
use rustc_ast::ast;
|
||||
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
|
||||
use rustc_ast::token::{self, Nonterminal};
|
||||
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::symbol::kw;
|
||||
use rustc_span::{FileName, SourceFile, Span, DUMMY_SP};
|
||||
use rustc_span::{FileName, SourceFile, Span};
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use std::mem;
|
||||
use std::path::Path;
|
||||
use std::str;
|
||||
|
||||
@ -310,7 +306,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
|
||||
// modifications, including adding/removing typically non-semantic
|
||||
// tokens such as extra braces and commas, don't happen.
|
||||
if let Some(tokens) = tokens {
|
||||
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
|
||||
if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
|
||||
return tokens;
|
||||
}
|
||||
info!(
|
||||
@ -385,203 +381,3 @@ fn prepend_attrs(
|
||||
builder.push(tokens.clone());
|
||||
Some(builder.build())
|
||||
}
|
||||
|
||||
// See comments in `Nonterminal::to_tokenstream` for why we care about
|
||||
// *probably* equal here rather than actual equality
|
||||
//
|
||||
// This is otherwise the same as `eq_unspanned`, only recursing with a
|
||||
// different method.
|
||||
pub fn tokenstream_probably_equal_for_proc_macro(
|
||||
first: &TokenStream,
|
||||
other: &TokenStream,
|
||||
sess: &ParseSess,
|
||||
) -> bool {
|
||||
// When checking for `probably_eq`, we ignore certain tokens that aren't
|
||||
// preserved in the AST. Because they are not preserved, the pretty
|
||||
// printer arbitrarily adds or removes them when printing as token
|
||||
// streams, making a comparison between a token stream generated from an
|
||||
// AST and a token stream which was parsed into an AST more reliable.
|
||||
fn semantic_tree(tree: &TokenTree) -> bool {
|
||||
if let TokenTree::Token(token) = tree {
|
||||
if let
|
||||
// The pretty printer tends to add trailing commas to
|
||||
// everything, and in particular, after struct fields.
|
||||
| token::Comma
|
||||
// The pretty printer emits `NoDelim` as whitespace.
|
||||
| token::OpenDelim(DelimToken::NoDelim)
|
||||
| token::CloseDelim(DelimToken::NoDelim)
|
||||
// The pretty printer collapses many semicolons into one.
|
||||
| token::Semi
|
||||
// The pretty printer collapses whitespace arbitrarily and can
|
||||
// introduce whitespace from `NoDelim`.
|
||||
| token::Whitespace
|
||||
// The pretty printer can turn `$crate` into `::crate_name`
|
||||
| token::ModSep = token.kind {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
// When comparing two `TokenStream`s, we ignore the `IsJoint` information.
|
||||
//
|
||||
// However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
|
||||
// use `Token.glue` on adjacent tokens with the proper `IsJoint`.
|
||||
// Since we are ignoreing `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
|
||||
// and its 'split'/'unglued' compoenents (e.g. `Gt, Gt`) are equivalent
|
||||
// when determining if two `TokenStream`s are 'probably equal'.
|
||||
//
|
||||
// Therefore, we use `break_two_token_op` to convert all tokens
|
||||
// to the 'unglued' form (if it exists). This ensures that two
|
||||
// `TokenStream`s which differ only in how their tokens are glued
|
||||
// will be considered 'probably equal', which allows us to keep spans.
|
||||
//
|
||||
// This is important when the original `TokenStream` contained
|
||||
// extra spaces (e.g. `f :: < Vec < _ > > ( ) ;'). These extra spaces
|
||||
// will be omitted when we pretty-print, which can cause the original
|
||||
// and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
|
||||
// leading to some tokens being 'glued' together in one stream but not
|
||||
// the other. See #68489 for more details.
|
||||
fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
|
||||
// In almost all cases, we should have either zero or one levels
|
||||
// of 'unglueing'. However, in some unusual cases, we may need
|
||||
// to iterate breaking tokens mutliple times. For example:
|
||||
// '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
|
||||
let mut token_trees: SmallVec<[_; 2]>;
|
||||
if let TokenTree::Token(token) = &tree {
|
||||
let mut out = SmallVec::<[_; 2]>::new();
|
||||
out.push(token.clone());
|
||||
// Iterate to fixpoint:
|
||||
// * We start off with 'out' containing our initial token, and `temp` empty
|
||||
// * If we are able to break any tokens in `out`, then `out` will have
|
||||
// at least one more element than 'temp', so we will try to break tokens
|
||||
// again.
|
||||
// * If we cannot break any tokens in 'out', we are done
|
||||
loop {
|
||||
let mut temp = SmallVec::<[_; 2]>::new();
|
||||
let mut changed = false;
|
||||
|
||||
for token in out.into_iter() {
|
||||
if let Some((first, second)) = token.kind.break_two_token_op() {
|
||||
temp.push(Token::new(first, DUMMY_SP));
|
||||
temp.push(Token::new(second, DUMMY_SP));
|
||||
changed = true;
|
||||
} else {
|
||||
temp.push(token);
|
||||
}
|
||||
}
|
||||
out = temp;
|
||||
if !changed {
|
||||
break;
|
||||
}
|
||||
}
|
||||
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
|
||||
if token_trees.len() != 1 {
|
||||
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
|
||||
}
|
||||
} else {
|
||||
token_trees = SmallVec::new();
|
||||
token_trees.push(tree);
|
||||
}
|
||||
token_trees.into_iter()
|
||||
}

    let expand_nt = |tree: TokenTree| {
        if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
            nt_to_tokenstream(nt, sess, *span).into_trees()
        } else {
            TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
        }
    };

    // Break tokens after we expand any nonterminals, so that we break tokens
    // that are produced as a result of nonterminal expansion.
    let mut t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
    let mut t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
    for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
        if !tokentree_probably_equal_for_proc_macro(&t1, &t2, sess) {
            return false;
        }
    }
    t1.next().is_none() && t2.next().is_none()
}
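
The tail of the function pairs the two iterators and then requires both to be exhausted. One caveat worth noting when reusing this shape: `Iterator::zip` stops as soon as either side runs dry and can quietly consume one extra element from the longer side, so an explicit loop is the more robust way to write a length-sensitive comparison. A self-contained sketch (the helper name is invented here; stable `Iterator::eq` does the same job for `PartialEq` items):

```rust
// Compare two iterators element by element, requiring equal length.
fn probably_equal<T: PartialEq>(
    a: impl IntoIterator<Item = T>,
    b: impl IntoIterator<Item = T>,
) -> bool {
    let mut a = a.into_iter();
    let mut b = b.into_iter();
    loop {
        match (a.next(), b.next()) {
            (None, None) => return true,       // both exhausted: equal
            (Some(x), Some(y)) if x == y => {} // keep going
            _ => return false,                 // mismatch or length difference
        }
    }
}

fn main() {
    assert!(probably_equal(vec![1, 2, 3], vec![1, 2, 3]));
    assert!(!probably_equal(vec![1, 2], vec![1, 2, 3]));
    assert!(!probably_equal(vec![1, 2, 3], vec![1, 2]));
}
```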

// See comments in `Nonterminal::to_tokenstream` for why we care about
// *probably* equal here rather than actual equality.
crate fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
    use TokenKind::*;

    if mem::discriminant(&first.kind) != mem::discriminant(&other.kind) {
        return false;
    }
    match (&first.kind, &other.kind) {
        (&Eq, &Eq)
        | (&Lt, &Lt)
        | (&Le, &Le)
        | (&EqEq, &EqEq)
        | (&Ne, &Ne)
        | (&Ge, &Ge)
        | (&Gt, &Gt)
        | (&AndAnd, &AndAnd)
        | (&OrOr, &OrOr)
        | (&Not, &Not)
        | (&Tilde, &Tilde)
        | (&At, &At)
        | (&Dot, &Dot)
        | (&DotDot, &DotDot)
        | (&DotDotDot, &DotDotDot)
        | (&DotDotEq, &DotDotEq)
        | (&Comma, &Comma)
        | (&Semi, &Semi)
        | (&Colon, &Colon)
        | (&ModSep, &ModSep)
        | (&RArrow, &RArrow)
        | (&LArrow, &LArrow)
        | (&FatArrow, &FatArrow)
        | (&Pound, &Pound)
        | (&Dollar, &Dollar)
        | (&Question, &Question)
        | (&Whitespace, &Whitespace)
        | (&Comment, &Comment)
        | (&Eof, &Eof) => true,

        (&BinOp(a), &BinOp(b)) | (&BinOpEq(a), &BinOpEq(b)) => a == b,

        (&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b,

        (&DocComment(a), &DocComment(b)) | (&Shebang(a), &Shebang(b)) => a == b,

        (&Literal(a), &Literal(b)) => a == b,

        (&Lifetime(a), &Lifetime(b)) => a == b,
        (&Ident(a, b), &Ident(c, d)) => {
            b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
        }

        // Expanded by `tokenstream_probably_equal_for_proc_macro`
        (&Interpolated(_), &Interpolated(_)) => unreachable!(),

        _ => panic!("forgot to add a token?"),
    }
}
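
The `mem::discriminant` guard at the top does the cheap part of the comparison: it identifies which variant each token is without inspecting payloads, so mismatched kinds are rejected before the exhaustive `match`. A minimal illustration on a made-up token enum:

```rust
use std::mem;

enum Tok {
    Ident(String),
    Lit(i64),
    Comma,
}

fn same_kind(a: &Tok, b: &Tok) -> bool {
    // Compares only the enum variant, never the payload.
    mem::discriminant(a) == mem::discriminant(b)
}

fn main() {
    assert!(same_kind(&Tok::Ident("a".into()), &Tok::Ident("b".into())));
    assert!(!same_kind(&Tok::Lit(1), &Tok::Comma));
}
```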

// See comments in `Nonterminal::to_tokenstream` for why we care about
// *probably* equal here rather than actual equality.
//
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
pub fn tokentree_probably_equal_for_proc_macro(
    first: &TokenTree,
    other: &TokenTree,
    sess: &ParseSess,
) -> bool {
    match (first, other) {
        (TokenTree::Token(token), TokenTree::Token(token2)) => {
            token_probably_equal_for_proc_macro(token, token2)
        }
        (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
            delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
        }
        _ => false,
    }
}
@ -25,7 +25,7 @@ use rustc_errors::{struct_span_err, Applicability};
use rustc_expand::base::SyntaxExtension;
use rustc_expand::expand::AstFragment;
use rustc_hir::def::{self, *};
use rustc_hir::def_id::{DefId, CRATE_DEF_INDEX};
use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_INDEX};
use rustc_metadata::creader::LoadedMacro;
use rustc_middle::bug;
use rustc_middle::hir::exports::Export;
@ -1150,15 +1150,22 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
    // Mark the given macro as unused unless its name starts with `_`.
    // Macro uses will remove items from this set, and the remaining
    // items will be reported as `unused_macros`.
    fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) {
    fn insert_unused_macro(
        &mut self,
        ident: Ident,
        def_id: LocalDefId,
        node_id: NodeId,
        span: Span,
    ) {
        if !ident.as_str().starts_with('_') {
            self.r.unused_macros.insert(node_id, span);
            self.r.unused_macros.insert(def_id, (node_id, span));
        }
    }

    fn define_macro(&mut self, item: &ast::Item) -> MacroRulesScope<'a> {
        let parent_scope = self.parent_scope;
        let expansion = parent_scope.expansion;
        let def_id = self.r.definitions.local_def_id(item.id);
        let (ext, ident, span, macro_rules) = match &item.kind {
            ItemKind::MacroDef(def) => {
                let ext = Lrc::new(self.r.compile_macro(item, self.r.session.edition()));
@ -1166,7 +1173,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
            }
            ItemKind::Fn(..) => match Self::proc_macro_stub(item) {
                Some((macro_kind, ident, span)) => {
                    self.r.proc_macro_stubs.insert(item.id);
                    self.r.proc_macro_stubs.insert(def_id);
                    (self.r.dummy_ext(macro_kind), ident, span, false)
                }
                None => return parent_scope.macro_rules,
@ -1174,7 +1181,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
            _ => unreachable!(),
        };

        let def_id = self.r.definitions.local_def_id(item.id);
        let res = Res::Def(DefKind::Macro(ext.macro_kind()), def_id.to_def_id());
        self.r.macro_map.insert(def_id.to_def_id(), ext);
        self.r.local_macro_def_scopes.insert(def_id, parent_scope.module);
@ -1196,7 +1202,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
            self.r.define(module, ident, MacroNS, (res, vis, span, expansion, IsMacroExport));
        } else {
            self.r.check_reserved_macro_name(ident, res);
            self.insert_unused_macro(ident, item.id, span);
            self.insert_unused_macro(ident, def_id, item.id, span);
        }
        MacroRulesScope::Binding(self.r.arenas.alloc_macro_rules_binding(MacroRulesBinding {
            parent_macro_rules_scope: parent_scope.macro_rules,
@ -1214,7 +1220,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
            _ => self.resolve_visibility(&item.vis),
        };
        if vis != ty::Visibility::Public {
            self.insert_unused_macro(ident, item.id, span);
            self.insert_unused_macro(ident, def_id, item.id, span);
        }
        self.r.define(module, ident, MacroNS, (res, vis, span, expansion));
        self.parent_scope.macro_rules
@ -1621,11 +1621,10 @@ impl<'a, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
        let report_errors = |this: &mut Self, res: Option<Res>| {
            let (err, candidates) = this.smart_resolve_report_errors(path, span, source, res);
            let def_id = this.parent_scope.module.normal_ancestor_id;
            let node_id = this.r.definitions.as_local_node_id(def_id).unwrap();
            let better = res.is_some();
            let suggestion =
                if res.is_none() { this.report_missing_type_error(path) } else { None };
            this.r.use_injections.push(UseError { err, candidates, node_id, better, suggestion });
            this.r.use_injections.push(UseError { err, candidates, def_id, better, suggestion });
            PartialRes::new(Res::Err)
        };

@ -23,7 +23,7 @@ use rustc_ast::ast::{self, FloatTy, IntTy, NodeId, UintTy};
use rustc_ast::ast::{Crate, CRATE_NODE_ID};
use rustc_ast::ast::{ItemKind, Path};
use rustc_ast::attr;
use rustc_ast::node_id::{NodeMap, NodeSet};
use rustc_ast::node_id::NodeMap;
use rustc_ast::unwrap_or;
use rustc_ast::visit::{self, Visitor};
use rustc_ast_pretty::pprust;
@ -253,21 +253,31 @@ impl<'a> From<&'a ast::PathSegment> for Segment {
    }
}

struct UsePlacementFinder {
    target_module: NodeId,
struct UsePlacementFinder<'d> {
    definitions: &'d Definitions,
    target_module: LocalDefId,
    span: Option<Span>,
    found_use: bool,
}

impl UsePlacementFinder {
    fn check(krate: &Crate, target_module: NodeId) -> (Option<Span>, bool) {
        let mut finder = UsePlacementFinder { target_module, span: None, found_use: false };
        visit::walk_crate(&mut finder, krate);
        (finder.span, finder.found_use)
impl<'d> UsePlacementFinder<'d> {
    fn check(
        definitions: &'d Definitions,
        krate: &Crate,
        target_module: DefId,
    ) -> (Option<Span>, bool) {
        if let Some(target_module) = target_module.as_local() {
            let mut finder =
                UsePlacementFinder { definitions, target_module, span: None, found_use: false };
            visit::walk_crate(&mut finder, krate);
            (finder.span, finder.found_use)
        } else {
            (None, false)
        }
    }
}

impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
impl<'tcx, 'd> Visitor<'tcx> for UsePlacementFinder<'d> {
    fn visit_mod(
        &mut self,
        module: &'tcx ast::Mod,
@ -278,7 +288,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
        if self.span.is_some() {
            return;
        }
        if node_id != self.target_module {
        if self.definitions.local_def_id(node_id) != self.target_module {
            visit::walk_mod(self, module);
            return;
        }
@ -611,7 +621,7 @@ struct UseError<'a> {
    /// Attach `use` statements for these candidates.
    candidates: Vec<ImportSuggestion>,
    /// The `NodeId` of the module to place the use-statements in.
    node_id: NodeId,
    def_id: DefId,
    /// Whether the diagnostic should state that it's "better".
    better: bool,
    /// Extra free form suggestion. Currently used to suggest new type parameter.
@ -926,8 +936,8 @@ pub struct Resolver<'a> {
    non_macro_attrs: [Lrc<SyntaxExtension>; 2],
    local_macro_def_scopes: FxHashMap<LocalDefId, Module<'a>>,
    ast_transform_scopes: FxHashMap<ExpnId, Module<'a>>,
    unused_macros: NodeMap<Span>,
    proc_macro_stubs: NodeSet,
    unused_macros: FxHashMap<LocalDefId, (NodeId, Span)>,
    proc_macro_stubs: FxHashSet<LocalDefId>,
    /// Traces collected during macro resolution and validated when it's complete.
    single_segment_macro_resolutions:
        Vec<(Ident, MacroKind, ParentScope<'a>, Option<&'a NameBinding<'a>>)>,
@ -2567,10 +2577,10 @@ impl<'a> Resolver<'a> {
    }

    fn report_with_use_injections(&mut self, krate: &Crate) {
        for UseError { mut err, candidates, node_id, better, suggestion } in
        for UseError { mut err, candidates, def_id, better, suggestion } in
            self.use_injections.drain(..)
        {
            let (span, found_use) = UsePlacementFinder::check(krate, node_id);
            let (span, found_use) = UsePlacementFinder::check(&self.definitions, krate, def_id);
            if !candidates.is_empty() {
                diagnostics::show_candidates(&mut err, span, &candidates, better, found_use);
            } else if let Some((span, msg, sugg, appl)) = suggestion {

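Stripped of rustc's visitor machinery, the finder's job is a recursive search: descend until the target module is reached, then report the position of its first `use` item (if any) as the place to splice suggested imports, and give up gracefully when the target is not local. A simplified sketch over a toy module tree (every type here is a stand-in, not rustc's):

```rust
struct Module {
    id: u32,
    uses: Vec<&'static str>, // positions of existing `use` items, as strings
    children: Vec<Module>,
}

// Find the target module and return the position of its first `use`,
// mirroring the (span, found_use) result shape above.
fn check(module: &Module, target: u32) -> (Option<&'static str>, bool) {
    if module.id == target {
        return match module.uses.first().copied() {
            Some(span) => (Some(span), true),
            None => (None, false),
        };
    }
    for child in &module.children {
        let (span, found) = check(child, target);
        if span.is_some() || found {
            return (span, found);
        }
    }
    (None, false)
}

fn main() {
    let krate = Module {
        id: 0,
        uses: vec![],
        children: vec![Module { id: 1, uses: vec!["lib.rs:3"], children: vec![] }],
    };
    assert_eq!(check(&krate, 1), (Some("lib.rs:3"), true));
    assert_eq!(check(&krate, 99), (None, false)); // e.g. a non-local module
}
```
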
@ -333,7 +333,7 @@ impl<'a> base::Resolver for Resolver<'a> {
    }

    fn check_unused_macros(&mut self) {
        for (&node_id, &span) in self.unused_macros.iter() {
        for (_, &(node_id, span)) in self.unused_macros.iter() {
            self.lint_buffer.buffer_lint(UNUSED_MACROS, node_id, span, "unused macro definition");
        }
    }
@ -416,9 +416,9 @@ impl<'a> Resolver<'a> {

        match res {
            Res::Def(DefKind::Macro(_), def_id) => {
                if let Some(node_id) = self.definitions.as_local_node_id(def_id) {
                    self.unused_macros.remove(&node_id);
                    if self.proc_macro_stubs.contains(&node_id) {
                if let Some(def_id) = def_id.as_local() {
                    self.unused_macros.remove(&def_id);
                    if self.proc_macro_stubs.contains(&def_id) {
                        self.session.span_err(
                            path.span,
                            "can't use a procedural macro from the same crate that defines it",

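The mechanics behind `check_unused_macros` are plain set bookkeeping, now keyed by `LocalDefId`: every definition is presumed unused up front, each resolved use removes its entry, and whatever survives gets linted. A toy model with made-up ID and span values:

```rust
use std::collections::HashMap;

fn main() {
    // def_id -> (node_id, span), mirroring the new map shape above
    let mut unused_macros: HashMap<u32, (u32, &str)> = HashMap::new();
    unused_macros.insert(10, (100, "a.rs:1"));
    unused_macros.insert(11, (101, "b.rs:7"));

    // Resolving a use of the macro with def_id 10 removes its entry.
    unused_macros.remove(&10);

    // Everything left over is reported, as in `check_unused_macros`.
    for (_, &(node_id, span)) in unused_macros.iter() {
        println!("unused macro definition (node {}, at {})", node_id, span);
    }
}
```
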
@ -1040,17 +1040,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
        &mut self,
        stack: &TraitObligationStack<'o, 'tcx>,
    ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
        if stack.obligation.predicate.references_error() {
            // If we encounter a `Error`, we generally prefer the
            // most "optimistic" result in response -- that is, the
            // one least likely to report downstream errors. But
            // because this routine is shared by coherence and by
            // trait selection, there isn't an obvious "right" choice
            // here in that respect, so we opt to just return
            // ambiguity and let the upstream clients sort it out.
            return Ok(None);
        }

        if let Some(conflict) = self.is_knowable(stack) {
            debug!("coherence stage: not knowable");
            if self.intercrate_ambiguity_causes.is_some() {

@ -121,9 +121,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        let tcx = self.tcx;
        match BinOpCategory::from(op) {
            BinOpCategory::Shortcircuit => {
                self.demand_suptype(*lhs_span, tcx.mk_bool(), lhs_ty);
                self.demand_suptype(*rhs_span, tcx.mk_bool(), rhs_ty);
                tcx.mk_bool()
                self.demand_suptype(*lhs_span, tcx.types.bool, lhs_ty);
                self.demand_suptype(*rhs_span, tcx.types.bool, rhs_ty);
                tcx.types.bool
            }

            BinOpCategory::Shift => {
@ -140,7 +140,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            BinOpCategory::Comparison => {
                // both LHS and RHS and result will have the same type
                self.demand_suptype(*rhs_span, lhs_ty, rhs_ty);
                tcx.mk_bool()
                tcx.types.bool
            }
        }
    }

@ -2426,6 +2426,24 @@ where
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        self.base.extend(iter)
    }

    #[inline]
    fn extend_one(&mut self, (k, v): (K, V)) {
        self.base.insert(k, v);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        // self.base.extend_reserve(additional);
        // FIXME: hashbrown should implement this method.
        // But until then, use the same reservation logic:

        // Reserve the entire hint lower bound if the map is empty.
        // Otherwise reserve half the hint (rounded up), so the map
        // will only resize twice in the worst case.
        let reserve = if self.is_empty() { additional } else { (additional + 1) / 2 };
        self.base.reserve(reserve);
    }
}
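
The reservation rule in `extend_reserve` trades a little memory for a bound on rehashing: an empty map takes the whole lower-bound hint, a non-empty one takes half rounded up, which caps the worst case at two resizes even when the hint was low. The arithmetic, pulled out as a sketch:

```rust
// The same policy as the code above, as a free function.
fn reserve_amount(is_empty: bool, additional: usize) -> usize {
    if is_empty { additional } else { (additional + 1) / 2 }
}

fn main() {
    assert_eq!(reserve_amount(true, 100), 100); // empty: reserve the full hint
    assert_eq!(reserve_amount(false, 100), 50); // non-empty: half the hint
    assert_eq!(reserve_amount(false, 7), 4);    // rounded up: (7 + 1) / 2
}
```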

#[stable(feature = "hash_extend_copy", since = "1.4.0")]
@ -2439,6 +2457,16 @@ where
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        self.base.extend(iter)
    }

    #[inline]
    fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
        self.base.insert(k, v);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        Extend::<(K, V)>::extend_reserve(self, additional)
    }
}

/// `RandomState` is the default state for [`HashMap`] types.

@ -970,6 +970,16 @@ where
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        self.map.extend(iter.into_iter().map(|k| (k, ())));
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.map.insert(item, ());
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.map.extend_reserve(additional);
    }
}

#[stable(feature = "hash_extend_copy", since = "1.4.0")]
@ -982,6 +992,16 @@ where
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        self.map.insert(item, ());
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        Extend::<T>::extend_reserve(self, additional)
    }
}
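
Both `HashSet` impls above simply forward to the backing `HashMap` with `()` values. The same delegation pattern in miniature, using a hypothetical `TinySet` so it runs on stable without the `extend_one` feature gate:

```rust
use std::collections::HashMap;
use std::hash::Hash;

struct TinySet<T: Hash + Eq> {
    map: HashMap<T, ()>,
}

impl<T: Hash + Eq> TinySet<T> {
    fn new() -> Self {
        TinySet { map: HashMap::new() }
    }

    // Mirrors `extend_one`: a set insert is a map insert with a `()` value.
    fn extend_one(&mut self, item: T) {
        self.map.insert(item, ());
    }

    fn len(&self) -> usize {
        self.map.len()
    }
}

fn main() {
    let mut set = TinySet::new();
    set.extend_one("a");
    set.extend_one("a"); // duplicates collapse, as expected of a set
    set.extend_one("b");
    assert_eq!(set.len(), 2);
}
```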

#[stable(feature = "rust1", since = "1.0.0")]

@ -263,6 +263,7 @@
#![feature(duration_constants)]
#![feature(exact_size_is_empty)]
#![feature(exhaustive_patterns)]
#![feature(extend_one)]
#![feature(external_doc)]
#![feature(fn_traits)]
#![feature(format_args_nl)]

@ -1534,6 +1534,11 @@ impl<P: AsRef<Path>> iter::Extend<P> for PathBuf {
    fn extend<I: IntoIterator<Item = P>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |p| self.push(p.as_ref()));
    }

    #[inline]
    fn extend_one(&mut self, p: P) {
        self.push(p.as_ref());
    }
}

#[stable(feature = "rust1", since = "1.0.0")]

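The stable `Extend` impl this `extend_one` slots into is usable today; the new method only gives single-element extension a dedicated path. Usage of the existing impl:

```rust
use std::path::PathBuf;

fn main() {
    let mut path = PathBuf::from("/var");
    // Each extended element is pushed as one path component.
    path.extend(vec!["log", "app", "today.txt"]);
    assert_eq!(path, PathBuf::from("/var/log/app/today.txt"));
}
```
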
@ -386,6 +386,17 @@ impl Extend<CodePoint> for Wtf8Buf {
        self.bytes.reserve(low);
        iterator.for_each(move |code_point| self.push(code_point));
    }

    #[inline]
    fn extend_one(&mut self, code_point: CodePoint) {
        self.push(code_point);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        // Lower bound of one byte per code point (ASCII only)
        self.bytes.reserve(additional);
    }
}

/// A borrowed slice of well-formed WTF-8 data.

@ -90,7 +90,7 @@ fn main() {
    }

    let linker_args = read_linker_args(&ok);
    for mut arg in linker_args.split('S') {
    for arg in linker_args.split('S') {
        expected_libs.remove(arg);
    }

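For reference, the `mut` removed above is exactly the kind the `unused_mut` lint flags: the binding is never mutated, so the qualifier is noise. A minimal reproduction:

```rust
fn main() {
    let args = "libfoo S libbar";
    // warning: variable does not need to be mutable
    for mut arg in args.split('S') {
        println!("{}", arg.trim());
    }
}
```
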
src/test/ui/async-await/issue-72590-type-error-sized.rs (new file, 22 lines)
@ -0,0 +1,22 @@
// Regression test for issue #72590
// Tests that we don't emit a spurious "size cannot be statically determined" error
// edition:2018

struct Foo {
    foo: Nonexistent, //~ ERROR cannot find
    other: str
}

struct Bar {
    test: Missing //~ ERROR cannot find
}

impl Foo {
    async fn frob(self) {} //~ ERROR the size
}

impl Bar {
    async fn myfn(self) {}
}

fn main() {}
src/test/ui/async-await/issue-72590-type-error-sized.stderr (new file, 28 lines)
@ -0,0 +1,28 @@
error[E0412]: cannot find type `Nonexistent` in this scope
  --> $DIR/issue-72590-type-error-sized.rs:6:10
   |
LL |     foo: Nonexistent,
   |          ^^^^^^^^^^^ not found in this scope

error[E0412]: cannot find type `Missing` in this scope
  --> $DIR/issue-72590-type-error-sized.rs:11:11
   |
LL |     test: Missing
   |           ^^^^^^^ not found in this scope

error[E0277]: the size for values of type `str` cannot be known at compilation time
  --> $DIR/issue-72590-type-error-sized.rs:15:19
   |
LL |     async fn frob(self) {}
   |                   ^^^^ doesn't have a size known at compile-time
   |
   = help: within `Foo`, the trait `std::marker::Sized` is not implemented for `str`
   = note: to learn more, visit <https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>
   = note: required because it appears within the type `Foo`
   = note: all local variables must have a statically known size
   = help: unsized locals are gated as an unstable feature

error: aborting due to 3 previous errors

Some errors have detailed explanations: E0277, E0412.
For more information about an error, try `rustc --explain E0277`.
src/test/ui/issues/issue-72574-1.rs (new file, 8 lines)
@ -0,0 +1,8 @@
fn main() {
    let x = (1, 2, 3);
    match x {
        (_a, _x @ ..) => {}
        _ => {}
    }
}
//~^^^^ ERROR `_x @` is not allowed in a tuple
src/test/ui/issues/issue-72574-1.stderr (new file, 14 lines)
@ -0,0 +1,14 @@
error: `_x @` is not allowed in a tuple
  --> $DIR/issue-72574-1.rs:4:14
   |
LL |         (_a, _x @ ..) => {}
   |              ^^^^^^^ this is only allowed in slice patterns
   |
   = help: remove this and bind each tuple field independently
help: if you don't need to use the contents of _x, discard the tuple's remaining fields
   |
LL |         (_a, ..) => {}
   |              ^^

error: aborting due to previous error

src/test/ui/issues/issue-72574-2.rs (new file, 10 lines)
@ -0,0 +1,10 @@
struct Binder(i32, i32, i32);

fn main() {
    let x = Binder(1, 2, 3);
    match x {
        Binder(_a, _x @ ..) => {}
        _ => {}
    }
}
//~^^^^ ERROR `_x @` is not allowed in a tuple struct
src/test/ui/issues/issue-72574-2.stderr (new file, 14 lines)
@ -0,0 +1,14 @@
error: `_x @` is not allowed in a tuple struct
  --> $DIR/issue-72574-2.rs:6:20
   |
LL |         Binder(_a, _x @ ..) => {}
   |                    ^^^^^^^ this is only allowed in slice patterns
   |
   = help: remove this and bind each tuple field independently
help: if you don't need to use the contents of _x, discard the tuple's remaining fields
   |
LL |         Binder(_a, ..) => {}
   |                    ^^

error: aborting due to previous error

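For contrast with the two rejected test cases above, these are the accepted spellings: a bare `..` rest pattern in tuples and tuple structs, and `ident @ ..` only inside slice patterns. A sketch that compiles:

```rust
struct Binder(i32, i32, i32);

fn main() {
    // Tuples and tuple structs: discard the rest with a bare `..`.
    let t = (1, 2, 3);
    match t {
        (_a, ..) => {}
    }
    let b = Binder(1, 2, 3);
    match b {
        Binder(_a, ..) => {}
    }

    // Slices and arrays: binding the rest with `@ ..` is allowed.
    let xs = [1, 2, 3];
    match xs {
        [_first, rest @ ..] => assert_eq!(rest, [2, 3]),
    }
}
```
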
@ -1,18 +0,0 @@
// aux-build: test-macros.rs

extern crate test_macros;
use test_macros::recollect_attr;

macro_rules! reemit {
    ($name:ident => $($token:expr)*) => {

        #[recollect_attr]
        pub fn $name() {
            $($token)*;
        }
    }
}

reemit! { foo => 45u32.into() } //~ ERROR type annotations

fn main() {}
@ -1,12 +0,0 @@
error[E0282]: type annotations needed
  --> $DIR/macro-rules-capture.rs:16:24
   |
LL | reemit! { foo => 45u32.into() }
   |                  ------^^^^--
   |                  |     |
   |                  |     cannot infer type for type parameter `T` declared on the trait `Into`
   |                  this method call resolves to `T`

error: aborting due to previous error

For more information about this error, try `rustc --explain E0282`.
src/test/ui/unused-crate-deps/test-use-ok.rs (new file, 15 lines)
@ -0,0 +1,15 @@
// Test-only use OK

// edition:2018
// check-pass
// aux-crate:bar=bar.rs
// compile-flags:--test

#![deny(unused_crate_dependencies)]

fn main() {}

#[test]
fn test_bar() {
    assert_eq!(bar::BAR, "bar");
}