mirror of https://github.com/rust-lang/rust.git
synced 2024-10-31 22:41:50 +00:00

Auto merge of #73924 - Manishearth:rollup-8r51ld9, r=Manishearth

Rollup of 17 pull requests

Successful merges:

- #72071 (Added detailed error code explanation for issue E0687 in Rust compiler.)
- #72369 (Bring net/parser.rs up to date with modern rust patterns)
- #72445 (Stabilize `#[track_caller]`.)
- #73466 (impl From<char> for String)
- #73548 (remove rustdoc warnings)
- #73649 (Fix sentence structure)
- #73678 (Update Box::from_raw example to generalize better)
- #73705 (stop taking references in Relate)
- #73716 (Document the static keyword)
- #73752 (Remap Windows ERROR_INVALID_PARAMETER to ErrorKind::InvalidInput from Other)
- #73776 (Move terminator to new module)
- #73778 (Make `likely` and `unlikely` const, gated by feature `const_unlikely`)
- #73805 (Document the type keyword)
- #73806 (Use an 'approximate' universal upper bound when reporting region errors)
- #73828 (Fix wording for anonymous parameter name help)
- #73846 (Fix comma in debug_assert! docs)
- #73847 (Edit cursor.prev() method docs in lexer)

Failed merges:

r? @ghost
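Of the library-facing changes above, #73466 (`impl From<char> for String`) is the simplest to show in isolation. The snippet below is an illustrative sketch, not part of the commit, of what that conversion enables on Rust 1.46 and later:

```rust
fn main() {
    // Direct conversion via the new `From<char>` impl.
    let a = String::from('a');
    assert_eq!(a, "a");

    // `Into<String>` comes for free from the `From` impl.
    let x: String = 'x'.into();
    assert_eq!(x, "x");

    // Multi-byte characters work too; the resulting String is UTF-8 encoded.
    let heart = String::from('❤');
    assert_eq!(heart.len(), 3);
}
```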
commit f781babf87

README.md (21 lines changed):
@@ -1,7 +1,9 @@
# The Rust Programming Language
<a href = "https://www.rust-lang.org/">
<img width = "90%" height = "auto" src = "https://img.shields.io/badge/Rust-Programming%20Language-black?style=flat&logo=rust" alt = "The Rust Programming Language">
</a>

This is the main source code repository for [Rust]. It contains the compiler,
standard library, and documentation.

[Rust]: https://www.rust-lang.org

@@ -17,9 +19,9 @@ Read ["Installation"] from [The Book].
_Note: If you wish to contribute to the compiler, you should read [this
chapter][rustcguidebuild] of the rustc-dev-guide instead of this section._

The Rust build system has a Python script called `x.py` to bootstrap building
the compiler. More information about it may be found by running `./x.py --help`
or reading the [rustc dev guide][rustcguidebuild].
The Rust build system uses a Python script called `x.py` to build the compiler,
which manages the bootstrapping process. More information about it can be found
by running `./x.py --help` or reading the [rustc dev guide][rustcguidebuild].

[rustcguidebuild]: https://rustc-dev-guide.rust-lang.org/building/how-to-build-and-run.html

@@ -54,9 +56,8 @@ or reading the [rustc dev guide][rustcguidebuild].
$ cp config.toml.example config.toml
```

It is recommended that if you plan to use the Rust build system to create
an installation (using `./x.py install`) that you set the `prefix` value
in the `[install]` section to a directory that you have write permissions.
If you plan to use `x.py install` to create an installation, it is recommended
that you set the `prefix` value in the `[install]` section to a directory.

Create install directory if you are not installing in default directory

@@ -143,8 +144,8 @@ shell with:
```

Currently, building Rust only works with some known versions of Visual Studio. If
you have a more recent version installed the build system doesn't understand
then you may need to force rustbuild to use an older version. This can be done
you have a more recent version installed and the build system doesn't understand,
you may need to force rustbuild to use an older version. This can be done
by manually calling the appropriate vcvars file before running the bootstrap.

```batch
@@ -1,5 +0,0 @@
# `track_caller`

The tracking issue for this feature is: [#47809](https://github.com/rust-lang/rust/issues/47809).

------------------------
@@ -384,7 +384,10 @@ impl<T: ?Sized> Box<T> {
    ///
    /// unsafe {
    ///     let ptr = alloc(Layout::new::<i32>()) as *mut i32;
    ///     *ptr = 5;
    ///     // In general .write is required to avoid attempting to destruct
    ///     // the (uninitialized) previous contents of `ptr`, though for this
    ///     // simple example `*ptr = 5` would have worked as well.
    ///     ptr.write(5);
    ///     let x = Box::from_raw(ptr);
    /// }
    /// ```
@@ -2518,3 +2518,11 @@ impl DoubleEndedIterator for Drain<'_> {

#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for Drain<'_> {}

#[stable(feature = "from_char_for_string", since = "1.46.0")]
impl From<char> for String {
    #[inline]
    fn from(c: char) -> Self {
        c.to_string()
    }
}
@@ -714,3 +714,10 @@ fn test_try_reserve_exact() {
        }
    }
}

#[test]
fn test_from_char() {
    assert_eq!(String::from('a'), 'a'.to_string());
    let s: String = 'x'.into();
    assert_eq!(s, 'x'.to_string());
}
@@ -952,6 +952,7 @@ extern "rust-intrinsic" {
    /// Any use other than with `if` statements will probably not have an effect.
    ///
    /// This intrinsic does not have a stable counterpart.
    #[rustc_const_unstable(feature = "const_likely", issue = "none")]
    pub fn likely(b: bool) -> bool;

    /// Hints to the compiler that branch condition is likely to be false.
@@ -960,6 +961,7 @@ extern "rust-intrinsic" {
    /// Any use other than with `if` statements will probably not have an effect.
    ///
    /// This intrinsic does not have a stable counterpart.
    #[rustc_const_unstable(feature = "const_likely", issue = "none")]
    pub fn unlikely(b: bool) -> bool;

    /// Executes a breakpoint trap, for inspection by a debugger.
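The two hunks above only add a `#[rustc_const_unstable]` gate to the existing `likely`/`unlikely` intrinsics (#73778). For readers unfamiliar with them, here is a hedged, nightly-only sketch of typical usage; the feature gate and the `unsafe` block reflect assumptions about the era's toolchain rather than anything stated in this diff:

```rust
#![feature(core_intrinsics)] // nightly-only; these intrinsics have no stable counterpart

use std::intrinsics::unlikely;

fn checked_div(a: u32, b: u32) -> Option<u32> {
    // `unlikely` returns its argument unchanged; it only hints to codegen
    // that this branch is rarely taken. The `unsafe` block may be redundant
    // on toolchains that treat these intrinsics as safe to call.
    if unsafe { unlikely(b == 0) } {
        None
    } else {
        Some(a / b)
    }
}

fn main() {
    assert_eq!(checked_div(10, 2), Some(5));
    assert_eq!(checked_div(1, 0), None);
}
```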
@@ -92,6 +92,7 @@
#![feature(const_slice_from_raw_parts)]
#![feature(const_slice_ptr_len)]
#![feature(const_type_name)]
#![feature(const_likely)]
#![feature(custom_inner_attributes)]
#![feature(decl_macro)]
#![feature(doc_cfg)]
@@ -118,7 +119,7 @@
#![feature(staged_api)]
#![feature(std_internals)]
#![feature(stmt_expr_attributes)]
#![feature(track_caller)]
#![cfg_attr(bootstrap, feature(track_caller))]
#![feature(transparent_unions)]
#![feature(unboxed_closures)]
#![feature(unsized_locals)]
@@ -1,6 +1,6 @@
#[doc(include = "panic.md")]
#[macro_export]
#[allow_internal_unstable(core_panic, track_caller)]
#[allow_internal_unstable(core_panic, const_caller_location)]
#[stable(feature = "core", since = "1.6.0")]
macro_rules! panic {
    () => (
@@ -151,7 +151,7 @@ macro_rules! assert_ne {
/// An unchecked assertion allows a program in an inconsistent state to keep
/// running, which might have unexpected consequences but does not introduce
/// unsafety as long as this only happens in safe code. The performance cost
/// of assertions, is however, not measurable in general. Replacing [`assert!`]
/// of assertions, however, is not measurable in general. Replacing [`assert!`]
/// with `debug_assert!` is thus only encouraged after thorough profiling, and
/// more importantly, only in safe code!
///
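The hunk above is only a comma fix in the `debug_assert!` documentation (#73846). As a quick illustration of the guidance in that text, here is a small made-up example of typical `debug_assert!` use:

```rust
fn mean(values: &[f64]) -> f64 {
    // Development-time sanity check: compiled out with `--release` unless
    // `-C debug-assertions` is enabled, so it must not be load-bearing.
    debug_assert!(!values.is_empty(), "mean() called with an empty slice");
    values.iter().sum::<f64>() / values.len() as f64
}

fn main() {
    assert_eq!(mean(&[1.0, 2.0, 3.0]), 2.0);
}
```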
@@ -190,7 +190,6 @@ impl<'a> Location<'a> {
    /// # Examples
    ///
    /// ```
    /// #![feature(track_caller)]
    /// use core::panic::Location;
    ///
    /// /// Returns the [`Location`] at which it is called.
@@ -206,7 +205,7 @@ impl<'a> Location<'a> {
    ///
    /// let fixed_location = get_just_one_location();
    /// assert_eq!(fixed_location.file(), file!());
    /// assert_eq!(fixed_location.line(), 15);
    /// assert_eq!(fixed_location.line(), 14);
    /// assert_eq!(fixed_location.column(), 5);
    ///
    /// // running the same untracked function in a different location gives us the same result
@@ -217,7 +216,7 @@ impl<'a> Location<'a> {
    ///
    /// let this_location = get_caller_location();
    /// assert_eq!(this_location.file(), file!());
    /// assert_eq!(this_location.line(), 29);
    /// assert_eq!(this_location.line(), 28);
    /// assert_eq!(this_location.column(), 21);
    ///
    /// // running the tracked function in a different location produces a different value
@@ -226,13 +225,8 @@ impl<'a> Location<'a> {
    /// assert_ne!(this_location.line(), another_location.line());
    /// assert_ne!(this_location.column(), another_location.column());
    /// ```
    // FIXME: When stabilizing this method, please also update the documentation
    // of `intrinsics::caller_location`.
    #[unstable(
        feature = "track_caller",
        reason = "uses #[track_caller] which is not yet stable",
        issue = "47809"
    )]
    #[stable(feature = "track_caller", since = "1.46.0")]
    #[rustc_const_unstable(feature = "const_caller_location", issue = "47809")]
    #[track_caller]
    pub const fn caller() -> &'static Location<'static> {
        crate::intrinsics::caller_location()
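This hunk replaces the `#[unstable]` attribute on `Location::caller` with `#[stable(since = "1.46.0")]` as part of stabilizing `#[track_caller]` (#72445). Below is a self-contained sketch of the stabilized surface; the function name `log_call` is invented for illustration:

```rust
use std::panic::Location;

#[track_caller]
fn log_call(tag: &str) {
    // Because of #[track_caller], this reports the *caller's* file/line/column
    // rather than the location of `Location::caller()` inside this function.
    let loc = Location::caller();
    println!("{} called at {}:{}:{}", tag, loc.file(), loc.line(), loc.column());
}

fn main() {
    log_call("first");  // prints this line's location
    log_call("second"); // prints a different line
}
```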
@@ -382,6 +382,7 @@ E0668: include_str!("./error_codes/E0668.md"),
E0669: include_str!("./error_codes/E0669.md"),
E0670: include_str!("./error_codes/E0670.md"),
E0671: include_str!("./error_codes/E0671.md"),
E0687: include_str!("./error_codes/E0687.md"),
E0689: include_str!("./error_codes/E0689.md"),
E0690: include_str!("./error_codes/E0690.md"),
E0691: include_str!("./error_codes/E0691.md"),
@@ -613,7 +614,6 @@ E0766: include_str!("./error_codes/E0766.md"),
E0640, // infer outlives requirements
// E0645, // trait aliases not finished
E0667, // `impl Trait` in projections
E0687, // in-band lifetimes cannot be used in `fn`/`Fn` syntax
E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders
// E0694, // an unknown tool name found in scoped attributes
// E0702, // replaced with a generic attribute input check
src/librustc_error_codes/error_codes/E0687.md (new file, 36 lines):

@@ -0,0 +1,36 @@
In-band lifetimes cannot be used in `fn`/`Fn` syntax.

Erroneous code examples:

```compile_fail,E0687
#![feature(in_band_lifetimes)]

fn foo(x: fn(&'a u32)) {} // error!

fn bar(x: &Fn(&'a u32)) {} // error!

fn baz(x: fn(&'a u32), y: &'a u32) {} // error!

struct Foo<'a> { x: &'a u32 }

impl Foo<'a> {
    fn bar(&self, x: fn(&'a u32)) {} // error!
}
```

Lifetimes used in `fn` or `Fn` syntax must be explicitly
declared using `<...>` binders. For example:

```
fn foo<'a>(x: fn(&'a u32)) {} // ok!

fn bar<'a>(x: &Fn(&'a u32)) {} // ok!

fn baz<'a>(x: fn(&'a u32), y: &'a u32) {} // ok!

struct Foo<'a> { x: &'a u32 }

impl<'a> Foo<'a> {
    fn bar(&self, x: fn(&'a u32)) {} // ok!
}
```
@@ -3,8 +3,6 @@
Erroneous code example:

```compile_fail,E0736
#![feature(track_caller)]

#[naked]
#[track_caller]
fn foo() {}
@@ -5,8 +5,6 @@ restrictions.
Erroneous code example:

```compile_fail,E0737
#![feature(track_caller)]

#[track_caller]
extern "C" fn foo() {}
```
@@ -3,7 +3,6 @@
Erroneous code example:

```compile_fail,E0739
#![feature(track_caller)]
#[track_caller]
struct Bar {
    a: u8,
@@ -5,7 +5,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(crate_visibility_modifier)]
#![feature(nll)]
#![feature(track_caller)]
#![cfg_attr(bootstrap, feature(track_caller))]

pub use emitter::ColorConfig;

@@ -265,6 +265,9 @@ declare_features! (
    (accepted, const_if_match, "1.45.0", Some(49146), None),
    /// Allows the use of `loop` and `while` in constants.
    (accepted, const_loop, "1.45.0", Some(52000), None),
    /// Allows `#[track_caller]` to be used which provides
    /// accurate caller location reporting during panic (RFC 2091).
    (accepted, track_caller, "1.46.0", Some(47809), None),

    // -------------------------------------------------------------------------
    // feature-group-end: accepted features
@@ -494,10 +494,6 @@ declare_features! (
    /// Allows the use of raw-dylibs (RFC 2627).
    (active, raw_dylib, "1.40.0", Some(58713), None),

    /// Allows `#[track_caller]` to be used which provides
    /// accurate caller location reporting during panic (RFC 2091).
    (active, track_caller, "1.40.0", Some(47809), None),

    /// Allows making `dyn Trait` well-formed even if `Trait` is not object safe.
    /// In that case, `dyn Trait: Trait` does not hold. Moreover, coercions and
    /// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden.
@@ -260,6 +260,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
    ungated!(cold, Whitelisted, template!(Word)),
    ungated!(no_builtins, Whitelisted, template!(Word)),
    ungated!(target_feature, Whitelisted, template!(List: r#"enable = "name""#)),
    ungated!(track_caller, Whitelisted, template!(Word)),
    gated!(
        no_sanitize, Whitelisted,
        template!(List: "address, memory, thread"),
@@ -333,7 +334,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
    gated!(ffi_returns_twice, Whitelisted, template!(Word), experimental!(ffi_returns_twice)),
    gated!(ffi_pure, Whitelisted, template!(Word), experimental!(ffi_pure)),
    gated!(ffi_const, Whitelisted, template!(Word), experimental!(ffi_const)),
    gated!(track_caller, Whitelisted, template!(Word), experimental!(track_caller)),
    gated!(
        register_attr, CrateLevel, template!(List: "attr1, attr2, ..."),
        experimental!(register_attr),
@ -82,7 +82,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
where
|
||||
T: ToTrace<'tcx>,
|
||||
{
|
||||
self.trace_exp(a_is_expected, a, b).sub(&a, &b)
|
||||
self.trace_exp(a_is_expected, a, b).sub(a, b)
|
||||
}
|
||||
|
||||
/// Makes `actual <: expected`. For example, if type-checking a
|
||||
@ -109,7 +109,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
where
|
||||
T: ToTrace<'tcx>,
|
||||
{
|
||||
self.trace_exp(a_is_expected, a, b).eq(&a, &b)
|
||||
self.trace_exp(a_is_expected, a, b).eq(a, b)
|
||||
}
|
||||
|
||||
/// Makes `expected <: actual`.
|
||||
@ -117,7 +117,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
where
|
||||
T: ToTrace<'tcx>,
|
||||
{
|
||||
self.trace(expected, actual).eq(&expected, &actual)
|
||||
self.trace(expected, actual).eq(expected, actual)
|
||||
}
|
||||
|
||||
pub fn relate<T>(self, expected: T, variance: ty::Variance, actual: T) -> InferResult<'tcx, ()>
|
||||
@ -147,7 +147,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
where
|
||||
T: ToTrace<'tcx>,
|
||||
{
|
||||
self.trace(expected, actual).lub(&expected, &actual)
|
||||
self.trace(expected, actual).lub(expected, actual)
|
||||
}
|
||||
|
||||
/// Computes the greatest-lower-bound, or mutual subtype, of two
|
||||
@ -157,7 +157,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
where
|
||||
T: ToTrace<'tcx>,
|
||||
{
|
||||
self.trace(expected, actual).glb(&expected, &actual)
|
||||
self.trace(expected, actual).glb(expected, actual)
|
||||
}
|
||||
|
||||
/// Sets the "trace" values that will be used for
|
||||
@ -186,7 +186,7 @@ impl<'a, 'tcx> At<'a, 'tcx> {
|
||||
impl<'a, 'tcx> Trace<'a, 'tcx> {
|
||||
/// Makes `a <: b` where `a` may or may not be expected (if
|
||||
/// `a_is_expected` is true, then `a` is expected).
|
||||
pub fn sub<T>(self, a: &T, b: &T) -> InferResult<'tcx, ()>
|
||||
pub fn sub<T>(self, a: T, b: T) -> InferResult<'tcx, ()>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
{
|
||||
@ -203,7 +203,7 @@ impl<'a, 'tcx> Trace<'a, 'tcx> {
|
||||
|
||||
/// Makes `a == b`; the expectation is set by the call to
|
||||
/// `trace()`.
|
||||
pub fn eq<T>(self, a: &T, b: &T) -> InferResult<'tcx, ()>
|
||||
pub fn eq<T>(self, a: T, b: T) -> InferResult<'tcx, ()>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
{
|
||||
@ -218,7 +218,7 @@ impl<'a, 'tcx> Trace<'a, 'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn lub<T>(self, a: &T, b: &T) -> InferResult<'tcx, T>
|
||||
pub fn lub<T>(self, a: T, b: T) -> InferResult<'tcx, T>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
{
|
||||
@ -233,7 +233,7 @@ impl<'a, 'tcx> Trace<'a, 'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn glb<T>(self, a: &T, b: &T) -> InferResult<'tcx, T>
|
||||
pub fn glb<T>(self, a: T, b: T) -> InferResult<'tcx, T>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
{
|
||||
|
@ -271,7 +271,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
|
||||
},
|
||||
ty::Variance::Invariant,
|
||||
)
|
||||
.relate(&v1, &v2)?;
|
||||
.relate(v1, v2)?;
|
||||
}
|
||||
|
||||
(GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => {
|
||||
@ -285,7 +285,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
|
||||
},
|
||||
ty::Variance::Invariant,
|
||||
)
|
||||
.relate(&v1, &v2)?;
|
||||
.relate(v1, v2)?;
|
||||
}
|
||||
|
||||
_ => {
|
||||
@ -302,7 +302,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
|
||||
// Screen out `'a: 'a` cases -- we skip the binder here but
|
||||
// only compare the inner values to one another, so they are still at
|
||||
// consistent binding levels.
|
||||
let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder();
|
||||
let ty::OutlivesPredicate(k1, r2) = r_c.skip_binder();
|
||||
if k1 != r2.into() { Some(r_c) } else { None }
|
||||
}),
|
||||
);
|
||||
@ -526,7 +526,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
|
||||
) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a + Captures<'tcx> {
|
||||
unsubstituted_region_constraints.iter().map(move |constraint| {
|
||||
let constraint = substitute_value(self.tcx, result_subst, constraint);
|
||||
let &ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
|
||||
let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
|
||||
|
||||
Obligation::new(
|
||||
cause.clone(),
|
||||
|
@ -318,10 +318,10 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
|
||||
// to associate causes/spans with each of the relations in
|
||||
// the stack to get this right.
|
||||
match dir {
|
||||
EqTo => self.equate(a_is_expected).relate(&a_ty, &b_ty),
|
||||
SubtypeOf => self.sub(a_is_expected).relate(&a_ty, &b_ty),
|
||||
EqTo => self.equate(a_is_expected).relate(a_ty, b_ty),
|
||||
SubtypeOf => self.sub(a_is_expected).relate(a_ty, b_ty),
|
||||
SupertypeOf => {
|
||||
self.sub(a_is_expected).relate_with_variance(ty::Contravariant, &a_ty, &b_ty)
|
||||
self.sub(a_is_expected).relate_with_variance(ty::Contravariant, a_ty, b_ty)
|
||||
}
|
||||
}?;
|
||||
|
||||
@ -379,7 +379,7 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
|
||||
param_env: self.param_env,
|
||||
};
|
||||
|
||||
let ty = match generalize.relate(&ty, &ty) {
|
||||
let ty = match generalize.relate(ty, ty) {
|
||||
Ok(ty) => ty,
|
||||
Err(e) => {
|
||||
debug!("generalize: failure {:?}", e);
|
||||
@ -490,8 +490,8 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -519,8 +519,8 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
let old_ambient_variance = self.ambient_variance;
|
||||
self.ambient_variance = self.ambient_variance.xform(variance);
|
||||
@ -552,7 +552,7 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
|
||||
match probe {
|
||||
TypeVariableValue::Known { value: u } => {
|
||||
debug!("generalize: known value {:?}", u);
|
||||
self.relate(&u, &u)
|
||||
self.relate(u, u)
|
||||
}
|
||||
TypeVariableValue::Unknown { universe } => {
|
||||
match self.ambient_variance {
|
||||
@ -655,7 +655,7 @@ impl TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
|
||||
let variable_table = &mut inner.const_unification_table();
|
||||
let var_value = variable_table.probe_value(vid);
|
||||
match var_value.val {
|
||||
ConstVariableValue::Known { value: u } => self.relate(&u, &u),
|
||||
ConstVariableValue::Known { value: u } => self.relate(u, u),
|
||||
ConstVariableValue::Unknown { universe } => {
|
||||
if self.for_universe.can_name(universe) {
|
||||
Ok(c)
|
||||
|
@ -59,8 +59,8 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
_: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
self.relate(a, b)
|
||||
}
|
||||
@ -124,8 +124,8 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -136,7 +136,7 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
|
||||
} else {
|
||||
// Fast path for the common case.
|
||||
self.relate(a.skip_binder(), b.skip_binder())?;
|
||||
Ok(a.clone())
|
||||
Ok(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -43,8 +43,8 @@ impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
match variance {
|
||||
ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b),
|
||||
@ -85,8 +85,8 @@ impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -112,8 +112,8 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Glb<'combine, 'infcx,
|
||||
|
||||
fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()> {
|
||||
let mut sub = self.fields.sub(self.a_is_expected);
|
||||
sub.relate(&v, &a)?;
|
||||
sub.relate(&v, &b)?;
|
||||
sub.relate(v, a)?;
|
||||
sub.relate(v, b)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -11,8 +11,8 @@ use rustc_middle::ty::{self, Binder, TypeFoldable};
|
||||
impl<'a, 'tcx> CombineFields<'a, 'tcx> {
|
||||
pub fn higher_ranked_sub<T>(
|
||||
&mut self,
|
||||
a: &Binder<T>,
|
||||
b: &Binder<T>,
|
||||
a: Binder<T>,
|
||||
b: Binder<T>,
|
||||
a_is_expected: bool,
|
||||
) -> RelateResult<'tcx, Binder<T>>
|
||||
where
|
||||
@ -33,20 +33,20 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
|
||||
self.infcx.commit_if_ok(|_| {
|
||||
// First, we instantiate each bound region in the supertype with a
|
||||
// fresh placeholder region.
|
||||
let (b_prime, _) = self.infcx.replace_bound_vars_with_placeholders(b);
|
||||
let (b_prime, _) = self.infcx.replace_bound_vars_with_placeholders(&b);
|
||||
|
||||
// Next, we instantiate each bound region in the subtype
|
||||
// with a fresh region variable. These region variables --
|
||||
// but no other pre-existing region variables -- can name
|
||||
// the placeholders.
|
||||
let (a_prime, _) =
|
||||
self.infcx.replace_bound_vars_with_fresh_vars(span, HigherRankedType, a);
|
||||
self.infcx.replace_bound_vars_with_fresh_vars(span, HigherRankedType, &a);
|
||||
|
||||
debug!("a_prime={:?}", a_prime);
|
||||
debug!("b_prime={:?}", b_prime);
|
||||
|
||||
// Compare types now that bound regions have been replaced.
|
||||
let result = self.sub(a_is_expected).relate(&a_prime, &b_prime)?;
|
||||
let result = self.sub(a_is_expected).relate(a_prime, b_prime)?;
|
||||
|
||||
debug!("higher_ranked_sub: OK result={:?}", result);
|
||||
|
||||
|
@ -43,8 +43,8 @@ impl TypeRelation<'tcx> for Lub<'combine, 'infcx, 'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
match variance {
|
||||
ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b),
|
||||
@ -85,8 +85,8 @@ impl TypeRelation<'tcx> for Lub<'combine, 'infcx, 'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -97,7 +97,7 @@ impl TypeRelation<'tcx> for Lub<'combine, 'infcx, 'tcx> {
|
||||
// very challenging, switch to invariance. This is obviously
|
||||
// overly conservative but works ok in practice.
|
||||
self.relate_with_variance(ty::Variance::Invariant, a, b)?;
|
||||
Ok(a.clone())
|
||||
Ok(a)
|
||||
}
|
||||
}
|
||||
|
||||
@ -118,8 +118,8 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx,
|
||||
|
||||
fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()> {
|
||||
let mut sub = self.fields.sub(self.a_is_expected);
|
||||
sub.relate(&a, &v)?;
|
||||
sub.relate(&b, &v)?;
|
||||
sub.relate(a, v)?;
|
||||
sub.relate(b, v)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -161,7 +161,7 @@ where
|
||||
|
||||
fn create_scope(
|
||||
&mut self,
|
||||
value: &ty::Binder<impl TypeFoldable<'tcx>>,
|
||||
value: ty::Binder<impl Relate<'tcx>>,
|
||||
universally_quantified: UniversallyQuantified,
|
||||
) -> BoundRegionScope<'tcx> {
|
||||
let mut scope = BoundRegionScope::default();
|
||||
@ -369,7 +369,7 @@ where
|
||||
universe,
|
||||
};
|
||||
|
||||
generalizer.relate(&value, &value)
|
||||
generalizer.relate(value, value)
|
||||
}
|
||||
}
|
||||
|
||||
@ -495,8 +495,8 @@ where
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
debug!("relate_with_variance(variance={:?}, a={:?}, b={:?})", variance, a, b);
|
||||
|
||||
@ -613,8 +613,8 @@ where
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -640,11 +640,10 @@ where
|
||||
|
||||
debug!("binders({:?}: {:?}, ambient_variance={:?})", a, b, self.ambient_variance);
|
||||
|
||||
if !a.skip_binder().has_escaping_bound_vars() && !b.skip_binder().has_escaping_bound_vars()
|
||||
{
|
||||
if let (Some(a), Some(b)) = (a.no_bound_vars(), b.no_bound_vars()) {
|
||||
// Fast path for the common case.
|
||||
self.relate(a.skip_binder(), b.skip_binder())?;
|
||||
return Ok(a.clone());
|
||||
self.relate(a, b)?;
|
||||
return Ok(ty::Binder::bind(a));
|
||||
}
|
||||
|
||||
if self.ambient_covariance() {
|
||||
@ -839,8 +838,8 @@ where
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
debug!(
|
||||
"TypeGeneralizer::relate_with_variance(variance={:?}, a={:?}, b={:?})",
|
||||
@ -890,7 +889,7 @@ where
|
||||
match variables.probe(vid) {
|
||||
TypeVariableValue::Known { value: u } => {
|
||||
drop(variables);
|
||||
self.relate(&u, &u)
|
||||
self.relate(u, u)
|
||||
}
|
||||
TypeVariableValue::Unknown { universe: _universe } => {
|
||||
if self.ambient_variance == ty::Bivariant {
|
||||
@ -984,7 +983,7 @@ where
|
||||
let variable_table = &mut inner.const_unification_table();
|
||||
let var_value = variable_table.probe_value(vid);
|
||||
match var_value.val.known() {
|
||||
Some(u) => self.relate(&u, &u),
|
||||
Some(u) => self.relate(u, u),
|
||||
None => {
|
||||
let new_var_id = variable_table.new_key(ConstVarValue {
|
||||
origin: var_value.origin,
|
||||
@ -1001,8 +1000,8 @@ where
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
_: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
_: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
|
@ -62,8 +62,8 @@ impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
match variance {
|
||||
ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b),
|
||||
@ -162,8 +162,8 @@ impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
|
@@ -23,8 +23,8 @@ impl<'a> Cursor<'a> {
        }
    }

    /// For debug assertions only
    /// Returns the last eaten symbol (or '\0' in release builds).
    /// Returns the last eaten symbol (or `'\0'` in release builds).
    /// (For debug assertions only.)
    pub(crate) fn prev(&self) -> char {
        #[cfg(debug_assertions)]
        {
@@ -911,7 +911,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MutableTransmutes {
        }
        let sig = cx.tables().node_type(expr.hir_id).fn_sig(cx.tcx);
        let from = sig.inputs().skip_binder()[0];
        let to = *sig.output().skip_binder();
        let to = sig.output().skip_binder();
        return Some((from, to));
    }
    None
@@ -123,7 +123,7 @@
    T: HashStable<StableHashingContext<'a>>,
{
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        self.skip_binder().hash_stable(hcx, hasher);
        self.as_ref().skip_binder().hash_stable(hcx, hasher);
    }
}

@@ -42,7 +42,7 @@
#![feature(or_patterns)]
#![feature(range_is_empty)]
#![feature(min_specialization)]
#![feature(track_caller)]
#![cfg_attr(bootstrap, feature(track_caller))]
#![feature(trusted_len)]
#![feature(stmt_expr_attributes)]
#![feature(test)]
@@ -19,7 +19,6 @@ use rustc_target::abi::VariantIdx;

use polonius_engine::Atom;
pub use rustc_ast::ast::Mutability;
use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::graph::dominators::{dominators, Dominators};
use rustc_data_structures::graph::{self, GraphSuccessors};
@@ -45,6 +44,8 @@ pub mod mono;
mod predecessors;
mod query;
pub mod tcx;
pub mod terminator;
pub use terminator::*;
pub mod traversal;
mod type_foldable;
pub mod visit;
@ -1046,191 +1047,6 @@ pub struct BasicBlockData<'tcx> {
|
||||
pub is_cleanup: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct Terminator<'tcx> {
|
||||
pub source_info: SourceInfo,
|
||||
pub kind: TerminatorKind<'tcx>,
|
||||
}
|
||||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)]
|
||||
pub enum TerminatorKind<'tcx> {
|
||||
/// Block should have one successor in the graph; we jump there.
|
||||
Goto { target: BasicBlock },
|
||||
|
||||
/// Operand evaluates to an integer; jump depending on its value
|
||||
/// to one of the targets, and otherwise fallback to `otherwise`.
|
||||
SwitchInt {
|
||||
/// The discriminant value being tested.
|
||||
discr: Operand<'tcx>,
|
||||
|
||||
/// The type of value being tested.
|
||||
/// This is always the same as the type of `discr`.
|
||||
/// FIXME: remove this redundant information. Currently, it is relied on by pretty-printing.
|
||||
switch_ty: Ty<'tcx>,
|
||||
|
||||
/// Possible values. The locations to branch to in each case
|
||||
/// are found in the corresponding indices from the `targets` vector.
|
||||
values: Cow<'tcx, [u128]>,
|
||||
|
||||
/// Possible branch sites. The last element of this vector is used
|
||||
/// for the otherwise branch, so targets.len() == values.len() + 1
|
||||
/// should hold.
|
||||
//
|
||||
// This invariant is quite non-obvious and also could be improved.
|
||||
// One way to make this invariant is to have something like this instead:
|
||||
//
|
||||
// branches: Vec<(ConstInt, BasicBlock)>,
|
||||
// otherwise: Option<BasicBlock> // exhaustive if None
|
||||
//
|
||||
// However we’ve decided to keep this as-is until we figure a case
|
||||
// where some other approach seems to be strictly better than other.
|
||||
targets: Vec<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Indicates that the landing pad is finished and unwinding should
|
||||
/// continue. Emitted by `build::scope::diverge_cleanup`.
|
||||
Resume,
|
||||
|
||||
/// Indicates that the landing pad is finished and that the process
|
||||
/// should abort. Used to prevent unwinding for foreign items.
|
||||
Abort,
|
||||
|
||||
/// Indicates a normal return. The return place should have
|
||||
/// been filled in before this executes. This can occur multiple times
|
||||
/// in different basic blocks.
|
||||
Return,
|
||||
|
||||
/// Indicates a terminator that can never be reached.
|
||||
Unreachable,
|
||||
|
||||
/// Drop the `Place`.
|
||||
Drop { place: Place<'tcx>, target: BasicBlock, unwind: Option<BasicBlock> },
|
||||
|
||||
/// Drop the `Place` and assign the new value over it. This ensures
|
||||
/// that the assignment to `P` occurs *even if* the destructor for
|
||||
/// place unwinds. Its semantics are best explained by the
|
||||
/// elaboration:
|
||||
///
|
||||
/// ```
|
||||
/// BB0 {
|
||||
/// DropAndReplace(P <- V, goto BB1, unwind BB2)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// becomes
|
||||
///
|
||||
/// ```
|
||||
/// BB0 {
|
||||
/// Drop(P, goto BB1, unwind BB2)
|
||||
/// }
|
||||
/// BB1 {
|
||||
/// // P is now uninitialized
|
||||
/// P <- V
|
||||
/// }
|
||||
/// BB2 {
|
||||
/// // P is now uninitialized -- its dtor panicked
|
||||
/// P <- V
|
||||
/// }
|
||||
/// ```
|
||||
DropAndReplace {
|
||||
place: Place<'tcx>,
|
||||
value: Operand<'tcx>,
|
||||
target: BasicBlock,
|
||||
unwind: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Block ends with a call of a converging function.
|
||||
Call {
|
||||
/// The function that’s being called.
|
||||
func: Operand<'tcx>,
|
||||
/// Arguments the function is called with.
|
||||
/// These are owned by the callee, which is free to modify them.
|
||||
/// This allows the memory occupied by "by-value" arguments to be
|
||||
/// reused across function calls without duplicating the contents.
|
||||
args: Vec<Operand<'tcx>>,
|
||||
/// Destination for the return value. If some, the call is converging.
|
||||
destination: Option<(Place<'tcx>, BasicBlock)>,
|
||||
/// Cleanups to be done if the call unwinds.
|
||||
cleanup: Option<BasicBlock>,
|
||||
/// `true` if this is from a call in HIR rather than from an overloaded
|
||||
/// operator. True for overloaded function call.
|
||||
from_hir_call: bool,
|
||||
/// This `Span` is the span of the function, without the dot and receiver
|
||||
/// (e.g. `foo(a, b)` in `x.foo(a, b)`
|
||||
fn_span: Span,
|
||||
},
|
||||
|
||||
/// Jump to the target if the condition has the expected value,
|
||||
/// otherwise panic with a message and a cleanup target.
|
||||
Assert {
|
||||
cond: Operand<'tcx>,
|
||||
expected: bool,
|
||||
msg: AssertMessage<'tcx>,
|
||||
target: BasicBlock,
|
||||
cleanup: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// A suspend point.
|
||||
Yield {
|
||||
/// The value to return.
|
||||
value: Operand<'tcx>,
|
||||
/// Where to resume to.
|
||||
resume: BasicBlock,
|
||||
/// The place to store the resume argument in.
|
||||
resume_arg: Place<'tcx>,
|
||||
/// Cleanup to be done if the generator is dropped at this suspend point.
|
||||
drop: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Indicates the end of the dropping of a generator.
|
||||
GeneratorDrop,
|
||||
|
||||
/// A block where control flow only ever takes one real path, but borrowck
|
||||
/// needs to be more conservative.
|
||||
FalseEdge {
|
||||
/// The target normal control flow will take.
|
||||
real_target: BasicBlock,
|
||||
/// A block control flow could conceptually jump to, but won't in
|
||||
/// practice.
|
||||
imaginary_target: BasicBlock,
|
||||
},
|
||||
/// A terminator for blocks that only take one path in reality, but where we
|
||||
/// reserve the right to unwind in borrowck, even if it won't happen in practice.
|
||||
/// This can arise in infinite loops with no function calls for example.
|
||||
FalseUnwind {
|
||||
/// The target normal control flow will take.
|
||||
real_target: BasicBlock,
|
||||
/// The imaginary cleanup block link. This particular path will never be taken
|
||||
/// in practice, but in order to avoid fragility we want to always
|
||||
/// consider it in borrowck. We don't want to accept programs which
|
||||
/// pass borrowck only when `panic=abort` or some assertions are disabled
|
||||
/// due to release vs. debug mode builds. This needs to be an `Option` because
|
||||
/// of the `remove_noop_landing_pads` and `no_landing_pads` passes.
|
||||
unwind: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Block ends with an inline assembly block. This is a terminator since
|
||||
/// inline assembly is allowed to diverge.
|
||||
InlineAsm {
|
||||
/// The template for the inline assembly, with placeholders.
|
||||
template: &'tcx [InlineAsmTemplatePiece],
|
||||
|
||||
/// The operands for the inline assembly, as `Operand`s or `Place`s.
|
||||
operands: Vec<InlineAsmOperand<'tcx>>,
|
||||
|
||||
/// Miscellaneous options for the inline assembly.
|
||||
options: InlineAsmOptions,
|
||||
|
||||
/// Source spans for each line of the inline assembly code. These are
|
||||
/// used to map assembler errors back to the line in the source code.
|
||||
line_spans: &'tcx [Span],
|
||||
|
||||
/// Destination block after the inline assembly returns, unless it is
|
||||
/// diverging (InlineAsmOptions::NORETURN).
|
||||
destination: Option<BasicBlock>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Information about an assertion failure.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)]
|
||||
pub enum AssertKind<O> {
|
||||
@ -1279,149 +1095,6 @@ pub type Successors<'a> =
|
||||
pub type SuccessorsMut<'a> =
|
||||
iter::Chain<option::IntoIter<&'a mut BasicBlock>, slice::IterMut<'a, BasicBlock>>;
|
||||
|
||||
impl<'tcx> Terminator<'tcx> {
|
||||
pub fn successors(&self) -> Successors<'_> {
|
||||
self.kind.successors()
|
||||
}
|
||||
|
||||
pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
|
||||
self.kind.successors_mut()
|
||||
}
|
||||
|
||||
pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
|
||||
self.kind.unwind()
|
||||
}
|
||||
|
||||
pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
|
||||
self.kind.unwind_mut()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TerminatorKind<'tcx> {
|
||||
pub fn if_(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
cond: Operand<'tcx>,
|
||||
t: BasicBlock,
|
||||
f: BasicBlock,
|
||||
) -> TerminatorKind<'tcx> {
|
||||
static BOOL_SWITCH_FALSE: &[u128] = &[0];
|
||||
TerminatorKind::SwitchInt {
|
||||
discr: cond,
|
||||
switch_ty: tcx.types.bool,
|
||||
values: From::from(BOOL_SWITCH_FALSE),
|
||||
targets: vec![f, t],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successors(&self) -> Successors<'_> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Resume
|
||||
| Abort
|
||||
| GeneratorDrop
|
||||
| Return
|
||||
| Unreachable
|
||||
| Call { destination: None, cleanup: None, .. }
|
||||
| InlineAsm { destination: None, .. } => None.into_iter().chain(&[]),
|
||||
Goto { target: ref t }
|
||||
| Call { destination: None, cleanup: Some(ref t), .. }
|
||||
| Call { destination: Some((_, ref t)), cleanup: None, .. }
|
||||
| Yield { resume: ref t, drop: None, .. }
|
||||
| DropAndReplace { target: ref t, unwind: None, .. }
|
||||
| Drop { target: ref t, unwind: None, .. }
|
||||
| Assert { target: ref t, cleanup: None, .. }
|
||||
| FalseUnwind { real_target: ref t, unwind: None }
|
||||
| InlineAsm { destination: Some(ref t), .. } => Some(t).into_iter().chain(&[]),
|
||||
Call { destination: Some((_, ref t)), cleanup: Some(ref u), .. }
|
||||
| Yield { resume: ref t, drop: Some(ref u), .. }
|
||||
| DropAndReplace { target: ref t, unwind: Some(ref u), .. }
|
||||
| Drop { target: ref t, unwind: Some(ref u), .. }
|
||||
| Assert { target: ref t, cleanup: Some(ref u), .. }
|
||||
| FalseUnwind { real_target: ref t, unwind: Some(ref u) } => {
|
||||
Some(t).into_iter().chain(slice::from_ref(u))
|
||||
}
|
||||
SwitchInt { ref targets, .. } => None.into_iter().chain(&targets[..]),
|
||||
FalseEdge { ref real_target, ref imaginary_target } => {
|
||||
Some(real_target).into_iter().chain(slice::from_ref(imaginary_target))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Resume
|
||||
| Abort
|
||||
| GeneratorDrop
|
||||
| Return
|
||||
| Unreachable
|
||||
| Call { destination: None, cleanup: None, .. }
|
||||
| InlineAsm { destination: None, .. } => None.into_iter().chain(&mut []),
|
||||
Goto { target: ref mut t }
|
||||
| Call { destination: None, cleanup: Some(ref mut t), .. }
|
||||
| Call { destination: Some((_, ref mut t)), cleanup: None, .. }
|
||||
| Yield { resume: ref mut t, drop: None, .. }
|
||||
| DropAndReplace { target: ref mut t, unwind: None, .. }
|
||||
| Drop { target: ref mut t, unwind: None, .. }
|
||||
| Assert { target: ref mut t, cleanup: None, .. }
|
||||
| FalseUnwind { real_target: ref mut t, unwind: None }
|
||||
| InlineAsm { destination: Some(ref mut t), .. } => Some(t).into_iter().chain(&mut []),
|
||||
Call { destination: Some((_, ref mut t)), cleanup: Some(ref mut u), .. }
|
||||
| Yield { resume: ref mut t, drop: Some(ref mut u), .. }
|
||||
| DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. }
|
||||
| Drop { target: ref mut t, unwind: Some(ref mut u), .. }
|
||||
| Assert { target: ref mut t, cleanup: Some(ref mut u), .. }
|
||||
| FalseUnwind { real_target: ref mut t, unwind: Some(ref mut u) } => {
|
||||
Some(t).into_iter().chain(slice::from_mut(u))
|
||||
}
|
||||
SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets[..]),
|
||||
FalseEdge { ref mut real_target, ref mut imaginary_target } => {
|
||||
Some(real_target).into_iter().chain(slice::from_mut(imaginary_target))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
|
||||
match *self {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Abort
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::Yield { .. }
|
||||
| TerminatorKind::SwitchInt { .. }
|
||||
| TerminatorKind::FalseEdge { .. }
|
||||
| TerminatorKind::InlineAsm { .. } => None,
|
||||
TerminatorKind::Call { cleanup: ref unwind, .. }
|
||||
| TerminatorKind::Assert { cleanup: ref unwind, .. }
|
||||
| TerminatorKind::DropAndReplace { ref unwind, .. }
|
||||
| TerminatorKind::Drop { ref unwind, .. }
|
||||
| TerminatorKind::FalseUnwind { ref unwind, .. } => Some(unwind),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
|
||||
match *self {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Abort
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::Yield { .. }
|
||||
| TerminatorKind::SwitchInt { .. }
|
||||
| TerminatorKind::FalseEdge { .. }
|
||||
| TerminatorKind::InlineAsm { .. } => None,
|
||||
TerminatorKind::Call { cleanup: ref mut unwind, .. }
|
||||
| TerminatorKind::Assert { cleanup: ref mut unwind, .. }
|
||||
| TerminatorKind::DropAndReplace { ref mut unwind, .. }
|
||||
| TerminatorKind::Drop { ref mut unwind, .. }
|
||||
| TerminatorKind::FalseUnwind { ref mut unwind, .. } => Some(unwind),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> BasicBlockData<'tcx> {
|
||||
pub fn new(terminator: Option<Terminator<'tcx>>) -> BasicBlockData<'tcx> {
|
||||
BasicBlockData { statements: vec![], terminator, is_cleanup: false }
|
||||
@ -1628,169 +1301,6 @@ impl<O: fmt::Debug> fmt::Debug for AssertKind<O> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Debug for TerminatorKind<'tcx> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||
self.fmt_head(fmt)?;
|
||||
let successor_count = self.successors().count();
|
||||
let labels = self.fmt_successor_labels();
|
||||
assert_eq!(successor_count, labels.len());
|
||||
|
||||
match successor_count {
|
||||
0 => Ok(()),
|
||||
|
||||
1 => write!(fmt, " -> {:?}", self.successors().next().unwrap()),
|
||||
|
||||
_ => {
|
||||
write!(fmt, " -> [")?;
|
||||
for (i, target) in self.successors().enumerate() {
|
||||
if i > 0 {
|
||||
write!(fmt, ", ")?;
|
||||
}
|
||||
write!(fmt, "{}: {:?}", labels[i], target)?;
|
||||
}
|
||||
write!(fmt, "]")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TerminatorKind<'tcx> {
|
||||
/// Writes the "head" part of the terminator; that is, its name and the data it uses to pick the
|
||||
/// successor basic block, if any. The only information not included is the list of possible
|
||||
/// successors, which may be rendered differently between the text and the graphviz format.
|
||||
pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
|
||||
use self::TerminatorKind::*;
|
||||
match self {
|
||||
Goto { .. } => write!(fmt, "goto"),
|
||||
SwitchInt { discr, .. } => write!(fmt, "switchInt({:?})", discr),
|
||||
Return => write!(fmt, "return"),
|
||||
GeneratorDrop => write!(fmt, "generator_drop"),
|
||||
Resume => write!(fmt, "resume"),
|
||||
Abort => write!(fmt, "abort"),
|
||||
Yield { value, resume_arg, .. } => write!(fmt, "{:?} = yield({:?})", resume_arg, value),
|
||||
Unreachable => write!(fmt, "unreachable"),
|
||||
Drop { place, .. } => write!(fmt, "drop({:?})", place),
|
||||
DropAndReplace { place, value, .. } => {
|
||||
write!(fmt, "replace({:?} <- {:?})", place, value)
|
||||
}
|
||||
Call { func, args, destination, .. } => {
|
||||
if let Some((destination, _)) = destination {
|
||||
write!(fmt, "{:?} = ", destination)?;
|
||||
}
|
||||
write!(fmt, "{:?}(", func)?;
|
||||
for (index, arg) in args.iter().enumerate() {
|
||||
if index > 0 {
|
||||
write!(fmt, ", ")?;
|
||||
}
|
||||
write!(fmt, "{:?}", arg)?;
|
||||
}
|
||||
write!(fmt, ")")
|
||||
}
|
||||
Assert { cond, expected, msg, .. } => {
|
||||
write!(fmt, "assert(")?;
|
||||
if !expected {
|
||||
write!(fmt, "!")?;
|
||||
}
|
||||
write!(fmt, "{:?}, ", cond)?;
|
||||
msg.fmt_assert_args(fmt)?;
|
||||
write!(fmt, ")")
|
||||
}
|
||||
FalseEdge { .. } => write!(fmt, "falseEdge"),
|
||||
FalseUnwind { .. } => write!(fmt, "falseUnwind"),
|
||||
InlineAsm { template, ref operands, options, .. } => {
|
||||
write!(fmt, "asm!(\"{}\"", InlineAsmTemplatePiece::to_string(template))?;
|
||||
for op in operands {
|
||||
write!(fmt, ", ")?;
|
||||
let print_late = |&late| if late { "late" } else { "" };
|
||||
match op {
|
||||
InlineAsmOperand::In { reg, value } => {
|
||||
write!(fmt, "in({}) {:?}", reg, value)?;
|
||||
}
|
||||
InlineAsmOperand::Out { reg, late, place: Some(place) } => {
|
||||
write!(fmt, "{}out({}) {:?}", print_late(late), reg, place)?;
|
||||
}
|
||||
InlineAsmOperand::Out { reg, late, place: None } => {
|
||||
write!(fmt, "{}out({}) _", print_late(late), reg)?;
|
||||
}
|
||||
InlineAsmOperand::InOut {
|
||||
reg,
|
||||
late,
|
||||
in_value,
|
||||
out_place: Some(out_place),
|
||||
} => {
|
||||
write!(
|
||||
fmt,
|
||||
"in{}out({}) {:?} => {:?}",
|
||||
print_late(late),
|
||||
reg,
|
||||
in_value,
|
||||
out_place
|
||||
)?;
|
||||
}
|
||||
InlineAsmOperand::InOut { reg, late, in_value, out_place: None } => {
|
||||
write!(fmt, "in{}out({}) {:?} => _", print_late(late), reg, in_value)?;
|
||||
}
|
||||
InlineAsmOperand::Const { value } => {
|
||||
write!(fmt, "const {:?}", value)?;
|
||||
}
|
||||
InlineAsmOperand::SymFn { value } => {
|
||||
write!(fmt, "sym_fn {:?}", value)?;
|
||||
}
|
||||
InlineAsmOperand::SymStatic { def_id } => {
|
||||
write!(fmt, "sym_static {:?}", def_id)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
write!(fmt, ", options({:?}))", options)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the list of labels for the edges to the successor basic blocks.
|
||||
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Return | Resume | Abort | Unreachable | GeneratorDrop => vec![],
|
||||
Goto { .. } => vec!["".into()],
|
||||
SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| {
|
||||
let param_env = ty::ParamEnv::empty();
|
||||
let switch_ty = tcx.lift(&switch_ty).unwrap();
|
||||
let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size;
|
||||
values
|
||||
.iter()
|
||||
.map(|&u| {
|
||||
ty::Const::from_scalar(tcx, Scalar::from_uint(u, size), switch_ty)
|
||||
.to_string()
|
||||
.into()
|
||||
})
|
||||
.chain(iter::once("otherwise".into()))
|
||||
.collect()
|
||||
}),
|
||||
Call { destination: Some(_), cleanup: Some(_), .. } => {
|
||||
vec!["return".into(), "unwind".into()]
|
||||
}
|
||||
Call { destination: Some(_), cleanup: None, .. } => vec!["return".into()],
|
||||
Call { destination: None, cleanup: Some(_), .. } => vec!["unwind".into()],
|
||||
Call { destination: None, cleanup: None, .. } => vec![],
|
||||
Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()],
|
||||
Yield { drop: None, .. } => vec!["resume".into()],
|
||||
DropAndReplace { unwind: None, .. } | Drop { unwind: None, .. } => {
|
||||
vec!["return".into()]
|
||||
}
|
||||
DropAndReplace { unwind: Some(_), .. } | Drop { unwind: Some(_), .. } => {
|
||||
vec!["return".into(), "unwind".into()]
|
||||
}
|
||||
Assert { cleanup: None, .. } => vec!["".into()],
|
||||
Assert { .. } => vec!["success".into(), "unwind".into()],
|
||||
FalseEdge { .. } => vec!["real".into(), "imaginary".into()],
|
||||
FalseUnwind { unwind: Some(_), .. } => vec!["real".into(), "cleanup".into()],
|
||||
FalseUnwind { unwind: None, .. } => vec!["real".into()],
|
||||
InlineAsm { destination: Some(_), .. } => vec!["".into()],
|
||||
InlineAsm { destination: None, .. } => vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
// Statements
|
||||
|
||||
|
507
src/librustc_middle/mir/terminator/mod.rs
Normal file
507
src/librustc_middle/mir/terminator/mod.rs
Normal file
@ -0,0 +1,507 @@
|
||||
use crate::mir::interpret::Scalar;
|
||||
use crate::ty::{self, Ty, TyCtxt};
|
||||
use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
|
||||
|
||||
use super::{
|
||||
AssertMessage, BasicBlock, InlineAsmOperand, Operand, Place, SourceInfo, Successors,
|
||||
SuccessorsMut,
|
||||
};
|
||||
pub use rustc_ast::ast::Mutability;
|
||||
use rustc_macros::HashStable;
|
||||
use rustc_span::Span;
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::{self, Debug, Formatter, Write};
|
||||
use std::iter;
|
||||
use std::slice;
|
||||
|
||||
pub use super::query::*;
|
||||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)]
|
||||
pub enum TerminatorKind<'tcx> {
|
||||
/// Block should have one successor in the graph; we jump there.
|
||||
Goto { target: BasicBlock },
|
||||
|
||||
/// Operand evaluates to an integer; jump depending on its value
|
||||
/// to one of the targets, and otherwise fallback to `otherwise`.
|
||||
SwitchInt {
|
||||
/// The discriminant value being tested.
|
||||
discr: Operand<'tcx>,
|
||||
|
||||
/// The type of value being tested.
|
||||
/// This is always the same as the type of `discr`.
|
||||
/// FIXME: remove this redundant information. Currently, it is relied on by pretty-printing.
|
||||
switch_ty: Ty<'tcx>,
|
||||
|
||||
/// Possible values. The locations to branch to in each case
|
||||
/// are found in the corresponding indices from the `targets` vector.
|
||||
values: Cow<'tcx, [u128]>,
|
||||
|
||||
/// Possible branch sites. The last element of this vector is used
|
||||
/// for the otherwise branch, so targets.len() == values.len() + 1
|
||||
/// should hold.
|
||||
//
|
||||
// This invariant is quite non-obvious and also could be improved.
|
||||
// One way to make this invariant is to have something like this instead:
|
||||
//
|
||||
// branches: Vec<(ConstInt, BasicBlock)>,
|
||||
// otherwise: Option<BasicBlock> // exhaustive if None
|
||||
//
|
||||
// However we’ve decided to keep this as-is until we figure a case
|
||||
// where some other approach seems to be strictly better than other.
|
||||
targets: Vec<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Indicates that the landing pad is finished and unwinding should
|
||||
/// continue. Emitted by `build::scope::diverge_cleanup`.
|
||||
Resume,
|
||||
|
||||
/// Indicates that the landing pad is finished and that the process
|
||||
/// should abort. Used to prevent unwinding for foreign items.
|
||||
Abort,
|
||||
|
||||
/// Indicates a normal return. The return place should have
|
||||
/// been filled in before this executes. This can occur multiple times
|
||||
/// in different basic blocks.
|
||||
Return,
|
||||
|
||||
/// Indicates a terminator that can never be reached.
|
||||
Unreachable,
|
||||
|
||||
/// Drop the `Place`.
|
||||
Drop { place: Place<'tcx>, target: BasicBlock, unwind: Option<BasicBlock> },
|
||||
|
||||
/// Drop the `Place` and assign the new value over it. This ensures
|
||||
/// that the assignment to `P` occurs *even if* the destructor for
|
||||
/// place unwinds. Its semantics are best explained by the
|
||||
/// elaboration:
|
||||
///
|
||||
/// ```
|
||||
/// BB0 {
|
||||
/// DropAndReplace(P <- V, goto BB1, unwind BB2)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// becomes
|
||||
///
|
||||
/// ```
|
||||
/// BB0 {
|
||||
/// Drop(P, goto BB1, unwind BB2)
|
||||
/// }
|
||||
/// BB1 {
|
||||
/// // P is now uninitialized
|
||||
/// P <- V
|
||||
/// }
|
||||
/// BB2 {
|
||||
/// // P is now uninitialized -- its dtor panicked
|
||||
/// P <- V
|
||||
/// }
|
||||
/// ```
|
||||
DropAndReplace {
|
||||
place: Place<'tcx>,
|
||||
value: Operand<'tcx>,
|
||||
target: BasicBlock,
|
||||
unwind: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Block ends with a call of a converging function.
|
||||
Call {
|
||||
/// The function that’s being called.
|
||||
func: Operand<'tcx>,
|
||||
/// Arguments the function is called with.
|
||||
/// These are owned by the callee, which is free to modify them.
|
||||
/// This allows the memory occupied by "by-value" arguments to be
|
||||
/// reused across function calls without duplicating the contents.
|
||||
args: Vec<Operand<'tcx>>,
|
||||
/// Destination for the return value. If some, the call is converging.
|
||||
destination: Option<(Place<'tcx>, BasicBlock)>,
|
||||
/// Cleanups to be done if the call unwinds.
|
||||
cleanup: Option<BasicBlock>,
|
||||
/// `true` if this is from a call in HIR rather than from an overloaded
|
||||
/// operator. True for overloaded function call.
|
||||
from_hir_call: bool,
|
||||
/// This `Span` is the span of the function, without the dot and receiver
|
||||
/// (e.g. `foo(a, b)` in `x.foo(a, b)`
|
||||
fn_span: Span,
|
||||
},
|
||||
|
||||
/// Jump to the target if the condition has the expected value,
|
||||
/// otherwise panic with a message and a cleanup target.
|
||||
Assert {
|
||||
cond: Operand<'tcx>,
|
||||
expected: bool,
|
||||
msg: AssertMessage<'tcx>,
|
||||
target: BasicBlock,
|
||||
cleanup: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// A suspend point.
|
||||
Yield {
|
||||
/// The value to return.
|
||||
value: Operand<'tcx>,
|
||||
/// Where to resume to.
|
||||
resume: BasicBlock,
|
||||
/// The place to store the resume argument in.
|
||||
resume_arg: Place<'tcx>,
|
||||
/// Cleanup to be done if the generator is dropped at this suspend point.
|
||||
drop: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Indicates the end of the dropping of a generator.
|
||||
GeneratorDrop,
|
||||
|
||||
/// A block where control flow only ever takes one real path, but borrowck
|
||||
/// needs to be more conservative.
|
||||
FalseEdge {
|
||||
/// The target normal control flow will take.
|
||||
real_target: BasicBlock,
|
||||
/// A block control flow could conceptually jump to, but won't in
|
||||
/// practice.
|
||||
imaginary_target: BasicBlock,
|
||||
},
|
||||
/// A terminator for blocks that only take one path in reality, but where we
|
||||
/// reserve the right to unwind in borrowck, even if it won't happen in practice.
|
||||
/// This can arise in infinite loops with no function calls for example.
|
||||
FalseUnwind {
|
||||
/// The target normal control flow will take.
|
||||
real_target: BasicBlock,
|
||||
/// The imaginary cleanup block link. This particular path will never be taken
|
||||
/// in practice, but in order to avoid fragility we want to always
|
||||
/// consider it in borrowck. We don't want to accept programs which
|
||||
/// pass borrowck only when `panic=abort` or some assertions are disabled
|
||||
/// due to release vs. debug mode builds. This needs to be an `Option` because
|
||||
/// of the `remove_noop_landing_pads` and `no_landing_pads` passes.
|
||||
unwind: Option<BasicBlock>,
|
||||
},
|
||||
|
||||
/// Block ends with an inline assembly block. This is a terminator since
|
||||
/// inline assembly is allowed to diverge.
|
||||
InlineAsm {
|
||||
/// The template for the inline assembly, with placeholders.
|
||||
template: &'tcx [InlineAsmTemplatePiece],
|
||||
|
||||
/// The operands for the inline assembly, as `Operand`s or `Place`s.
|
||||
operands: Vec<InlineAsmOperand<'tcx>>,
|
||||
|
||||
/// Miscellaneous options for the inline assembly.
|
||||
options: InlineAsmOptions,
|
||||
|
||||
/// Source spans for each line of the inline assembly code. These are
|
||||
/// used to map assembler errors back to the line in the source code.
|
||||
line_spans: &'tcx [Span],
|
||||
|
||||
/// Destination block after the inline assembly returns, unless it is
|
||||
/// diverging (InlineAsmOptions::NORETURN).
|
||||
destination: Option<BasicBlock>,
|
||||
},
|
||||
}
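
The `targets.len() == values.len() + 1` invariant documented on `SwitchInt` above is easy to get wrong, so here is a minimal standalone sketch of it. The types below are simplified stand-ins, not the rustc definitions; `BasicBlock` is just a numbered wrapper.

```
// Minimal standalone sketch of the `SwitchInt` invariant above.
// `BasicBlock` here is a simplified stand-in, not the rustc type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct BasicBlock(usize);

struct SwitchInt {
    values: Vec<u128>,
    targets: Vec<BasicBlock>, // last entry is the "otherwise" branch
}

impl SwitchInt {
    fn new(values: Vec<u128>, targets: Vec<BasicBlock>) -> SwitchInt {
        // The documented invariant: one target per value, plus "otherwise".
        assert_eq!(targets.len(), values.len() + 1);
        SwitchInt { values, targets }
    }

    fn target_for(&self, discr: u128) -> BasicBlock {
        match self.values.iter().position(|&v| v == discr) {
            Some(i) => self.targets[i],
            None => *self.targets.last().unwrap(), // the "otherwise" block
        }
    }
}

fn main() {
    // Mirrors `TerminatorKind::if_` below: values = [0], targets = [false_bb, true_bb].
    let sw = SwitchInt::new(vec![0], vec![BasicBlock(1), BasicBlock(2)]);
    assert_eq!(sw.target_for(0), BasicBlock(1)); // discriminant 0 takes the first target
    assert_eq!(sw.target_for(1), BasicBlock(2)); // anything else takes "otherwise"
}
```
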
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct Terminator<'tcx> {
pub source_info: SourceInfo,
pub kind: TerminatorKind<'tcx>,
}

impl<'tcx> Terminator<'tcx> {
pub fn successors(&self) -> Successors<'_> {
self.kind.successors()
}

pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
self.kind.successors_mut()
}

pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
self.kind.unwind()
}

pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
self.kind.unwind_mut()
}
}
|
||||
|
||||
impl<'tcx> TerminatorKind<'tcx> {
|
||||
pub fn if_(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
cond: Operand<'tcx>,
|
||||
t: BasicBlock,
|
||||
f: BasicBlock,
|
||||
) -> TerminatorKind<'tcx> {
|
||||
static BOOL_SWITCH_FALSE: &[u128] = &[0];
|
||||
TerminatorKind::SwitchInt {
|
||||
discr: cond,
|
||||
switch_ty: tcx.types.bool,
|
||||
values: From::from(BOOL_SWITCH_FALSE),
|
||||
targets: vec![f, t],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successors(&self) -> Successors<'_> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Resume
|
||||
| Abort
|
||||
| GeneratorDrop
|
||||
| Return
|
||||
| Unreachable
|
||||
| Call { destination: None, cleanup: None, .. }
|
||||
| InlineAsm { destination: None, .. } => None.into_iter().chain(&[]),
|
||||
Goto { target: ref t }
|
||||
| Call { destination: None, cleanup: Some(ref t), .. }
|
||||
| Call { destination: Some((_, ref t)), cleanup: None, .. }
|
||||
| Yield { resume: ref t, drop: None, .. }
|
||||
| DropAndReplace { target: ref t, unwind: None, .. }
|
||||
| Drop { target: ref t, unwind: None, .. }
|
||||
| Assert { target: ref t, cleanup: None, .. }
|
||||
| FalseUnwind { real_target: ref t, unwind: None }
|
||||
| InlineAsm { destination: Some(ref t), .. } => Some(t).into_iter().chain(&[]),
|
||||
Call { destination: Some((_, ref t)), cleanup: Some(ref u), .. }
|
||||
| Yield { resume: ref t, drop: Some(ref u), .. }
|
||||
| DropAndReplace { target: ref t, unwind: Some(ref u), .. }
|
||||
| Drop { target: ref t, unwind: Some(ref u), .. }
|
||||
| Assert { target: ref t, cleanup: Some(ref u), .. }
|
||||
| FalseUnwind { real_target: ref t, unwind: Some(ref u) } => {
|
||||
Some(t).into_iter().chain(slice::from_ref(u))
|
||||
}
|
||||
SwitchInt { ref targets, .. } => None.into_iter().chain(&targets[..]),
|
||||
FalseEdge { ref real_target, ref imaginary_target } => {
|
||||
Some(real_target).into_iter().chain(slice::from_ref(imaginary_target))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Resume
|
||||
| Abort
|
||||
| GeneratorDrop
|
||||
| Return
|
||||
| Unreachable
|
||||
| Call { destination: None, cleanup: None, .. }
|
||||
| InlineAsm { destination: None, .. } => None.into_iter().chain(&mut []),
|
||||
Goto { target: ref mut t }
|
||||
| Call { destination: None, cleanup: Some(ref mut t), .. }
|
||||
| Call { destination: Some((_, ref mut t)), cleanup: None, .. }
|
||||
| Yield { resume: ref mut t, drop: None, .. }
|
||||
| DropAndReplace { target: ref mut t, unwind: None, .. }
|
||||
| Drop { target: ref mut t, unwind: None, .. }
|
||||
| Assert { target: ref mut t, cleanup: None, .. }
|
||||
| FalseUnwind { real_target: ref mut t, unwind: None }
|
||||
| InlineAsm { destination: Some(ref mut t), .. } => Some(t).into_iter().chain(&mut []),
|
||||
Call { destination: Some((_, ref mut t)), cleanup: Some(ref mut u), .. }
|
||||
| Yield { resume: ref mut t, drop: Some(ref mut u), .. }
|
||||
| DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. }
|
||||
| Drop { target: ref mut t, unwind: Some(ref mut u), .. }
|
||||
| Assert { target: ref mut t, cleanup: Some(ref mut u), .. }
|
||||
| FalseUnwind { real_target: ref mut t, unwind: Some(ref mut u) } => {
|
||||
Some(t).into_iter().chain(slice::from_mut(u))
|
||||
}
|
||||
SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets[..]),
|
||||
FalseEdge { ref mut real_target, ref mut imaginary_target } => {
|
||||
Some(real_target).into_iter().chain(slice::from_mut(imaginary_target))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
|
||||
match *self {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Abort
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::Yield { .. }
|
||||
| TerminatorKind::SwitchInt { .. }
|
||||
| TerminatorKind::FalseEdge { .. }
|
||||
| TerminatorKind::InlineAsm { .. } => None,
|
||||
TerminatorKind::Call { cleanup: ref unwind, .. }
|
||||
| TerminatorKind::Assert { cleanup: ref unwind, .. }
|
||||
| TerminatorKind::DropAndReplace { ref unwind, .. }
|
||||
| TerminatorKind::Drop { ref unwind, .. }
|
||||
| TerminatorKind::FalseUnwind { ref unwind, .. } => Some(unwind),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
|
||||
match *self {
|
||||
TerminatorKind::Goto { .. }
|
||||
| TerminatorKind::Resume
|
||||
| TerminatorKind::Abort
|
||||
| TerminatorKind::Return
|
||||
| TerminatorKind::Unreachable
|
||||
| TerminatorKind::GeneratorDrop
|
||||
| TerminatorKind::Yield { .. }
|
||||
| TerminatorKind::SwitchInt { .. }
|
||||
| TerminatorKind::FalseEdge { .. }
|
||||
| TerminatorKind::InlineAsm { .. } => None,
|
||||
TerminatorKind::Call { cleanup: ref mut unwind, .. }
|
||||
| TerminatorKind::Assert { cleanup: ref mut unwind, .. }
|
||||
| TerminatorKind::DropAndReplace { ref mut unwind, .. }
|
||||
| TerminatorKind::Drop { ref mut unwind, .. }
|
||||
| TerminatorKind::FalseUnwind { ref mut unwind, .. } => Some(unwind),
|
||||
}
|
||||
}
|
||||
}
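
The nested `Option` returned by `unwind`/`unwind_mut` above is easy to misread: the outer `None` means the terminator kind has no unwind edge at all, while `Some(&None)` means it can unwind but currently has no cleanup block. A minimal standalone sketch of that shape, with simplified stand-in types rather than the rustc ones:

```
// Minimal standalone sketch of the `Option<&Option<BasicBlock>>` shape above;
// simplified stand-in types, not the rustc definitions.
#[derive(Clone, Copy, Debug, PartialEq)]
struct BasicBlock(usize);

enum Terminator {
    Return,                              // this kind has no unwind edge at all
    Drop { unwind: Option<BasicBlock> }, // may unwind; cleanup block is optional
}

impl Terminator {
    fn unwind(&self) -> Option<&Option<BasicBlock>> {
        match self {
            Terminator::Return => None,
            Terminator::Drop { unwind } => Some(unwind),
        }
    }
}

fn main() {
    assert_eq!(Terminator::Return.unwind(), None);
    assert_eq!(Terminator::Drop { unwind: None }.unwind(), Some(&None));
    assert_eq!(
        Terminator::Drop { unwind: Some(BasicBlock(2)) }.unwind(),
        Some(&Some(BasicBlock(2)))
    );
}
```
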
impl<'tcx> Debug for TerminatorKind<'tcx> {
|
||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||
self.fmt_head(fmt)?;
|
||||
let successor_count = self.successors().count();
|
||||
let labels = self.fmt_successor_labels();
|
||||
assert_eq!(successor_count, labels.len());
|
||||
|
||||
match successor_count {
|
||||
0 => Ok(()),
|
||||
|
||||
1 => write!(fmt, " -> {:?}", self.successors().next().unwrap()),
|
||||
|
||||
_ => {
|
||||
write!(fmt, " -> [")?;
|
||||
for (i, target) in self.successors().enumerate() {
|
||||
if i > 0 {
|
||||
write!(fmt, ", ")?;
|
||||
}
|
||||
write!(fmt, "{}: {:?}", labels[i], target)?;
|
||||
}
|
||||
write!(fmt, "]")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
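
To see what the `fmt_head` / `fmt_successor_labels` split above buys, here is a minimal standalone sketch of the same formatting scheme with a simplified stand-in type; the real MIR dump strings come from the rustc code above, this only approximates their shape.

```
// Minimal standalone sketch of the head + successor-label formatting above;
// simplified stand-in type, not the rustc `TerminatorKind`.
use std::fmt;

struct Term {
    head: String,              // what `fmt_head` would produce, e.g. "drop(_2)"
    labels: Vec<&'static str>, // what `fmt_successor_labels` would return
    targets: Vec<usize>,       // successor basic block numbers
}

impl fmt::Debug for Term {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "{}", self.head)?;
        assert_eq!(self.labels.len(), self.targets.len());
        match self.targets.len() {
            0 => Ok(()),
            1 => write!(fmt, " -> bb{}", self.targets[0]),
            _ => {
                write!(fmt, " -> [")?;
                for (i, (label, bb)) in self.labels.iter().zip(&self.targets).enumerate() {
                    if i > 0 {
                        write!(fmt, ", ")?;
                    }
                    write!(fmt, "{}: bb{}", label, bb)?;
                }
                write!(fmt, "]")
            }
        }
    }
}

fn main() {
    let term = Term {
        head: "drop(_2)".to_string(),
        labels: vec!["return", "unwind"],
        targets: vec![4, 5],
    };
    // Prints something like: drop(_2) -> [return: bb4, unwind: bb5]
    println!("{:?}", term);
}
```
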
impl<'tcx> TerminatorKind<'tcx> {
|
||||
/// Writes the "head" part of the terminator; that is, its name and the data it uses to pick the
|
||||
/// successor basic block, if any. The only information not included is the list of possible
|
||||
/// successors, which may be rendered differently between the text and the graphviz format.
|
||||
pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
|
||||
use self::TerminatorKind::*;
|
||||
match self {
|
||||
Goto { .. } => write!(fmt, "goto"),
|
||||
SwitchInt { discr, .. } => write!(fmt, "switchInt({:?})", discr),
|
||||
Return => write!(fmt, "return"),
|
||||
GeneratorDrop => write!(fmt, "generator_drop"),
|
||||
Resume => write!(fmt, "resume"),
|
||||
Abort => write!(fmt, "abort"),
|
||||
Yield { value, resume_arg, .. } => write!(fmt, "{:?} = yield({:?})", resume_arg, value),
|
||||
Unreachable => write!(fmt, "unreachable"),
|
||||
Drop { place, .. } => write!(fmt, "drop({:?})", place),
|
||||
DropAndReplace { place, value, .. } => {
|
||||
write!(fmt, "replace({:?} <- {:?})", place, value)
|
||||
}
|
||||
Call { func, args, destination, .. } => {
|
||||
if let Some((destination, _)) = destination {
|
||||
write!(fmt, "{:?} = ", destination)?;
|
||||
}
|
||||
write!(fmt, "{:?}(", func)?;
|
||||
for (index, arg) in args.iter().enumerate() {
|
||||
if index > 0 {
|
||||
write!(fmt, ", ")?;
|
||||
}
|
||||
write!(fmt, "{:?}", arg)?;
|
||||
}
|
||||
write!(fmt, ")")
|
||||
}
|
||||
Assert { cond, expected, msg, .. } => {
|
||||
write!(fmt, "assert(")?;
|
||||
if !expected {
|
||||
write!(fmt, "!")?;
|
||||
}
|
||||
write!(fmt, "{:?}, ", cond)?;
|
||||
msg.fmt_assert_args(fmt)?;
|
||||
write!(fmt, ")")
|
||||
}
|
||||
FalseEdge { .. } => write!(fmt, "falseEdge"),
|
||||
FalseUnwind { .. } => write!(fmt, "falseUnwind"),
|
||||
InlineAsm { template, ref operands, options, .. } => {
|
||||
write!(fmt, "asm!(\"{}\"", InlineAsmTemplatePiece::to_string(template))?;
|
||||
for op in operands {
|
||||
write!(fmt, ", ")?;
|
||||
let print_late = |&late| if late { "late" } else { "" };
|
||||
match op {
|
||||
InlineAsmOperand::In { reg, value } => {
|
||||
write!(fmt, "in({}) {:?}", reg, value)?;
|
||||
}
|
||||
InlineAsmOperand::Out { reg, late, place: Some(place) } => {
|
||||
write!(fmt, "{}out({}) {:?}", print_late(late), reg, place)?;
|
||||
}
|
||||
InlineAsmOperand::Out { reg, late, place: None } => {
|
||||
write!(fmt, "{}out({}) _", print_late(late), reg)?;
|
||||
}
|
||||
InlineAsmOperand::InOut {
|
||||
reg,
|
||||
late,
|
||||
in_value,
|
||||
out_place: Some(out_place),
|
||||
} => {
|
||||
write!(
|
||||
fmt,
|
||||
"in{}out({}) {:?} => {:?}",
|
||||
print_late(late),
|
||||
reg,
|
||||
in_value,
|
||||
out_place
|
||||
)?;
|
||||
}
|
||||
InlineAsmOperand::InOut { reg, late, in_value, out_place: None } => {
|
||||
write!(fmt, "in{}out({}) {:?} => _", print_late(late), reg, in_value)?;
|
||||
}
|
||||
InlineAsmOperand::Const { value } => {
|
||||
write!(fmt, "const {:?}", value)?;
|
||||
}
|
||||
InlineAsmOperand::SymFn { value } => {
|
||||
write!(fmt, "sym_fn {:?}", value)?;
|
||||
}
|
||||
InlineAsmOperand::SymStatic { def_id } => {
|
||||
write!(fmt, "sym_static {:?}", def_id)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
write!(fmt, ", options({:?}))", options)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the list of labels for the edges to the successor basic blocks.
|
||||
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
|
||||
use self::TerminatorKind::*;
|
||||
match *self {
|
||||
Return | Resume | Abort | Unreachable | GeneratorDrop => vec![],
|
||||
Goto { .. } => vec!["".into()],
|
||||
SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| {
|
||||
let param_env = ty::ParamEnv::empty();
|
||||
let switch_ty = tcx.lift(&switch_ty).unwrap();
|
||||
let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size;
|
||||
values
|
||||
.iter()
|
||||
.map(|&u| {
|
||||
ty::Const::from_scalar(tcx, Scalar::from_uint(u, size), switch_ty)
|
||||
.to_string()
|
||||
.into()
|
||||
})
|
||||
.chain(iter::once("otherwise".into()))
|
||||
.collect()
|
||||
}),
|
||||
Call { destination: Some(_), cleanup: Some(_), .. } => {
|
||||
vec!["return".into(), "unwind".into()]
|
||||
}
|
||||
Call { destination: Some(_), cleanup: None, .. } => vec!["return".into()],
|
||||
Call { destination: None, cleanup: Some(_), .. } => vec!["unwind".into()],
|
||||
Call { destination: None, cleanup: None, .. } => vec![],
|
||||
Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()],
|
||||
Yield { drop: None, .. } => vec!["resume".into()],
|
||||
DropAndReplace { unwind: None, .. } | Drop { unwind: None, .. } => {
|
||||
vec!["return".into()]
|
||||
}
|
||||
DropAndReplace { unwind: Some(_), .. } | Drop { unwind: Some(_), .. } => {
|
||||
vec!["return".into(), "unwind".into()]
|
||||
}
|
||||
Assert { cleanup: None, .. } => vec!["".into()],
|
||||
Assert { .. } => vec!["success".into(), "unwind".into()],
|
||||
FalseEdge { .. } => vec!["real".into(), "imaginary".into()],
|
||||
FalseUnwind { unwind: Some(_), .. } => vec!["real".into(), "cleanup".into()],
|
||||
FalseUnwind { unwind: None, .. } => vec!["real".into()],
|
||||
InlineAsm { destination: Some(_), .. } => vec!["".into()],
|
||||
InlineAsm { destination: None, .. } => vec![],
|
||||
}
|
||||
}
|
||||
}
|
@ -46,8 +46,8 @@ impl TypeRelation<'tcx> for Match<'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
_: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
self.relate(a, b)
|
||||
}
|
||||
@ -112,8 +112,8 @@ impl TypeRelation<'tcx> for Match<'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
|
@ -57,7 +57,6 @@ pub enum TypeError<'tcx> {
|
||||
/// type).
|
||||
CyclicTy(Ty<'tcx>),
|
||||
ProjectionMismatched(ExpectedFound<DefId>),
|
||||
ProjectionBoundsLength(ExpectedFound<usize>),
|
||||
ExistentialMismatch(ExpectedFound<&'tcx ty::List<ty::ExistentialPredicate<'tcx>>>),
|
||||
ObjectUnsafeCoercion(DefId),
|
||||
ConstMismatch(ExpectedFound<&'tcx ty::Const<'tcx>>),
|
||||
@ -174,13 +173,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> {
|
||||
tcx.def_path_str(values.found)
|
||||
)
|
||||
}),
|
||||
ProjectionBoundsLength(ref values) => write!(
|
||||
f,
|
||||
"expected {} associated type binding{}, found {}",
|
||||
values.expected,
|
||||
pluralize!(values.expected),
|
||||
values.found
|
||||
),
|
||||
ExistentialMismatch(ref values) => report_maybe_different(
|
||||
f,
|
||||
&format!("trait `{}`", values.expected),
|
||||
@ -216,7 +208,6 @@ impl<'tcx> TypeError<'tcx> {
|
||||
| RegionsPlaceholderMismatch
|
||||
| Traits(_)
|
||||
| ProjectionMismatched(_)
|
||||
| ProjectionBoundsLength(_)
|
||||
| ExistentialMismatch(_)
|
||||
| ConstMismatch(_)
|
||||
| IntrinsicCast
|
||||
|
@ -88,13 +88,13 @@ impl FlagComputation {
|
||||
self.add_substs(substs);
|
||||
}
|
||||
|
||||
&ty::GeneratorWitness(ref ts) => {
|
||||
&ty::GeneratorWitness(ts) => {
|
||||
let mut computation = FlagComputation::new();
|
||||
computation.add_tys(&ts.skip_binder()[..]);
|
||||
computation.add_tys(ts.skip_binder());
|
||||
self.add_bound_computation(computation);
|
||||
}
|
||||
|
||||
&ty::Closure(_, ref substs) => {
|
||||
&ty::Closure(_, substs) => {
|
||||
self.add_substs(substs);
|
||||
}
|
||||
|
||||
@ -122,7 +122,7 @@ impl FlagComputation {
|
||||
self.add_substs(substs);
|
||||
}
|
||||
|
||||
&ty::Projection(ref data) => {
|
||||
&ty::Projection(data) => {
|
||||
self.add_flags(TypeFlags::HAS_TY_PROJECTION);
|
||||
self.add_projection_ty(data);
|
||||
}
|
||||
@ -211,7 +211,7 @@ impl FlagComputation {
|
||||
|
||||
self.add_bound_computation(computation);
|
||||
}
|
||||
ty::PredicateKind::Projection(projection) => {
|
||||
&ty::PredicateKind::Projection(projection) => {
|
||||
let mut computation = FlagComputation::new();
|
||||
let ty::ProjectionPredicate { projection_ty, ty } = projection.skip_binder();
|
||||
computation.add_projection_ty(projection_ty);
|
||||
@ -298,7 +298,7 @@ impl FlagComputation {
|
||||
self.add_ty(projection.ty);
|
||||
}
|
||||
|
||||
fn add_projection_ty(&mut self, projection_ty: &ty::ProjectionTy<'_>) {
|
||||
fn add_projection_ty(&mut self, projection_ty: ty::ProjectionTy<'_>) {
|
||||
self.add_substs(projection_ty.substs);
|
||||
}
|
||||
|
||||
|
@ -336,7 +336,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
{
|
||||
fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
|
||||
self.outer_index.shift_in(1);
|
||||
let result = t.skip_binder().visit_with(self);
|
||||
let result = t.as_ref().skip_binder().visit_with(self);
|
||||
self.outer_index.shift_out(1);
|
||||
result
|
||||
}
|
||||
@ -558,7 +558,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
let fld_c = |bound_ct, ty| {
|
||||
self.mk_const(ty::Const { val: ty::ConstKind::Bound(ty::INNERMOST, bound_ct), ty })
|
||||
};
|
||||
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t, fld_c)
|
||||
self.replace_escaping_bound_vars(value.as_ref().skip_binder(), fld_r, fld_t, fld_c)
|
||||
}
|
||||
|
||||
/// Replaces all escaping bound vars. The `fld_r` closure replaces escaping
|
||||
@ -617,7 +617,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
H: FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx>,
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t, fld_c)
|
||||
self.replace_escaping_bound_vars(value.as_ref().skip_binder(), fld_r, fld_t, fld_c)
|
||||
}
|
||||
|
||||
/// Replaces any late-bound regions bound in `value` with
|
||||
@ -673,7 +673,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
let mut collector = LateBoundRegionsCollector::new(just_constraint);
|
||||
let result = value.skip_binder().visit_with(&mut collector);
|
||||
let result = value.as_ref().skip_binder().visit_with(&mut collector);
|
||||
assert!(!result); // should never have stopped early
|
||||
collector.regions
|
||||
}
|
||||
|
@ -2303,7 +2303,7 @@ impl<'tcx> ty::Instance<'tcx> {
|
||||
|
||||
let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
|
||||
sig.map_bound(|sig| tcx.mk_fn_sig(
|
||||
iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
|
||||
iter::once(env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
|
||||
sig.output(),
|
||||
sig.c_variadic,
|
||||
sig.unsafety,
|
||||
|
@ -189,7 +189,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||
where
|
||||
T: Print<'tcx, Self, Output = Self, Error = Self::Error> + TypeFoldable<'tcx>,
|
||||
{
|
||||
value.skip_binder().print(self)
|
||||
value.as_ref().skip_binder().print(self)
|
||||
}
|
||||
|
||||
/// Prints comma-separated elements.
|
||||
|
@ -13,7 +13,6 @@ use rustc_hir::def_id::DefId;
|
||||
use rustc_span::DUMMY_SP;
|
||||
use rustc_target::spec::abi;
|
||||
use std::iter;
|
||||
use std::rc::Rc;
|
||||
|
||||
pub type RelateResult<'tcx, T> = Result<T, TypeError<'tcx>>;
|
||||
|
||||
@ -42,7 +41,7 @@ pub trait TypeRelation<'tcx>: Sized {
|
||||
}
|
||||
|
||||
/// Generic relation routine suitable for most anything.
|
||||
fn relate<T: Relate<'tcx>>(&mut self, a: &T, b: &T) -> RelateResult<'tcx, T> {
|
||||
fn relate<T: Relate<'tcx>>(&mut self, a: T, b: T) -> RelateResult<'tcx, T> {
|
||||
Relate::relate(self, a, b)
|
||||
}
|
||||
|
||||
@ -68,8 +67,8 @@ pub trait TypeRelation<'tcx>: Sized {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
variance: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T>;
|
||||
|
||||
// Overridable relations. You shouldn't typically call these
|
||||
@ -94,18 +93,18 @@ pub trait TypeRelation<'tcx>: Sized {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>;
|
||||
}
|
||||
|
||||
pub trait Relate<'tcx>: TypeFoldable<'tcx> {
|
||||
pub trait Relate<'tcx>: TypeFoldable<'tcx> + Copy {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Self,
|
||||
b: &Self,
|
||||
a: Self,
|
||||
b: Self,
|
||||
) -> RelateResult<'tcx, Self>;
|
||||
}
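
The new `Copy` supertrait bound is what makes the switch from `&T` to by-value `T` arguments cheap. A minimal standalone sketch of the by-value style, using simplified stand-ins rather than the real `Relate`/`TypeRelation` traits:

```
// Minimal standalone sketch of the by-value `Relate` style this change moves to;
// simplified stand-ins, not the rustc traits.
trait Relate: Copy {
    fn relate(a: Self, b: Self) -> Result<Self, String>;
}

// Generic helper mirroring `TypeRelation::relate`: arguments are taken by
// value, which is cheap because of the `Relate: Copy` bound.
fn relate<T: Relate>(a: T, b: T) -> Result<T, String> {
    T::relate(a, b)
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Mutability {
    Not,
    Mut,
}

impl Relate for Mutability {
    fn relate(a: Self, b: Self) -> Result<Self, String> {
        if a == b { Ok(a) } else { Err("mutability mismatch".to_string()) }
    }
}

fn main() {
    assert_eq!(relate(Mutability::Mut, Mutability::Mut), Ok(Mutability::Mut));
    assert!(relate(Mutability::Not, Mutability::Mut).is_err());
}
```
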
@ -115,8 +114,8 @@ pub trait Relate<'tcx>: TypeFoldable<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::TypeAndMut<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::TypeAndMut<'tcx>,
|
||||
b: &ty::TypeAndMut<'tcx>,
|
||||
a: ty::TypeAndMut<'tcx>,
|
||||
b: ty::TypeAndMut<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> {
|
||||
debug!("{}.mts({:?}, {:?})", relation.tag(), a, b);
|
||||
if a.mutbl != b.mutbl {
|
||||
@ -127,7 +126,7 @@ impl<'tcx> Relate<'tcx> for ty::TypeAndMut<'tcx> {
|
||||
ast::Mutability::Not => ty::Covariant,
|
||||
ast::Mutability::Mut => ty::Invariant,
|
||||
};
|
||||
let ty = relation.relate_with_variance(variance, &a.ty, &b.ty)?;
|
||||
let ty = relation.relate_with_variance(variance, a.ty, b.ty)?;
|
||||
Ok(ty::TypeAndMut { ty, mutbl })
|
||||
}
|
||||
}
|
||||
@ -143,7 +142,7 @@ pub fn relate_substs<R: TypeRelation<'tcx>>(
|
||||
|
||||
let params = a_subst.iter().zip(b_subst).enumerate().map(|(i, (a, b))| {
|
||||
let variance = variances.map_or(ty::Invariant, |v| v[i]);
|
||||
relation.relate_with_variance(variance, &a, &b)
|
||||
relation.relate_with_variance(variance, a, b)
|
||||
});
|
||||
|
||||
Ok(tcx.mk_substs(params)?)
|
||||
@ -152,8 +151,8 @@ pub fn relate_substs<R: TypeRelation<'tcx>>(
|
||||
impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::FnSig<'tcx>,
|
||||
b: &ty::FnSig<'tcx>,
|
||||
a: ty::FnSig<'tcx>,
|
||||
b: ty::FnSig<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::FnSig<'tcx>> {
|
||||
let tcx = relation.tcx();
|
||||
|
||||
@ -164,8 +163,8 @@ impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> {
|
||||
&b.c_variadic,
|
||||
)));
|
||||
}
|
||||
let unsafety = relation.relate(&a.unsafety, &b.unsafety)?;
|
||||
let abi = relation.relate(&a.abi, &b.abi)?;
|
||||
let unsafety = relation.relate(a.unsafety, b.unsafety)?;
|
||||
let abi = relation.relate(a.abi, b.abi)?;
|
||||
|
||||
if a.inputs().len() != b.inputs().len() {
|
||||
return Err(TypeError::ArgCount);
|
||||
@ -180,9 +179,9 @@ impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> {
|
||||
.chain(iter::once(((a.output(), b.output()), true)))
|
||||
.map(|((a, b), is_output)| {
|
||||
if is_output {
|
||||
relation.relate(&a, &b)
|
||||
relation.relate(a, b)
|
||||
} else {
|
||||
relation.relate_with_variance(ty::Contravariant, &a, &b)
|
||||
relation.relate_with_variance(ty::Contravariant, a, b)
|
||||
}
|
||||
});
|
||||
Ok(ty::FnSig {
|
||||
@ -197,13 +196,13 @@ impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ast::Unsafety {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ast::Unsafety,
|
||||
b: &ast::Unsafety,
|
||||
a: ast::Unsafety,
|
||||
b: ast::Unsafety,
|
||||
) -> RelateResult<'tcx, ast::Unsafety> {
|
||||
if a != b {
|
||||
Err(TypeError::UnsafetyMismatch(expected_found(relation, a, b)))
|
||||
Err(TypeError::UnsafetyMismatch(expected_found(relation, &a, &b)))
|
||||
} else {
|
||||
Ok(*a)
|
||||
Ok(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -211,18 +210,18 @@ impl<'tcx> Relate<'tcx> for ast::Unsafety {
|
||||
impl<'tcx> Relate<'tcx> for abi::Abi {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &abi::Abi,
|
||||
b: &abi::Abi,
|
||||
a: abi::Abi,
|
||||
b: abi::Abi,
|
||||
) -> RelateResult<'tcx, abi::Abi> {
|
||||
if a == b { Ok(*a) } else { Err(TypeError::AbiMismatch(expected_found(relation, a, b))) }
|
||||
if a == b { Ok(a) } else { Err(TypeError::AbiMismatch(expected_found(relation, &a, &b))) }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::ProjectionTy<'tcx>,
|
||||
b: &ty::ProjectionTy<'tcx>,
|
||||
a: ty::ProjectionTy<'tcx>,
|
||||
b: ty::ProjectionTy<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> {
|
||||
if a.item_def_id != b.item_def_id {
|
||||
Err(TypeError::ProjectionMismatched(expected_found(
|
||||
@ -231,7 +230,7 @@ impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> {
|
||||
&b.item_def_id,
|
||||
)))
|
||||
} else {
|
||||
let substs = relation.relate(&a.substs, &b.substs)?;
|
||||
let substs = relation.relate(a.substs, b.substs)?;
|
||||
Ok(ty::ProjectionTy { item_def_id: a.item_def_id, substs: &substs })
|
||||
}
|
||||
}
|
||||
@ -240,8 +239,8 @@ impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::ExistentialProjection<'tcx>,
|
||||
b: &ty::ExistentialProjection<'tcx>,
|
||||
a: ty::ExistentialProjection<'tcx>,
|
||||
b: ty::ExistentialProjection<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>> {
|
||||
if a.item_def_id != b.item_def_id {
|
||||
Err(TypeError::ProjectionMismatched(expected_found(
|
||||
@ -250,37 +249,18 @@ impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> {
|
||||
&b.item_def_id,
|
||||
)))
|
||||
} else {
|
||||
let ty = relation.relate_with_variance(ty::Invariant, &a.ty, &b.ty)?;
|
||||
let substs = relation.relate_with_variance(ty::Invariant, &a.substs, &b.substs)?;
|
||||
let ty = relation.relate_with_variance(ty::Invariant, a.ty, b.ty)?;
|
||||
let substs = relation.relate_with_variance(ty::Invariant, a.substs, b.substs)?;
|
||||
Ok(ty::ExistentialProjection { item_def_id: a.item_def_id, substs, ty })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for Vec<ty::PolyExistentialProjection<'tcx>> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Vec<ty::PolyExistentialProjection<'tcx>>,
|
||||
b: &Vec<ty::PolyExistentialProjection<'tcx>>,
|
||||
) -> RelateResult<'tcx, Vec<ty::PolyExistentialProjection<'tcx>>> {
|
||||
// To be compatible, `a` and `b` must be for precisely the
|
||||
// same set of traits and item names. We always require that
|
||||
// projection bounds lists are sorted by trait-def-id and item-name,
|
||||
// so we can just iterate through the lists pairwise, so long as they are the
|
||||
// same length.
|
||||
if a.len() != b.len() {
|
||||
Err(TypeError::ProjectionBoundsLength(expected_found(relation, &a.len(), &b.len())))
|
||||
} else {
|
||||
a.iter().zip(b).map(|(a, b)| relation.relate(a, b)).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::TraitRef<'tcx>,
|
||||
b: &ty::TraitRef<'tcx>,
|
||||
a: ty::TraitRef<'tcx>,
|
||||
b: ty::TraitRef<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::TraitRef<'tcx>> {
|
||||
// Different traits cannot be related.
|
||||
if a.def_id != b.def_id {
|
||||
@ -295,8 +275,8 @@ impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::ExistentialTraitRef<'tcx>,
|
||||
b: &ty::ExistentialTraitRef<'tcx>,
|
||||
a: ty::ExistentialTraitRef<'tcx>,
|
||||
b: ty::ExistentialTraitRef<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> {
|
||||
// Different traits cannot be related.
|
||||
if a.def_id != b.def_id {
|
||||
@ -308,18 +288,18 @@ impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, TypeFoldable)]
|
||||
#[derive(Copy, Debug, Clone, TypeFoldable)]
|
||||
struct GeneratorWitness<'tcx>(&'tcx ty::List<Ty<'tcx>>);
|
||||
|
||||
impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &GeneratorWitness<'tcx>,
|
||||
b: &GeneratorWitness<'tcx>,
|
||||
a: GeneratorWitness<'tcx>,
|
||||
b: GeneratorWitness<'tcx>,
|
||||
) -> RelateResult<'tcx, GeneratorWitness<'tcx>> {
|
||||
assert_eq!(a.0.len(), b.0.len());
|
||||
let tcx = relation.tcx();
|
||||
let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(&a, &b)))?;
|
||||
let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(a, b)))?;
|
||||
Ok(GeneratorWitness(types))
|
||||
}
|
||||
}
|
||||
@ -327,8 +307,8 @@ impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for Ty<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Ty<'tcx>,
|
||||
b: &Ty<'tcx>,
|
||||
a: Ty<'tcx>,
|
||||
b: Ty<'tcx>,
|
||||
) -> RelateResult<'tcx, Ty<'tcx>> {
|
||||
relation.tys(a, b)
|
||||
}
|
||||
@ -379,7 +359,7 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
|
||||
(&ty::Foreign(a_id), &ty::Foreign(b_id)) if a_id == b_id => Ok(tcx.mk_foreign(a_id)),
|
||||
|
||||
(&ty::Dynamic(ref a_obj, ref a_region), &ty::Dynamic(ref b_obj, ref b_region)) => {
|
||||
(&ty::Dynamic(a_obj, a_region), &ty::Dynamic(b_obj, b_region)) => {
|
||||
let region_bound = relation.with_cause(Cause::ExistentialRegionBound, |relation| {
|
||||
relation.relate_with_variance(ty::Contravariant, a_region, b_region)
|
||||
})?;
|
||||
@ -392,7 +372,7 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
// All Generator types with the same id represent
|
||||
// the (anonymous) type of the same generator expression. So
|
||||
// all of their regions should be equated.
|
||||
let substs = relation.relate(&a_substs, &b_substs)?;
|
||||
let substs = relation.relate(a_substs, b_substs)?;
|
||||
Ok(tcx.mk_generator(a_id, substs, movability))
|
||||
}
|
||||
|
||||
@ -402,7 +382,7 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
let a_types = a_types.map_bound(GeneratorWitness);
|
||||
let b_types = b_types.map_bound(GeneratorWitness);
|
||||
// Then remove the GeneratorWitness for the result
|
||||
let types = relation.relate(&a_types, &b_types)?.map_bound(|witness| witness.0);
|
||||
let types = relation.relate(a_types, b_types)?.map_bound(|witness| witness.0);
|
||||
Ok(tcx.mk_generator_witness(types))
|
||||
}
|
||||
|
||||
@ -410,26 +390,26 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
// All Closure types with the same id represent
|
||||
// the (anonymous) type of the same closure expression. So
|
||||
// all of their regions should be equated.
|
||||
let substs = relation.relate(&a_substs, &b_substs)?;
|
||||
let substs = relation.relate(a_substs, b_substs)?;
|
||||
Ok(tcx.mk_closure(a_id, &substs))
|
||||
}
|
||||
|
||||
(&ty::RawPtr(ref a_mt), &ty::RawPtr(ref b_mt)) => {
|
||||
(&ty::RawPtr(a_mt), &ty::RawPtr(b_mt)) => {
|
||||
let mt = relation.relate(a_mt, b_mt)?;
|
||||
Ok(tcx.mk_ptr(mt))
|
||||
}
|
||||
|
||||
(&ty::Ref(a_r, a_ty, a_mutbl), &ty::Ref(b_r, b_ty, b_mutbl)) => {
|
||||
let r = relation.relate_with_variance(ty::Contravariant, &a_r, &b_r)?;
|
||||
let r = relation.relate_with_variance(ty::Contravariant, a_r, b_r)?;
|
||||
let a_mt = ty::TypeAndMut { ty: a_ty, mutbl: a_mutbl };
|
||||
let b_mt = ty::TypeAndMut { ty: b_ty, mutbl: b_mutbl };
|
||||
let mt = relation.relate(&a_mt, &b_mt)?;
|
||||
let mt = relation.relate(a_mt, b_mt)?;
|
||||
Ok(tcx.mk_ref(r, mt))
|
||||
}
|
||||
|
||||
(&ty::Array(a_t, sz_a), &ty::Array(b_t, sz_b)) => {
|
||||
let t = relation.relate(&a_t, &b_t)?;
|
||||
match relation.relate(&sz_a, &sz_b) {
|
||||
let t = relation.relate(a_t, b_t)?;
|
||||
match relation.relate(sz_a, sz_b) {
|
||||
Ok(sz) => Ok(tcx.mk_ty(ty::Array(t, sz))),
|
||||
// FIXME(#72219) Implement improved diagnostics for mismatched array
|
||||
// length?
|
||||
@ -450,16 +430,14 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
}
|
||||
|
||||
(&ty::Slice(a_t), &ty::Slice(b_t)) => {
|
||||
let t = relation.relate(&a_t, &b_t)?;
|
||||
let t = relation.relate(a_t, b_t)?;
|
||||
Ok(tcx.mk_slice(t))
|
||||
}
|
||||
|
||||
(&ty::Tuple(as_), &ty::Tuple(bs)) => {
|
||||
if as_.len() == bs.len() {
|
||||
Ok(tcx.mk_tup(
|
||||
as_.iter()
|
||||
.zip(bs)
|
||||
.map(|(a, b)| relation.relate(&a.expect_ty(), &b.expect_ty())),
|
||||
as_.iter().zip(bs).map(|(a, b)| relation.relate(a.expect_ty(), b.expect_ty())),
|
||||
)?)
|
||||
} else if !(as_.is_empty() || bs.is_empty()) {
|
||||
Err(TypeError::TupleSize(expected_found(relation, &as_.len(), &bs.len())))
|
||||
@ -476,12 +454,12 @@ pub fn super_relate_tys<R: TypeRelation<'tcx>>(
|
||||
}
|
||||
|
||||
(&ty::FnPtr(a_fty), &ty::FnPtr(b_fty)) => {
|
||||
let fty = relation.relate(&a_fty, &b_fty)?;
|
||||
let fty = relation.relate(a_fty, b_fty)?;
|
||||
Ok(tcx.mk_fn_ptr(fty))
|
||||
}
|
||||
|
||||
// these two are already handled downstream in case of lazy normalization
|
||||
(ty::Projection(a_data), ty::Projection(b_data)) => {
|
||||
(&ty::Projection(a_data), &ty::Projection(b_data)) => {
|
||||
let projection_ty = relation.relate(a_data, b_data)?;
|
||||
Ok(tcx.mk_projection(projection_ty.item_def_id, projection_ty.substs))
|
||||
}
|
||||
@ -603,8 +581,8 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
|
||||
ty::ConstKind::Unevaluated(b_def_id, b_substs, b_promoted),
|
||||
) if a_def_id == b_def_id && a_promoted == b_promoted => {
|
||||
let substs =
|
||||
relation.relate_with_variance(ty::Variance::Invariant, &a_substs, &b_substs)?;
|
||||
Ok(ty::ConstKind::Unevaluated(a_def_id, &substs, a_promoted))
|
||||
relation.relate_with_variance(ty::Variance::Invariant, a_substs, b_substs)?;
|
||||
Ok(ty::ConstKind::Unevaluated(a_def_id, substs, a_promoted))
|
||||
}
|
||||
_ => Err(TypeError::ConstMismatch(expected_found(relation, &a, &b))),
|
||||
};
|
||||
@ -614,8 +592,8 @@ pub fn super_relate_consts<R: TypeRelation<'tcx>>(
|
||||
impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Self,
|
||||
b: &Self,
|
||||
a: Self,
|
||||
b: Self,
|
||||
) -> RelateResult<'tcx, Self> {
|
||||
let tcx = relation.tcx();
|
||||
|
||||
@ -629,16 +607,16 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
|
||||
b_v.sort_by(|a, b| a.stable_cmp(tcx, b));
|
||||
b_v.dedup();
|
||||
if a_v.len() != b_v.len() {
|
||||
return Err(TypeError::ExistentialMismatch(expected_found(relation, a, b)));
|
||||
return Err(TypeError::ExistentialMismatch(expected_found(relation, &a, &b)));
|
||||
}
|
||||
|
||||
let v = a_v.into_iter().zip(b_v.into_iter()).map(|(ep_a, ep_b)| {
|
||||
use crate::ty::ExistentialPredicate::*;
|
||||
match (ep_a, ep_b) {
|
||||
(Trait(ref a), Trait(ref b)) => Ok(Trait(relation.relate(a, b)?)),
|
||||
(Projection(ref a), Projection(ref b)) => Ok(Projection(relation.relate(a, b)?)),
|
||||
(AutoTrait(ref a), AutoTrait(ref b)) if a == b => Ok(AutoTrait(*a)),
|
||||
_ => Err(TypeError::ExistentialMismatch(expected_found(relation, a, b))),
|
||||
(Trait(a), Trait(b)) => Ok(Trait(relation.relate(a, b)?)),
|
||||
(Projection(a), Projection(b)) => Ok(Projection(relation.relate(a, b)?)),
|
||||
(AutoTrait(a), AutoTrait(b)) if a == b => Ok(AutoTrait(a)),
|
||||
_ => Err(TypeError::ExistentialMismatch(expected_found(relation, &a, &b))),
|
||||
}
|
||||
});
|
||||
Ok(tcx.mk_existential_predicates(v)?)
|
||||
@ -648,8 +626,8 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
|
||||
impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::ClosureSubsts<'tcx>,
|
||||
b: &ty::ClosureSubsts<'tcx>,
|
||||
a: ty::ClosureSubsts<'tcx>,
|
||||
b: ty::ClosureSubsts<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> {
|
||||
let substs = relate_substs(relation, None, a.substs, b.substs)?;
|
||||
Ok(ty::ClosureSubsts { substs })
|
||||
@ -659,8 +637,8 @@ impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::GeneratorSubsts<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::GeneratorSubsts<'tcx>,
|
||||
b: &ty::GeneratorSubsts<'tcx>,
|
||||
a: ty::GeneratorSubsts<'tcx>,
|
||||
b: ty::GeneratorSubsts<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::GeneratorSubsts<'tcx>> {
|
||||
let substs = relate_substs(relation, None, a.substs, b.substs)?;
|
||||
Ok(ty::GeneratorSubsts { substs })
|
||||
@ -670,8 +648,8 @@ impl<'tcx> Relate<'tcx> for ty::GeneratorSubsts<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for SubstsRef<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &SubstsRef<'tcx>,
|
||||
b: &SubstsRef<'tcx>,
|
||||
a: SubstsRef<'tcx>,
|
||||
b: SubstsRef<'tcx>,
|
||||
) -> RelateResult<'tcx, SubstsRef<'tcx>> {
|
||||
relate_substs(relation, None, a, b)
|
||||
}
|
||||
@ -680,72 +658,48 @@ impl<'tcx> Relate<'tcx> for SubstsRef<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::Region<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::Region<'tcx>,
|
||||
b: &ty::Region<'tcx>,
|
||||
a: ty::Region<'tcx>,
|
||||
b: ty::Region<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::Region<'tcx>> {
|
||||
relation.regions(*a, *b)
|
||||
relation.regions(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for &'tcx ty::Const<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &&'tcx ty::Const<'tcx>,
|
||||
b: &&'tcx ty::Const<'tcx>,
|
||||
a: &'tcx ty::Const<'tcx>,
|
||||
b: &'tcx ty::Const<'tcx>,
|
||||
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
|
||||
relation.consts(*a, *b)
|
||||
relation.consts(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for ty::Binder<T> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>> {
|
||||
relation.binders(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Rc<T> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Rc<T>,
|
||||
b: &Rc<T>,
|
||||
) -> RelateResult<'tcx, Rc<T>> {
|
||||
let a: &T = a;
|
||||
let b: &T = b;
|
||||
Ok(Rc::new(relation.relate(a, b)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Box<T> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &Box<T>,
|
||||
b: &Box<T>,
|
||||
) -> RelateResult<'tcx, Box<T>> {
|
||||
let a: &T = a;
|
||||
let b: &T = b;
|
||||
Ok(Box::new(relation.relate(a, b)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for GenericArg<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &GenericArg<'tcx>,
|
||||
b: &GenericArg<'tcx>,
|
||||
a: GenericArg<'tcx>,
|
||||
b: GenericArg<'tcx>,
|
||||
) -> RelateResult<'tcx, GenericArg<'tcx>> {
|
||||
match (a.unpack(), b.unpack()) {
|
||||
(GenericArgKind::Lifetime(a_lt), GenericArgKind::Lifetime(b_lt)) => {
|
||||
Ok(relation.relate(&a_lt, &b_lt)?.into())
|
||||
Ok(relation.relate(a_lt, b_lt)?.into())
|
||||
}
|
||||
(GenericArgKind::Type(a_ty), GenericArgKind::Type(b_ty)) => {
|
||||
Ok(relation.relate(&a_ty, &b_ty)?.into())
|
||||
Ok(relation.relate(a_ty, b_ty)?.into())
|
||||
}
|
||||
(GenericArgKind::Const(a_ct), GenericArgKind::Const(b_ct)) => {
|
||||
Ok(relation.relate(&a_ct, &b_ct)?.into())
|
||||
Ok(relation.relate(a_ct, b_ct)?.into())
|
||||
}
|
||||
(GenericArgKind::Lifetime(unpacked), x) => {
|
||||
bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x)
|
||||
@ -763,22 +717,22 @@ impl<'tcx> Relate<'tcx> for GenericArg<'tcx> {
|
||||
impl<'tcx> Relate<'tcx> for ty::TraitPredicate<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::TraitPredicate<'tcx>,
|
||||
b: &ty::TraitPredicate<'tcx>,
|
||||
a: ty::TraitPredicate<'tcx>,
|
||||
b: ty::TraitPredicate<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::TraitPredicate<'tcx>> {
|
||||
Ok(ty::TraitPredicate { trait_ref: relation.relate(&a.trait_ref, &b.trait_ref)? })
|
||||
Ok(ty::TraitPredicate { trait_ref: relation.relate(a.trait_ref, b.trait_ref)? })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Relate<'tcx> for ty::ProjectionPredicate<'tcx> {
|
||||
fn relate<R: TypeRelation<'tcx>>(
|
||||
relation: &mut R,
|
||||
a: &ty::ProjectionPredicate<'tcx>,
|
||||
b: &ty::ProjectionPredicate<'tcx>,
|
||||
a: ty::ProjectionPredicate<'tcx>,
|
||||
b: ty::ProjectionPredicate<'tcx>,
|
||||
) -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> {
|
||||
Ok(ty::ProjectionPredicate {
|
||||
projection_ty: relation.relate(&a.projection_ty, &b.projection_ty)?,
|
||||
ty: relation.relate(&a.ty, &b.ty)?,
|
||||
projection_ty: relation.relate(a.projection_ty, b.projection_ty)?,
|
||||
ty: relation.relate(a.ty, b.ty)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -514,7 +514,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::PredicateKind<'a> {
|
||||
impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder<T> {
|
||||
type Lifted = ty::Binder<T::Lifted>;
|
||||
fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
|
||||
tcx.lift(self.skip_binder()).map(ty::Binder::bind)
|
||||
tcx.lift(self.as_ref().skip_binder()).map(ty::Binder::bind)
|
||||
}
|
||||
}
|
||||
|
||||
@ -655,7 +655,6 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> {
|
||||
VariadicMismatch(x) => VariadicMismatch(x),
|
||||
CyclicTy(t) => return tcx.lift(&t).map(|t| CyclicTy(t)),
|
||||
ProjectionMismatched(x) => ProjectionMismatched(x),
|
||||
ProjectionBoundsLength(x) => ProjectionBoundsLength(x),
|
||||
Sorts(ref x) => return tcx.lift(x).map(Sorts),
|
||||
ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch),
|
||||
ConstMismatch(ref x) => return tcx.lift(x).map(ConstMismatch),
|
||||
@ -798,7 +797,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
self.skip_binder().visit_with(visitor)
|
||||
self.as_ref().skip_binder().visit_with(visitor)
|
||||
}
|
||||
|
||||
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
|
@ -615,7 +615,7 @@ impl<'tcx> ExistentialPredicate<'tcx> {
|
||||
impl<'tcx> Binder<ExistentialPredicate<'tcx>> {
|
||||
pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> {
|
||||
use crate::ty::ToPredicate;
|
||||
match *self.skip_binder() {
|
||||
match self.skip_binder() {
|
||||
ExistentialPredicate::Trait(tr) => {
|
||||
Binder(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx)
|
||||
}
|
||||
@ -776,7 +776,7 @@ impl<'tcx> PolyTraitRef<'tcx> {
|
||||
|
||||
pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> {
|
||||
// Note that we preserve binding levels
|
||||
Binder(ty::TraitPredicate { trait_ref: *self.skip_binder() })
|
||||
Binder(ty::TraitPredicate { trait_ref: self.skip_binder() })
|
||||
}
|
||||
}
|
||||
|
||||
@ -880,8 +880,8 @@ impl<T> Binder<T> {
|
||||
/// - extracting the `DefId` from a PolyTraitRef;
|
||||
/// - comparing the self type of a PolyTraitRef to see if it is equal to
|
||||
/// a type parameter `X`, since the type `X` does not reference any regions
|
||||
pub fn skip_binder(&self) -> &T {
|
||||
&self.0
|
||||
pub fn skip_binder(self) -> T {
|
||||
self.0
|
||||
}
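
Since `skip_binder` now takes `self` by value and returns `T`, call sites that only hold a reference switch to `as_ref().skip_binder()`, as seen throughout this diff. A minimal standalone sketch of the new shape with a simplified stand-in `Binder` (not the rustc `ty::Binder`):

```
// Minimal standalone sketch of the `skip_binder` signature change:
// before, `skip_binder(&self) -> &T`; after, `skip_binder(self) -> T`.
struct Binder<T>(T);

impl<T> Binder<T> {
    fn skip_binder(self) -> T {
        self.0
    }

    fn as_ref(&self) -> Binder<&T> {
        Binder(&self.0)
    }
}

fn main() {
    let bound = Binder(vec![1, 2, 3]);

    // Borrowing access: `as_ref()` rebinds a reference, and `skip_binder`
    // then consumes the cheap `Binder<&Vec<i32>>` by value.
    let len = bound.as_ref().skip_binder().len();

    // Consuming access: the new by-value `skip_binder` moves the contents out.
    let inner = bound.skip_binder();
    assert_eq!(len, inner.len());
}
```
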
pub fn as_ref(&self) -> Binder<&T> {
|
||||
@ -916,11 +916,7 @@ impl<T> Binder<T> {
|
||||
where
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
if self.skip_binder().has_escaping_bound_vars() {
|
||||
None
|
||||
} else {
|
||||
Some(self.skip_binder().clone())
|
||||
}
|
||||
if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) }
|
||||
}
|
||||
|
||||
/// Given two things that have the same binder level,
|
||||
@ -997,7 +993,7 @@ impl<'tcx> ProjectionTy<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, TypeFoldable)]
|
||||
#[derive(Copy, Clone, Debug, TypeFoldable)]
|
||||
pub struct GenSig<'tcx> {
|
||||
pub resume_ty: Ty<'tcx>,
|
||||
pub yield_ty: Ty<'tcx>,
|
||||
|
@ -133,7 +133,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
|
||||
ty::Dynamic(obj, lt) => {
|
||||
stack.push(lt.into());
|
||||
stack.extend(obj.iter().rev().flat_map(|predicate| {
|
||||
let (substs, opt_ty) = match *predicate.skip_binder() {
|
||||
let (substs, opt_ty) = match predicate.skip_binder() {
|
||||
ty::ExistentialPredicate::Trait(tr) => (tr.substs, None),
|
||||
ty::ExistentialPredicate::Projection(p) => (p.substs, Some(p.ty)),
|
||||
ty::ExistentialPredicate::AutoTrait(_) =>
|
||||
|
@ -1923,7 +1923,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
|
||||
// We use a mix of the HIR and the Ty types to get information
|
||||
// as the HIR doesn't have full types for closure arguments.
|
||||
let return_ty = *sig.output().skip_binder();
|
||||
let return_ty = sig.output().skip_binder();
|
||||
let mut return_span = fn_decl.output.span();
|
||||
if let hir::FnRetTy::Return(ty) = &fn_decl.output {
|
||||
if let hir::TyKind::Rptr(lifetime, _) = ty.kind {
|
||||
@ -1965,7 +1965,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
||||
let argument_ty = sig.inputs().skip_binder().first()?;
|
||||
|
||||
let return_span = fn_decl.output.span();
|
||||
let return_ty = *sig.output().skip_binder();
|
||||
let return_ty = sig.output().skip_binder();
|
||||
|
||||
// We expect the first argument to be a reference.
|
||||
match argument_ty.kind {
|
||||
|
@ -122,7 +122,9 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
|
||||
if self.regioncx.universal_regions().is_universal_region(r) {
|
||||
Some(r)
|
||||
} else {
|
||||
let upper_bound = self.regioncx.universal_upper_bound(r);
|
||||
// We just want something nameable, even if it's not
|
||||
// actually an upper bound.
|
||||
let upper_bound = self.regioncx.approx_universal_upper_bound(r);
|
||||
|
||||
if self.regioncx.upper_bound_in_region_scc(r, upper_bound) {
|
||||
self.to_error_region_vid(upper_bound)
|
||||
|
@ -1114,6 +1114,40 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
lub
|
||||
}
|
||||
|
||||
/// Like `universal_upper_bound`, but returns an approximation more suitable
/// for diagnostics. If `r` contains multiple disjoint universal regions
/// (e.g. `'a` and `'b` in `fn foo<'a, 'b>() { ... }`), we pick the lower-numbered region.
/// This corresponds to picking named regions over unnamed regions
/// (e.g. picking early-bound regions over a closure late-bound region).
///
/// This means that the returned value may not be a true upper bound, since
/// only 'static is known to outlive disjoint universal regions.
/// Therefore, this method should only be used in diagnostic code,
/// where displaying *some* named universal region is better than
/// falling back to 'static.
pub(in crate::borrow_check) fn approx_universal_upper_bound(&self, r: RegionVid) -> RegionVid {
|
||||
debug!("approx_universal_upper_bound(r={:?}={})", r, self.region_value_str(r));
|
||||
|
||||
// Find the smallest universal region that contains all other
|
||||
// universal regions within `region`.
|
||||
let mut lub = self.universal_regions.fr_fn_body;
|
||||
let r_scc = self.constraint_sccs.scc(r);
|
||||
let static_r = self.universal_regions.fr_static;
|
||||
for ur in self.scc_values.universal_regions_outlived_by(r_scc) {
|
||||
let new_lub = self.universal_region_relations.postdom_upper_bound(lub, ur);
|
||||
debug!("approx_universal_upper_bound: ur={:?} lub={:?} new_lub={:?}", ur, lub, new_lub);
|
||||
if ur != static_r && lub != static_r && new_lub == static_r {
|
||||
lub = std::cmp::min(ur, lub);
|
||||
} else {
|
||||
lub = new_lub;
|
||||
}
|
||||
}
|
||||
|
||||
debug!("approx_universal_upper_bound: r={:?} lub={:?}", r, lub);
|
||||
|
||||
lub
|
||||
}
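
The loop above only deviates from `universal_upper_bound` when taking the post-dominator would jump straight to `'static`; in that case it keeps the lower-numbered (usually named) region instead. A minimal standalone sketch of that choice, with plain integers standing in for region IDs and a hypothetical `postdom` closure (not the rustc API):

```
// Minimal standalone sketch of the "avoid jumping to 'static" choice above.
fn approx_lub(
    postdom: impl Fn(usize, usize) -> usize,
    outlived: &[usize],
    static_r: usize,
) -> usize {
    let mut lub = 0; // stand-in for `fr_fn_body`
    for &ur in outlived {
        let new_lub = postdom(lub, ur);
        lub = if ur != static_r && lub != static_r && new_lub == static_r {
            // The true upper bound would be 'static; for diagnostics, prefer
            // the lower-numbered (usually named) region instead.
            ur.min(lub)
        } else {
            new_lub
        };
    }
    lub
}

fn main() {
    let static_r = 99;
    // 'a = 1 and 'b = 2 are disjoint, so their only common post-dominator is 'static.
    let postdom = |a: usize, b: usize| {
        if a == 0 { b } else if b == 0 { a } else if a == b { a } else { static_r }
    };
    // A true upper bound would be 'static (99); the approximation picks 'a (1).
    assert_eq!(approx_lub(postdom, &[1, 2], static_r), 1);
}
```
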
/// Tests if `test` is true when applied to `lower_bound` at
|
||||
/// `point`.
|
||||
fn eval_verify_bound(
|
||||
|
@ -141,7 +141,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
{
|
||||
tcx.fold_regions(&ty, &mut false, |region, _| match *region {
|
||||
ty::ReVar(vid) => {
|
||||
let upper_bound = self.universal_upper_bound(vid);
|
||||
// Find something that we can name
|
||||
let upper_bound = self.approx_universal_upper_bound(vid);
|
||||
self.definitions[upper_bound].external_name.unwrap_or(region)
|
||||
}
|
||||
_ => region,
|
||||
|
@ -31,7 +31,7 @@ pub(super) fn relate_types<'tcx>(
|
||||
NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),
|
||||
v,
|
||||
)
|
||||
.relate(&a, &b)?;
|
||||
.relate(a, b)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -409,6 +409,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
);
|
||||
self.copy_op(self.operand_index(args[0], index)?, dest)?;
|
||||
}
|
||||
sym::likely | sym::unlikely => {
|
||||
// These just return their argument
|
||||
self.copy_op(args[0], dest)?;
|
||||
}
|
||||
// FIXME(#73156): Handle source code coverage in const eval
|
||||
sym::count_code_region => (),
|
||||
_ => return Ok(false),
|
||||
|
@ -531,9 +531,12 @@ impl Visitor<'tcx> for Validator<'mir, 'tcx> {
|
||||
if is_lang_panic_fn(self.tcx, def_id) {
|
||||
self.check_op(ops::Panic);
|
||||
} else if let Some(feature) = is_unstable_const_fn(self.tcx, def_id) {
|
||||
// Exempt unstable const fns inside of macros with
|
||||
// Exempt unstable const fns inside of macros or functions with
|
||||
// `#[allow_internal_unstable]`.
|
||||
if !self.span.allows_unstable(feature) {
|
||||
use crate::transform::qualify_min_const_fn::lib_feature_allowed;
|
||||
if !self.span.allows_unstable(feature)
|
||||
&& !lib_feature_allowed(self.tcx, self.def_id, feature)
|
||||
{
|
||||
self.check_op(ops::FnCallUnstable(def_id, feature));
|
||||
}
|
||||
} else {
|
||||
|
@ -328,6 +328,26 @@ fn feature_allowed(tcx: TyCtxt<'tcx>, def_id: DefId, feature_gate: Symbol) -> bo
|
||||
.map_or(false, |mut features| features.any(|name| name == feature_gate))
|
||||
}
|
||||
|
||||
/// Returns `true` if the given library feature gate is allowed within the function with the given `DefId`.
|
||||
pub fn lib_feature_allowed(tcx: TyCtxt<'tcx>, def_id: DefId, feature_gate: Symbol) -> bool {
|
||||
// All features require that the corresponding gate be enabled,
|
||||
// even if the function has `#[allow_internal_unstable(the_gate)]`.
|
||||
if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == feature_gate) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If this crate is not using stability attributes, or this function is not claiming to be a
|
||||
// stable `const fn`, that is all that is required.
|
||||
if !tcx.features().staged_api || tcx.has_attr(def_id, sym::rustc_const_unstable) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// However, we cannot allow stable `const fn`s to use unstable features without an explicit
|
||||
// opt-in via `allow_internal_unstable`.
|
||||
attr::allow_internal_unstable(&tcx.get_attrs(def_id), &tcx.sess.diagnostic())
|
||||
.map_or(false, |mut features| features.any(|name| name == feature_gate))
|
||||
}
|
||||
|
||||
fn check_terminator(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
body: &'a Body<'tcx>,
|
||||
@ -367,8 +387,17 @@ fn check_terminator(
|
||||
fn_span: _,
|
||||
} => {
|
||||
let fn_ty = func.ty(body, tcx);
|
||||
if let ty::FnDef(def_id, _) = fn_ty.kind {
|
||||
if !crate::const_eval::is_min_const_fn(tcx, def_id) {
|
||||
if let ty::FnDef(fn_def_id, _) = fn_ty.kind {
|
||||
// Allow unstable const if we opt in by using #[allow_internal_unstable]
|
||||
// on function or macro declaration.
|
||||
if !crate::const_eval::is_min_const_fn(tcx, fn_def_id)
|
||||
&& !crate::const_eval::is_unstable_const_fn(tcx, fn_def_id)
|
||||
.map(|feature| {
|
||||
span.allows_unstable(feature)
|
||||
|| lib_feature_allowed(tcx, def_id, feature)
|
||||
})
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return Err((
|
||||
span,
|
||||
format!(
|
||||
@ -380,10 +409,10 @@ fn check_terminator(
|
||||
));
|
||||
}
|
||||
|
||||
check_operand(tcx, func, span, def_id, body)?;
|
||||
check_operand(tcx, func, span, fn_def_id, body)?;
|
||||
|
||||
for arg in args {
|
||||
check_operand(tcx, arg, span, def_id, body)?;
|
||||
check_operand(tcx, arg, span, fn_def_id, body)?;
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
|
@ -74,8 +74,8 @@ pub fn equal_up_to_regions(
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
_: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
// Ignore variance, require types to be exactly the same.
|
||||
self.relate(a, b)
|
||||
@ -108,8 +108,8 @@ pub fn equal_up_to_regions(
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -121,7 +121,7 @@ pub fn equal_up_to_regions(
|
||||
|
||||
// Instantiate and run relation.
|
||||
let mut relator: LifetimeIgnoreRelation<'tcx> = LifetimeIgnoreRelation { tcx: tcx, param_env };
|
||||
relator.relate(&src, &dest).is_ok()
|
||||
relator.relate(src, dest).is_ok()
|
||||
}
|
||||
|
||||
struct TypeChecker<'a, 'tcx> {
|
||||
|
@ -4,8 +4,8 @@
|
||||
//! This file includes the logic for exhaustiveness and usefulness checking for
|
||||
//! pattern-matching. Specifically, given a list of patterns for a type, we can
|
||||
//! tell whether:
|
||||
//! (a) the patterns cover every possible constructor for the type [exhaustiveness]
|
||||
//! (b) each pattern is necessary [usefulness]
|
||||
//! (a) the patterns cover every possible constructor for the type (exhaustiveness)
|
||||
//! (b) each pattern is necessary (usefulness)
|
||||
//!
|
||||
//! The algorithm implemented here is a modified version of the one described in:
|
||||
//! http://moscova.inria.fr/~maranget/papers/warn/index.html
|
||||
@ -101,53 +101,54 @@
|
||||
//! To match the paper, the top of the stack is at the beginning / on the left.
|
||||
//!
|
||||
//! There are two important operations on pattern-stacks necessary to understand the algorithm:
|
||||
//! 1. We can pop a given constructor off the top of a stack. This operation is called
|
||||
//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or
|
||||
//! `None`) and `p` a pattern-stack.
|
||||
//! If the pattern on top of the stack can cover `c`, this removes the constructor and
|
||||
//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns.
|
||||
//! Otherwise the pattern-stack is discarded.
|
||||
//! This essentially filters those pattern-stacks whose top covers the constructor `c` and
|
||||
//! discards the others.
|
||||
//!
|
||||
//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we
|
||||
//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the
|
||||
//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get
|
||||
//! nothing back.
|
||||
//! 1. We can pop a given constructor off the top of a stack. This operation is called
|
||||
//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or
|
||||
//! `None`) and `p` a pattern-stack.
|
||||
//! If the pattern on top of the stack can cover `c`, this removes the constructor and
|
||||
//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns.
|
||||
//! Otherwise the pattern-stack is discarded.
|
||||
//! This essentially filters those pattern-stacks whose top covers the constructor `c` and
|
||||
//! discards the others.
|
||||
//!
|
||||
//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1`
|
||||
//! on top of the stack, and we have four cases:
|
||||
//! 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We
|
||||
//! push onto the stack the arguments of this constructor, and return the result:
|
||||
//! r_1, .., r_a, p_2, .., p_n
|
||||
//! 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and
|
||||
//! return nothing.
|
||||
//! 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has
|
||||
//! arguments (its arity), and return the resulting stack:
|
||||
//! _, .., _, p_2, .., p_n
|
||||
//! 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
|
||||
//! stack:
|
||||
//! S(c, (r_1, p_2, .., p_n))
|
||||
//! S(c, (r_2, p_2, .., p_n))
|
||||
//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we
|
||||
//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the
|
||||
//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get
|
||||
//! nothing back.
|
||||
//!
|
||||
//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
|
||||
//! a pattern-stack.
|
||||
//! This is used when we know there are missing constructor cases, but there might be
|
||||
//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check
|
||||
//! all its *other* components.
|
||||
//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1`
|
||||
//! on top of the stack, and we have four cases:
|
||||
//! 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We
|
||||
//! push onto the stack the arguments of this constructor, and return the result:
|
||||
//! r_1, .., r_a, p_2, .., p_n
|
||||
//! 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and
|
||||
//! return nothing.
|
||||
//! 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has
|
||||
//! arguments (its arity), and return the resulting stack:
|
||||
//! _, .., _, p_2, .., p_n
|
||||
//! 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
|
||||
//! stack:
|
||||
//! S(c, (r_1, p_2, .., p_n))
|
||||
//! S(c, (r_2, p_2, .., p_n))
|
||||
//!
|
||||
//! It is computed as follows. We look at the pattern `p_1` on top of the stack,
|
||||
//! and we have three cases:
|
||||
//! 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
|
||||
//! 1.2. `p_1 = _`. We return the rest of the stack:
|
||||
//! p_2, .., p_n
|
||||
//! 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
|
||||
//! stack.
|
||||
//! D((r_1, p_2, .., p_n))
|
||||
//! D((r_2, p_2, .., p_n))
|
||||
//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
|
||||
//! a pattern-stack.
|
||||
//! This is used when we know there are missing constructor cases, but there might be
|
||||
//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check
|
||||
//! all its *other* components.
|
||||
//!
|
||||
//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
|
||||
//! exhaustive integer matching rules, so they're written here for posterity.
|
||||
//! It is computed as follows. We look at the pattern `p_1` on top of the stack,
|
||||
//! and we have three cases:
|
||||
//! 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
|
||||
//! 1.2. `p_1 = _`. We return the rest of the stack:
|
||||
//! p_2, .., p_n
|
||||
//! 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting
|
||||
//! stack.
|
||||
//! D((r_1, p_2, .., p_n))
|
||||
//! D((r_2, p_2, .., p_n))
|
||||
//!
|
||||
//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
|
||||
//! exhaustive integer matching rules, so they're written here for posterity.
|
||||
//!
|
||||
//! Both those operations extend straightforwardly to a list of pattern-stacks, i.e. a matrix, by
|
||||
//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with
|
||||
@ -168,66 +169,66 @@
|
||||
//!
|
||||
//! Inductive step. (`n > 0`, i.e., whether there's at least one column
|
||||
//! [which may then be expanded into further columns later])
|
||||
//! We're going to match on the top of the new pattern-stack, `p_1`.
|
||||
//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern.
|
||||
//! Then, the usefulness of `p_1` can be reduced to whether it is useful when
|
||||
//! we ignore all the patterns in the first column of `P` that involve other constructors.
|
||||
//! This is where `S(c, P)` comes in:
|
||||
//! `U(P, p) := U(S(c, P), S(c, p))`
|
||||
//! This special case is handled in `is_useful_specialized`.
|
||||
//! We're going to match on the top of the new pattern-stack, `p_1`.
|
||||
//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern.
|
||||
//! Then, the usefulness of `p_1` can be reduced to whether it is useful when
|
||||
//! we ignore all the patterns in the first column of `P` that involve other constructors.
|
||||
//! This is where `S(c, P)` comes in:
|
||||
//! `U(P, p) := U(S(c, P), S(c, p))`
|
||||
//! This special case is handled in `is_useful_specialized`.
|
||||
//!
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [Some(true), _],
|
||||
//! [None, 0],
|
||||
//! ]
|
||||
//! and `p` is [Some(false), 0], then we don't care about row 2 since we know `p` only
|
||||
//! matches values that row 2 doesn't. For row 1 however, we need to dig into the
|
||||
//! arguments of `Some` to know whether some new value is covered. So we compute
|
||||
//! `U([[true, _]], [false, 0])`.
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [Some(true), _],
|
||||
//! [None, 0],
|
||||
//! ]
|
||||
//! and `p` is [Some(false), 0], then we don't care about row 2 since we know `p` only
|
||||
//! matches values that row 2 doesn't. For row 1 however, we need to dig into the
|
||||
//! arguments of `Some` to know whether some new value is covered. So we compute
|
||||
//! `U([[true, _]], [false, 0])`.
|
||||
//!
|
||||
//! - If `p_1 == _`, then we look at the list of constructors that appear in the first
|
||||
//! component of the rows of `P`:
|
||||
//! + If there are some constructors that aren't present, then we might think that the
|
||||
//! wildcard `_` is useful, since it covers those constructors that weren't covered
|
||||
//! before.
|
||||
//! That's almost correct, but only works if there were no wildcards in those first
|
||||
//! components. So we need to check that `p` is useful with respect to the rows that
|
||||
//! start with a wildcard, if there are any. This is where `D` comes in:
|
||||
//! `U(P, p) := U(D(P), D(p))`
|
||||
//! - If `p_1 == _`, then we look at the list of constructors that appear in the first
|
||||
//! component of the rows of `P`:
|
||||
//! + If there are some constructors that aren't present, then we might think that the
|
||||
//! wildcard `_` is useful, since it covers those constructors that weren't covered
|
||||
//! before.
|
||||
//! That's almost correct, but only works if there were no wildcards in those first
|
||||
//! components. So we need to check that `p` is useful with respect to the rows that
|
||||
//! start with a wildcard, if there are any. This is where `D` comes in:
|
||||
//! `U(P, p) := U(D(P), D(p))`
|
||||
//!
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [_, true, _],
|
||||
//! [None, false, 1],
|
||||
//! ]
|
||||
//! and `p` is [_, false, _], the `Some` constructor doesn't appear in `P`. So if we
|
||||
//! only had row 2, we'd know that `p` is useful. However row 1 starts with a
|
||||
//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`.
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [_, true, _],
|
||||
//! [None, false, 1],
|
||||
//! ]
|
||||
//! and `p` is [_, false, _], the `Some` constructor doesn't appear in `P`. So if we
|
||||
//! only had row 2, we'd know that `p` is useful. However row 1 starts with a
|
||||
//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`.
|
||||
//!
|
||||
//! + Otherwise, all possible constructors (for the relevant type) are present. In this
|
||||
//! case we must check whether the wildcard pattern covers any unmatched value. For
|
||||
//! that, we can think of the `_` pattern as a big OR-pattern that covers all
|
||||
//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for
|
||||
//! example. The wildcard pattern is useful in this case if it is useful when
|
||||
//! specialized to one of the possible constructors. So we compute:
|
||||
//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))`
|
||||
//! + Otherwise, all possible constructors (for the relevant type) are present. In this
|
||||
//! case we must check whether the wildcard pattern covers any unmatched value. For
|
||||
//! that, we can think of the `_` pattern as a big OR-pattern that covers all
|
||||
//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for
|
||||
//! example. The wildcard pattern is useful in this case if it is useful when
|
||||
//! specialized to one of the possible constructors. So we compute:
|
||||
//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))`
|
||||
//!
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [Some(true), _],
|
||||
//! [None, false],
|
||||
//! ]
|
||||
//! and `p` is [_, false], both `None` and `Some` constructors appear in the first
|
||||
//! components of `P`. We will therefore try popping both constructors in turn: we
|
||||
//! compute U([[true, _]], [_, false]) for the `Some` constructor, and U([[false]],
|
||||
//! [false]) for the `None` constructor. The first case returns true, so we know that
|
||||
//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched
|
||||
//! before.
|
||||
//! For example, if `P` is:
|
||||
//! [
|
||||
//! [Some(true), _],
|
||||
//! [None, false],
|
||||
//! ]
|
||||
//! and `p` is [_, false], both `None` and `Some` constructors appear in the first
|
||||
//! components of `P`. We will therefore try popping both constructors in turn: we
|
||||
//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]],
|
||||
//! [false])` for the `None` constructor. The first case returns true, so we know that
|
||||
//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched
|
||||
//! before.
|
||||
//!
|
||||
//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately:
|
||||
//! `U(P, p) := U(P, (r_1, p_2, .., p_n))
|
||||
//! || U(P, (r_2, p_2, .., p_n))`
|
||||
//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately:
|
||||
//! `U(P, p) := U(P, (r_1, p_2, .., p_n))
|
||||
//! || U(P, (r_2, p_2, .., p_n))`
|
||||
//!
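For readers skimming this diff, here is a minimal, self-contained sketch of the `S(c, p)` and `D(p)` operations described above, specialised to a toy `Option`-like pattern language with a unary `Some`. This is our own illustration, not the compiler's data structures and not code touched by this commit:

```rust
#[derive(Clone, Debug)]
enum Pat {
    Wild,                   // `_`
    None,                   // `None`
    Some(Box<Pat>),         // `Some(p)`
    Or(Box<Pat>, Box<Pat>), // `p | q`
}

// Build a new pattern-stack with `head` pushed on top of `rest`.
fn prepend(head: Pat, rest: &[Pat]) -> Vec<Pat> {
    let mut stack = Vec::with_capacity(rest.len() + 1);
    stack.push(head);
    stack.extend_from_slice(rest);
    stack
}

/// `S(Some, stack)`: cases 1.1-1.4 from the description above.
fn specialize_some(stack: &[Pat]) -> Vec<Vec<Pat>> {
    let (head, rest) = match stack.split_first() {
        Some(split) => split,
        None => return Vec::new(),
    };
    match head {
        // 1.1: same constructor: pop it and push its argument.
        Pat::Some(arg) => vec![prepend((**arg).clone(), rest)],
        // 1.2: a different constructor: discard this stack.
        Pat::None => Vec::new(),
        // 1.3: wildcard: one wildcard per constructor argument (arity 1 here).
        Pat::Wild => vec![prepend(Pat::Wild, rest)],
        // 1.4: or-pattern: expand it and recurse on each alternative.
        Pat::Or(a, b) => {
            let mut out = specialize_some(&prepend((**a).clone(), rest));
            out.extend(specialize_some(&prepend((**b).clone(), rest)));
            out
        }
    }
}

/// `D(stack)`: the wildcard-popping operation, cases 1.1-1.3 above.
fn pop_wildcard(stack: &[Pat]) -> Vec<Vec<Pat>> {
    let (head, rest) = match stack.split_first() {
        Some(split) => split,
        None => return Vec::new(),
    };
    match head {
        // A concrete constructor on top: discard the stack.
        Pat::Some(_) | Pat::None => Vec::new(),
        // A wildcard on top: keep the rest of the stack.
        Pat::Wild => vec![rest.to_vec()],
        // An or-pattern: expand it and recurse on each alternative.
        Pat::Or(a, b) => {
            let mut out = pop_wildcard(&prepend((**a).clone(), rest));
            out.extend(pop_wildcard(&prepend((**b).clone(), rest)));
            out
        }
    }
}

fn main() {
    // `S(Some, [Some(_), _])` yields `[_, _]`; `S(Some, [None, _])` yields nothing.
    println!("{:?}", specialize_some(&[Pat::Some(Box::new(Pat::Wild)), Pat::Wild]));
    println!("{:?}", specialize_some(&[Pat::None, Pat::Wild]));
    // `D([_, None])` yields `[None]`.
    println!("{:?}", pop_wildcard(&[Pat::Wild, Pat::None]));
}
```

The real implementation works over a whole matrix of such pattern-stacks and has to track types, spans and constructor arity; the sketch only mirrors the case analysis spelled out in the comments above.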
|
||||
//! Modifications to the algorithm
|
||||
//! ------------------------------
|
||||
|
@ -1415,7 +1415,7 @@ impl<'a> Parser<'a> {
|
||||
if self.token != token::Lt {
|
||||
err.span_suggestion(
|
||||
pat.span,
|
||||
"if this was a parameter name, give it a type",
|
||||
"if this is a parameter name, give it a type",
|
||||
format!("{}: TypeName", ident),
|
||||
Applicability::HasPlaceholders,
|
||||
);
|
||||
|
@ -422,7 +422,7 @@ impl Visitor<'tcx> for ExprVisitor<'tcx> {
|
||||
let typ = self.tables.node_type(expr.hir_id);
|
||||
let sig = typ.fn_sig(self.tcx);
|
||||
let from = sig.inputs().skip_binder()[0];
|
||||
let to = *sig.output().skip_binder();
|
||||
let to = sig.output().skip_binder();
|
||||
self.check_transmute(expr.span, from, to);
|
||||
}
|
||||
}
|
||||
|
@ -92,14 +92,14 @@ where
|
||||
for (predicate, _span) in predicates {
|
||||
match predicate.kind() {
|
||||
ty::PredicateKind::Trait(poly_predicate, _) => {
|
||||
let ty::TraitPredicate { trait_ref } = *poly_predicate.skip_binder();
|
||||
let ty::TraitPredicate { trait_ref } = poly_predicate.skip_binder();
|
||||
if self.visit_trait(trait_ref) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
ty::PredicateKind::Projection(poly_predicate) => {
|
||||
let ty::ProjectionPredicate { projection_ty, ty } =
|
||||
*poly_predicate.skip_binder();
|
||||
poly_predicate.skip_binder();
|
||||
if ty.visit_with(self) {
|
||||
return true;
|
||||
}
|
||||
@ -108,7 +108,7 @@ where
|
||||
}
|
||||
}
|
||||
ty::PredicateKind::TypeOutlives(poly_predicate) => {
|
||||
let ty::OutlivesPredicate(ty, _region) = *poly_predicate.skip_binder();
|
||||
let ty::OutlivesPredicate(ty, _region) = poly_predicate.skip_binder();
|
||||
if ty.visit_with(self) {
|
||||
return true;
|
||||
}
|
||||
@ -175,7 +175,7 @@ where
|
||||
ty::Dynamic(predicates, ..) => {
|
||||
// All traits in the list are considered the "primary" part of the type
|
||||
// and are visited by shallow visitors.
|
||||
for predicate in *predicates.skip_binder() {
|
||||
for predicate in predicates.skip_binder() {
|
||||
let trait_ref = match predicate {
|
||||
ty::ExistentialPredicate::Trait(trait_ref) => trait_ref,
|
||||
ty::ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx),
|
||||
@ -1270,7 +1270,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> {
|
||||
);
|
||||
|
||||
for (trait_predicate, _, _) in bounds.trait_bounds {
|
||||
if self.visit_trait(*trait_predicate.skip_binder()) {
|
||||
if self.visit_trait(trait_predicate.skip_binder()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -427,6 +427,7 @@ symbols! {
|
||||
lhs,
|
||||
lib,
|
||||
lifetime,
|
||||
likely,
|
||||
line,
|
||||
link,
|
||||
linkage,
|
||||
@ -813,6 +814,7 @@ symbols! {
|
||||
underscore_lifetimes,
|
||||
uniform_paths,
|
||||
universal_impl_trait,
|
||||
unlikely,
|
||||
unmarked_api,
|
||||
unreachable_code,
|
||||
unrestricted_attribute_tokens,
|
||||
|
@ -219,7 +219,7 @@ impl SymbolMangler<'tcx> {
|
||||
lifetime_depths.end += lifetimes;
|
||||
|
||||
self.binders.push(BinderLevel { lifetime_depths });
|
||||
self = print_value(self, value.skip_binder())?;
|
||||
self = print_value(self, value.as_ref().skip_binder())?;
|
||||
self.binders.pop();
|
||||
|
||||
Ok(self)
|
||||
|
@ -691,7 +691,7 @@ where
|
||||
OP: FnMut(ty::Region<'tcx>),
|
||||
{
|
||||
fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> bool {
|
||||
t.skip_binder().visit_with(self);
|
||||
t.as_ref().skip_binder().visit_with(self);
|
||||
false // keep visiting
|
||||
}
|
||||
|
||||
|
@ -1569,7 +1569,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
// no need to overload user in such cases
|
||||
return;
|
||||
}
|
||||
let &SubtypePredicate { a_is_expected: _, a, b } = data.skip_binder();
|
||||
let SubtypePredicate { a_is_expected: _, a, b } = data.skip_binder();
|
||||
// both must be type variables, or the other would've been instantiated
|
||||
assert!(a.is_ty_var() && b.is_ty_var());
|
||||
self.need_type_info_err(body_id, span, a, ErrorCode::E0282)
|
||||
|
@ -122,7 +122,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
) -> OnUnimplementedNote {
|
||||
let def_id =
|
||||
self.impl_similar_to(trait_ref, obligation).unwrap_or_else(|| trait_ref.def_id());
|
||||
let trait_ref = *trait_ref.skip_binder();
|
||||
let trait_ref = trait_ref.skip_binder();
|
||||
|
||||
let mut flags = vec![];
|
||||
flags.push((
|
||||
@ -219,7 +219,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
if let ty::Dynamic(traits, _) = self_ty.kind {
|
||||
for t in *traits.skip_binder() {
|
||||
for t in traits.skip_binder() {
|
||||
if let ty::ExistentialPredicate::Trait(trait_ref) = t {
|
||||
flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id))))
|
||||
}
|
||||
|
@ -1179,7 +1179,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
) -> DiagnosticBuilder<'tcx> {
|
||||
crate fn build_fn_sig_string<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
trait_ref: &ty::TraitRef<'tcx>,
|
||||
trait_ref: ty::TraitRef<'tcx>,
|
||||
) -> String {
|
||||
let inputs = trait_ref.substs.type_at(1);
|
||||
let sig = if let ty::Tuple(inputs) = inputs.kind {
|
||||
@ -1360,7 +1360,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
}
|
||||
ty::GeneratorWitness(..) => {}
|
||||
_ if generator.is_none() => {
|
||||
trait_ref = Some(*derived_obligation.parent_trait_ref.skip_binder());
|
||||
trait_ref = Some(derived_obligation.parent_trait_ref.skip_binder());
|
||||
target_ty = Some(ty);
|
||||
}
|
||||
_ => {}
|
||||
|
@ -220,7 +220,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
// Okay to skip binder because the substs on generator types never
|
||||
// touch bound regions, they just capture the in-scope
|
||||
// type/region parameters.
|
||||
let self_ty = *obligation.self_ty().skip_binder();
|
||||
let self_ty = obligation.self_ty().skip_binder();
|
||||
match self_ty.kind {
|
||||
ty::Generator(..) => {
|
||||
debug!(
|
||||
@ -299,7 +299,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
}
|
||||
|
||||
// Okay to skip binder because what we are inspecting doesn't involve bound regions.
|
||||
let self_ty = *obligation.self_ty().skip_binder();
|
||||
let self_ty = obligation.self_ty().skip_binder();
|
||||
match self_ty.kind {
|
||||
ty::Infer(ty::TyVar(_)) => {
|
||||
debug!("assemble_fn_pointer_candidates: ambiguous self-type");
|
||||
@ -362,7 +362,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
candidates: &mut SelectionCandidateSet<'tcx>,
|
||||
) -> Result<(), SelectionError<'tcx>> {
|
||||
// Okay to skip binder here because the tests we do below do not involve bound regions.
|
||||
let self_ty = *obligation.self_ty().skip_binder();
|
||||
let self_ty = obligation.self_ty().skip_binder();
|
||||
debug!("assemble_candidates_from_auto_impls(self_ty={:?})", self_ty);
|
||||
|
||||
let def_id = obligation.predicate.def_id();
|
||||
@ -583,7 +583,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
candidates: &mut SelectionCandidateSet<'tcx>,
|
||||
) -> Result<(), SelectionError<'tcx>> {
|
||||
// Okay to skip binder here because the tests we do below do not involve bound regions.
|
||||
let self_ty = *obligation.self_ty().skip_binder();
|
||||
let self_ty = obligation.self_ty().skip_binder();
|
||||
debug!("assemble_candidates_for_trait_alias(self_ty={:?})", self_ty);
|
||||
|
||||
let def_id = obligation.predicate.def_id();
|
||||
|
@ -326,7 +326,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
// probably flatten the binder from the obligation and the binder
|
||||
// from the object. Have to try to make a broken test case that
|
||||
// results.
|
||||
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
|
||||
let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
|
||||
let poly_trait_ref = match self_ty.kind {
|
||||
ty::Dynamic(ref data, ..) => data
|
||||
.principal()
|
||||
@ -379,7 +379,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
debug!("confirm_fn_pointer_candidate({:?})", obligation);
|
||||
|
||||
// Okay to skip binder; it is reintroduced below.
|
||||
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
|
||||
let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
|
||||
let sig = self_ty.fn_sig(self.tcx());
|
||||
let trait_ref = closure_trait_ref_and_return_type(
|
||||
self.tcx(),
|
||||
@ -448,7 +448,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
// Okay to skip binder because the substs on generator types never
|
||||
// touch bound regions, they just capture the in-scope
|
||||
// type/region parameters.
|
||||
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
|
||||
let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
|
||||
let (generator_def_id, substs) = match self_ty.kind {
|
||||
ty::Generator(id, substs, _) => (id, substs),
|
||||
_ => bug!("closure candidate for non-closure {:?}", obligation),
|
||||
@ -497,7 +497,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
// Okay to skip binder because the substs on closure types never
|
||||
// touch bound regions, they just capture the in-scope
|
||||
// type/region parameters.
|
||||
let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder());
|
||||
let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
|
||||
let (closure_def_id, substs) = match self_ty.kind {
|
||||
ty::Closure(id, substs) => (id, substs),
|
||||
_ => bug!("closure candidate for non-closure {:?}", obligation),
|
||||
|
@ -748,8 +748,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
&& stack.iter().skip(1).any(|prev| {
|
||||
stack.obligation.param_env == prev.obligation.param_env
|
||||
&& self.match_fresh_trait_refs(
|
||||
&stack.fresh_trait_ref,
|
||||
&prev.fresh_trait_ref,
|
||||
stack.fresh_trait_ref,
|
||||
prev.fresh_trait_ref,
|
||||
prev.obligation.param_env,
|
||||
)
|
||||
})
|
||||
@ -1944,8 +1944,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
|
||||
fn match_fresh_trait_refs(
|
||||
&self,
|
||||
previous: &ty::PolyTraitRef<'tcx>,
|
||||
current: &ty::PolyTraitRef<'tcx>,
|
||||
previous: ty::PolyTraitRef<'tcx>,
|
||||
current: ty::PolyTraitRef<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
) -> bool {
|
||||
let mut matcher = ty::_match::Match::new(self.tcx(), param_env);
|
||||
|
@ -695,7 +695,7 @@ pub fn object_region_bounds<'tcx>(
|
||||
let open_ty = tcx.mk_ty_infer(ty::FreshTy(0));
|
||||
|
||||
let predicates = existential_predicates.iter().filter_map(|predicate| {
|
||||
if let ty::ExistentialPredicate::Projection(_) = *predicate.skip_binder() {
|
||||
if let ty::ExistentialPredicate::Projection(_) = predicate.skip_binder() {
|
||||
None
|
||||
} else {
|
||||
Some(predicate.with_self_ty(tcx, open_ty))
|
||||
|
@ -615,7 +615,7 @@ crate fn collect_bound_vars<'a, 'tcx, T: TypeFoldable<'tcx>>(
|
||||
ty: &'a Binder<T>,
|
||||
) -> (T, chalk_ir::VariableKinds<RustInterner<'tcx>>, BTreeMap<DefId, u32>) {
|
||||
let mut bound_vars_collector = BoundVarsCollector::new();
|
||||
ty.skip_binder().visit_with(&mut bound_vars_collector);
|
||||
ty.as_ref().skip_binder().visit_with(&mut bound_vars_collector);
|
||||
let mut parameters = bound_vars_collector.parameters;
|
||||
let named_parameters: BTreeMap<DefId, u32> = bound_vars_collector
|
||||
.named_parameters
|
||||
@ -625,7 +625,7 @@ crate fn collect_bound_vars<'a, 'tcx, T: TypeFoldable<'tcx>>(
|
||||
.collect();
|
||||
|
||||
let mut bound_var_substitutor = NamedBoundVarSubstitutor::new(tcx, &named_parameters);
|
||||
let new_ty = ty.skip_binder().fold_with(&mut bound_var_substitutor);
|
||||
let new_ty = ty.as_ref().skip_binder().fold_with(&mut bound_var_substitutor);
|
||||
|
||||
for var in named_parameters.values() {
|
||||
parameters.insert(*var, chalk_ir::VariableKind::Lifetime);
|
||||
|
@ -1802,7 +1802,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
|
||||
// Calling `skip_binder` is okay because the predicates are re-bound.
|
||||
let regular_trait_predicates = existential_trait_refs
|
||||
.map(|trait_ref| ty::ExistentialPredicate::Trait(*trait_ref.skip_binder()));
|
||||
.map(|trait_ref| ty::ExistentialPredicate::Trait(trait_ref.skip_binder()));
|
||||
let auto_trait_predicates = auto_traits
|
||||
.into_iter()
|
||||
.map(|trait_ref| ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id()));
|
||||
@ -1810,7 +1810,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
.chain(auto_trait_predicates)
|
||||
.chain(
|
||||
existential_projections
|
||||
.map(|x| ty::ExistentialPredicate::Projection(*x.skip_binder())),
|
||||
.map(|x| ty::ExistentialPredicate::Projection(x.skip_binder())),
|
||||
)
|
||||
.collect::<SmallVec<[_; 8]>>();
|
||||
v.sort_by(|a, b| a.stable_cmp(tcx, b));
|
||||
|
@ -188,7 +188,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
ty::Infer(ty::TyVar(vid)) => self.deduce_expectations_from_obligations(vid),
|
||||
ty::FnPtr(sig) => {
|
||||
let expected_sig = ExpectedSig { cause_span: None, sig: *sig.skip_binder() };
|
||||
let expected_sig = ExpectedSig { cause_span: None, sig: sig.skip_binder() };
|
||||
(Some(expected_sig), Some(ty::ClosureKind::Fn))
|
||||
}
|
||||
_ => (None, None),
|
||||
@ -501,7 +501,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
for ((hir_ty, &supplied_ty), expected_ty) in decl
|
||||
.inputs
|
||||
.iter()
|
||||
.zip(*supplied_sig.inputs().skip_binder()) // binder moved to (*) below
|
||||
.zip(supplied_sig.inputs().skip_binder()) // binder moved to (*) below
|
||||
.zip(expected_sigs.liberated_sig.inputs())
|
||||
// `liberated_sig` is E'.
|
||||
{
|
||||
|
@ -964,7 +964,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
let sig = self
|
||||
.at(cause, self.param_env)
|
||||
.trace(prev_ty, new_ty)
|
||||
.lub(&a_sig, &b_sig)
|
||||
.lub(a_sig, b_sig)
|
||||
.map(|ok| self.register_infer_ok_obligations(ok))?;
|
||||
|
||||
// Reify both sides and return the reified fn pointer type.
|
||||
|
@ -502,7 +502,7 @@ fn compare_self_type<'tcx>(
|
||||
ty::ImplContainer(_) => impl_trait_ref.self_ty(),
|
||||
ty::TraitContainer(_) => tcx.types.self_param,
|
||||
};
|
||||
let self_arg_ty = *tcx.fn_sig(method.def_id).input(0).skip_binder();
|
||||
let self_arg_ty = tcx.fn_sig(method.def_id).input(0).skip_binder();
|
||||
let param_env = ty::ParamEnv::reveal_all();
|
||||
|
||||
tcx.infer_ctxt().enter(|infcx| {
|
||||
|
@ -227,10 +227,10 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
|
||||
let predicate_matches_closure = |p: Predicate<'tcx>| {
|
||||
let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env);
|
||||
match (predicate.kind(), p.kind()) {
|
||||
(ty::PredicateKind::Trait(a, _), ty::PredicateKind::Trait(b, _)) => {
|
||||
(&ty::PredicateKind::Trait(a, _), &ty::PredicateKind::Trait(b, _)) => {
|
||||
relator.relate(a, b).is_ok()
|
||||
}
|
||||
(ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
|
||||
(&ty::PredicateKind::Projection(a), &ty::PredicateKind::Projection(b)) => {
|
||||
relator.relate(a, b).is_ok()
|
||||
}
|
||||
_ => predicate == p,
|
||||
@ -310,8 +310,8 @@ impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
|
||||
fn relate_with_variance<T: Relate<'tcx>>(
|
||||
&mut self,
|
||||
_: ty::Variance,
|
||||
a: &T,
|
||||
b: &T,
|
||||
a: T,
|
||||
b: T,
|
||||
) -> RelateResult<'tcx, T> {
|
||||
// Here we ignore variance because we require drop impl's types
|
||||
// to be *exactly* the same as the ones in the struct definition.
|
||||
@ -354,8 +354,8 @@ impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
|
||||
|
||||
fn binders<T>(
|
||||
&mut self,
|
||||
a: &ty::Binder<T>,
|
||||
b: &ty::Binder<T>,
|
||||
a: ty::Binder<T>,
|
||||
b: ty::Binder<T>,
|
||||
) -> RelateResult<'tcx, ty::Binder<T>>
|
||||
where
|
||||
T: Relate<'tcx>,
|
||||
@ -364,8 +364,8 @@ impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
|
||||
|
||||
// Anonymizing the LBRs is necessary to solve (Issue #59497).
|
||||
// After we do so, it should be totally fine to skip the binders.
|
||||
let anon_a = self.tcx.anonymize_late_bound_regions(a);
|
||||
let anon_b = self.tcx.anonymize_late_bound_regions(b);
|
||||
let anon_a = self.tcx.anonymize_late_bound_regions(&a);
|
||||
let anon_b = self.tcx.anonymize_late_bound_regions(&b);
|
||||
self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
|
||||
|
||||
Ok(a.clone())
|
||||
|
@ -608,7 +608,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
ty::Adt(def, _) => bound_spans.push((def_span(def.did), msg)),
|
||||
// Point at the trait object that couldn't satisfy the bound.
|
||||
ty::Dynamic(preds, _) => {
|
||||
for pred in *preds.skip_binder() {
|
||||
for pred in preds.skip_binder() {
|
||||
match pred {
|
||||
ty::ExistentialPredicate::Trait(tr) => {
|
||||
bound_spans.push((def_span(tr.def_id), msg.clone()))
|
||||
|
@ -2446,7 +2446,7 @@ fn bounds_from_generic_predicates(
|
||||
/// Return placeholder code for the given function.
|
||||
fn fn_sig_suggestion(
|
||||
tcx: TyCtxt<'_>,
|
||||
sig: &ty::FnSig<'_>,
|
||||
sig: ty::FnSig<'_>,
|
||||
ident: Ident,
|
||||
predicates: ty::GenericPredicates<'_>,
|
||||
assoc: &ty::AssocItem,
|
||||
|
@ -500,7 +500,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
return false;
|
||||
}
|
||||
// We're emitting a suggestion, so we can just ignore regions
|
||||
let fn_sig = *self.tcx.fn_sig(def_id).skip_binder();
|
||||
let fn_sig = self.tcx.fn_sig(def_id).skip_binder();
|
||||
|
||||
let other_ty = if let FnDef(def_id, _) = other_ty.kind {
|
||||
if !self.tcx.has_typeck_tables(def_id) {
|
||||
|
@ -216,7 +216,7 @@ fn check_object_overlap<'tcx>(
|
||||
let component_def_ids = data.iter().flat_map(|predicate| {
|
||||
match predicate.skip_binder() {
|
||||
ty::ExistentialPredicate::Trait(tr) => Some(tr.def_id),
|
||||
ty::ExistentialPredicate::AutoTrait(def_id) => Some(*def_id),
|
||||
ty::ExistentialPredicate::AutoTrait(def_id) => Some(def_id),
|
||||
// An associated type projection necessarily comes with
|
||||
// an additional `Trait` requirement.
|
||||
ty::ExistentialPredicate::Projection(..) => None,
|
||||
|
@ -2102,11 +2102,11 @@ fn compute_sig_of_foreign_fn_decl<'tcx>(
|
||||
.emit();
|
||||
}
|
||||
};
|
||||
for (input, ty) in decl.inputs.iter().zip(*fty.inputs().skip_binder()) {
|
||||
for (input, ty) in decl.inputs.iter().zip(fty.inputs().skip_binder()) {
|
||||
check(&input, ty)
|
||||
}
|
||||
if let hir::FnRetTy::Return(ref ty) = decl.output {
|
||||
check(&ty, *fty.output().skip_binder())
|
||||
check(&ty, fty.output().skip_binder())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -347,7 +347,7 @@ impl Clean<GenericBound> for (ty::PolyTraitRef<'_>, &[TypeBinding]) {
|
||||
|
||||
GenericBound::TraitBound(
|
||||
PolyTrait {
|
||||
trait_: (*poly_trait_ref.skip_binder(), bounds).clean(cx),
|
||||
trait_: (poly_trait_ref.skip_binder(), bounds).clean(cx),
|
||||
generic_params: late_bound_regions,
|
||||
},
|
||||
hir::TraitBoundModifier::None,
|
||||
@ -549,7 +549,7 @@ impl<'tcx> Clean<Option<WherePredicate>> for ty::PolyOutlivesPredicate<Ty<'tcx>,
|
||||
|
||||
impl<'tcx> Clean<WherePredicate> for ty::PolyProjectionPredicate<'tcx> {
|
||||
fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
|
||||
let ty::ProjectionPredicate { projection_ty, ty } = *self.skip_binder();
|
||||
let ty::ProjectionPredicate { projection_ty, ty } = self.skip_binder();
|
||||
WherePredicate::EqPredicate { lhs: projection_ty.clean(cx), rhs: ty.clean(cx) }
|
||||
}
|
||||
}
|
||||
@ -1177,7 +1177,7 @@ impl Clean<Item> for ty::AssocItem {
|
||||
ty::ImplContainer(def_id) => cx.tcx.type_of(def_id),
|
||||
ty::TraitContainer(_) => cx.tcx.types.self_param,
|
||||
};
|
||||
let self_arg_ty = *sig.input(0).skip_binder();
|
||||
let self_arg_ty = sig.input(0).skip_binder();
|
||||
if self_arg_ty == self_ty {
|
||||
decl.inputs.values[0].type_ = Generic(String::from("Self"));
|
||||
} else if let ty::Ref(_, ty, _) = self_arg_ty.kind {
|
||||
@ -1679,7 +1679,7 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
|
||||
if let ty::PredicateKind::Projection(proj) = pred.kind() {
|
||||
let proj = proj.skip_binder();
|
||||
if proj.projection_ty.trait_ref(cx.tcx)
|
||||
== *trait_ref.skip_binder()
|
||||
== trait_ref.skip_binder()
|
||||
{
|
||||
Some(TypeBinding {
|
||||
name: cx
|
||||
|
@ -1281,11 +1281,84 @@ mod self_upper_keyword {}
|
||||
|
||||
#[doc(keyword = "static")]
|
||||
//
|
||||
/// A place that is valid for the duration of a program.
|
||||
/// A static item is a value which is valid for the entire duration of your
|
||||
/// program (a `'static` lifetime).
|
||||
///
|
||||
/// The documentation for this keyword is [not yet complete]. Pull requests welcome!
|
||||
/// On the surface, `static` items seem very similar to [`const`]s: both contain
|
||||
/// a value, both require type annotations and both can only be initialized with
|
||||
/// constant functions and values. However, `static`s are notably different in
|
||||
/// that they represent a location in memory. That means that you can have
|
||||
/// references to `static` items and potentially even modify them, making them
|
||||
/// essentially global variables.
|
||||
///
|
||||
/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
|
||||
/// Static items do not call [`drop`] at the end of the program.
|
||||
///
|
||||
/// There are two types of `static` items: those declared in association with
|
||||
/// the [`mut`] keyword and those without.
|
||||
///
|
||||
/// Static items cannot be moved:
|
||||
///
|
||||
/// ```rust,compile_fail,E0507
|
||||
/// static VEC: Vec<u32> = vec![];
|
||||
///
|
||||
/// fn move_vec(v: Vec<u32>) -> Vec<u32> {
|
||||
/// v
|
||||
/// }
|
||||
///
|
||||
/// // This line causes an error
|
||||
/// move_vec(VEC);
|
||||
/// ```
|
||||
///
|
||||
/// # Simple `static`s
|
||||
///
|
||||
/// Accessing non-[`mut`] `static` items is considered safe, but some
|
||||
/// restrictions apply. Most notably, the type of a `static` value needs to
|
||||
/// implement the [`Sync`] trait, ruling out interior mutability containers
|
||||
/// like [`RefCell`]. See the [Reference] for more information.
|
||||
///
|
||||
/// ```rust
|
||||
/// static FOO: [i32; 5] = [1, 2, 3, 4, 5];
|
||||
///
|
||||
/// let r1 = &FOO as *const _;
|
||||
/// let r2 = &FOO as *const _;
|
||||
/// // With a strictly read-only static, references will have the same address
|
||||
/// assert_eq!(r1, r2);
|
||||
/// // A static item can be used just like a variable in many cases
|
||||
/// println!("{:?}", FOO);
|
||||
/// ```
|
||||
///
|
||||
/// # Mutable `static`s
|
||||
///
|
||||
/// If a `static` item is declared with the [`mut`] keyword, then it is allowed
|
||||
/// to be modified by the program. However, accessing mutable `static`s can
|
||||
/// cause undefined behavior in a number of ways, for example due to data races
|
||||
/// in a multithreaded context. As such, all accesses to mutable `static`s
|
||||
/// require an [`unsafe`] block.
|
||||
///
|
||||
/// Despite their unsafety, mutable `static`s are necessary in many contexts:
|
||||
/// they can be used to represent global state shared by the whole program or in
|
||||
/// [`extern`] blocks to bind to variables from C libraries.
|
||||
///
|
||||
/// In an [`extern`] block:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// # #![allow(dead_code)]
|
||||
/// extern "C" {
|
||||
/// static mut ERROR_MESSAGE: *mut std::os::raw::c_char;
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Mutable `static`s, just like simple `static`s, have some restrictions that
|
||||
/// apply to them. See the [Reference] for more information.
|
||||
///
|
||||
/// [`const`]: keyword.const.html
|
||||
/// [`extern`]: keyword.extern.html
|
||||
/// [`mut`]: keyword.mut.html
|
||||
/// [`unsafe`]: keyword.unsafe.html
|
||||
/// [`drop`]: mem/fn.drop.html
|
||||
/// [`Sync`]: marker/trait.Sync.html
|
||||
/// [`RefCell`]: cell/struct.RefCell.html
|
||||
/// [Reference]: ../reference/items/static-items.html
|
||||
mod static_keyword {}
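To make the `unsafe` requirement described above concrete, here is a small standalone sketch (our own, not part of this diff) of reading and writing a mutable `static`:

```rust
static mut COUNTER: u32 = 0;

fn increment() {
    // Every read or write of a `static mut` must be wrapped in `unsafe`,
    // because the compiler cannot rule out data races or aliasing on its own.
    unsafe {
        COUNTER += 1;
    }
}

fn main() {
    increment();
    increment();
    let value = unsafe { COUNTER }; // copy the current value out
    println!("COUNTER = {}", value);
}
```

In most real code a `static` holding an atomic or lock type (for example `AtomicU32`) is preferable, since it gives the same `'static` lifetime without needing `unsafe` at every access site.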
|
||||
|
||||
#[doc(keyword = "struct")]
|
||||
@ -1463,9 +1536,44 @@ mod true_keyword {}
|
||||
//
|
||||
/// Define an alias for an existing type.
|
||||
///
|
||||
/// The documentation for this keyword is [not yet complete]. Pull requests welcome!
|
||||
/// The syntax is `type Name = ExistingType;`.
|
||||
///
|
||||
/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
|
||||
/// # Examples
|
||||
///
|
||||
/// `type` does **not** create a new type:
|
||||
///
|
||||
/// ```rust
|
||||
/// type Meters = u32;
|
||||
/// type Kilograms = u32;
|
||||
///
|
||||
/// let m: Meters = 3;
|
||||
/// let k: Kilograms = 3;
|
||||
///
|
||||
/// assert_eq!(m, k);
|
||||
/// ```
|
||||
///
|
||||
/// In traits, `type` is used to declare an [associated type]:
|
||||
///
|
||||
/// ```rust
|
||||
/// trait Iterator {
|
||||
/// // associated type declaration
|
||||
/// type Item;
|
||||
/// fn next(&mut self) -> Option<Self::Item>;
|
||||
/// }
|
||||
///
|
||||
/// struct Once<T>(Option<T>);
|
||||
///
|
||||
/// impl<T> Iterator for Once<T> {
|
||||
/// // associated type definition
|
||||
/// type Item = T;
|
||||
/// fn next(&mut self) -> Option<Self::Item> {
|
||||
/// self.0.take()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// [`trait`]: keyword.trait.html
|
||||
/// [associated type]: ../reference/items/associated-items.html#associated-types
|
||||
mod type_keyword {}
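For contrast with the alias examples above, a short sketch of our own (not part of this diff) showing the usual way to get a genuinely distinct type, which `type` alone does not provide:

```rust
// Newtype wrappers: unlike `type Meters = u32;`, these are separate types.
struct Meters(u32);
struct Kilograms(u32);

fn main() {
    let m = Meters(3);
    let k = Kilograms(3);
    // The inner values can still be compared, but `m == k` would not compile,
    // so units can no longer be mixed up silently.
    assert_eq!(m.0, k.0);
}
```

Because `Meters` and `Kilograms` are distinct types, confusing them becomes a compile-time error rather than a silent bug.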
|
||||
|
||||
#[doc(keyword = "unsafe")]
|
||||
|
@ -316,7 +316,7 @@
|
||||
#![feature(toowned_clone_into)]
|
||||
#![feature(total_cmp)]
|
||||
#![feature(trace_macros)]
|
||||
#![feature(track_caller)]
|
||||
#![cfg_attr(bootstrap, feature(track_caller))]
|
||||
#![feature(try_reserve)]
|
||||
#![feature(unboxed_closures)]
|
||||
#![feature(untagged_unions)]
|
||||
|
@ -10,163 +10,132 @@ use crate::str::FromStr;
|
||||
|
||||
struct Parser<'a> {
|
||||
// parsing as ASCII, so can use byte array
|
||||
s: &'a [u8],
|
||||
pos: usize,
|
||||
state: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
fn new(s: &'a str) -> Parser<'a> {
|
||||
Parser { s: s.as_bytes(), pos: 0 }
|
||||
fn new(input: &'a str) -> Parser<'a> {
|
||||
Parser { state: input.as_bytes() }
|
||||
}
|
||||
|
||||
fn is_eof(&self) -> bool {
|
||||
self.pos == self.s.len()
|
||||
self.state.is_empty()
|
||||
}
|
||||
|
||||
// Commit only if parser returns Some
|
||||
fn read_atomically<T, F>(&mut self, cb: F) -> Option<T>
|
||||
/// Run a parser, and restore the pre-parse state if it fails
|
||||
fn read_atomically<T, F>(&mut self, inner: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce(&mut Parser<'_>) -> Option<T>,
|
||||
{
|
||||
let pos = self.pos;
|
||||
let r = cb(self);
|
||||
if r.is_none() {
|
||||
self.pos = pos;
|
||||
let state = self.state;
|
||||
let result = inner(self);
|
||||
if result.is_none() {
|
||||
self.state = state;
|
||||
}
|
||||
r
|
||||
result
|
||||
}
|
||||
|
||||
// Commit only if parser read till EOF
|
||||
fn read_till_eof<T, F>(&mut self, cb: F) -> Option<T>
|
||||
/// Run a parser, but fail if the entire input wasn't consumed.
|
||||
/// Doesn't run atomically.
|
||||
fn read_till_eof<T, F>(&mut self, inner: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce(&mut Parser<'_>) -> Option<T>,
|
||||
{
|
||||
self.read_atomically(move |p| cb(p).filter(|_| p.is_eof()))
|
||||
inner(self).filter(|_| self.is_eof())
|
||||
}
|
||||
|
||||
// Apply 3 parsers sequentially
|
||||
fn read_seq_3<A, B, C, PA, PB, PC>(&mut self, pa: PA, pb: PB, pc: PC) -> Option<(A, B, C)>
|
||||
/// Same as read_till_eof, but returns an AddrParseError on failure
|
||||
fn parse_with<T, F>(&mut self, inner: F) -> Result<T, AddrParseError>
|
||||
where
|
||||
PA: FnOnce(&mut Parser<'_>) -> Option<A>,
|
||||
PB: FnOnce(&mut Parser<'_>) -> Option<B>,
|
||||
PC: FnOnce(&mut Parser<'_>) -> Option<C>,
|
||||
F: FnOnce(&mut Parser<'_>) -> Option<T>,
|
||||
{
|
||||
self.read_till_eof(inner).ok_or(AddrParseError(()))
|
||||
}
|
||||
|
||||
/// Read the next character from the input
|
||||
fn read_char(&mut self) -> Option<char> {
|
||||
self.state.split_first().map(|(&b, tail)| {
|
||||
self.state = tail;
|
||||
b as char
|
||||
})
|
||||
}
|
||||
|
||||
/// Read the next character from the input if it matches the target
|
||||
fn read_given_char(&mut self, target: char) -> Option<char> {
|
||||
self.read_atomically(|p| p.read_char().filter(|&c| c == target))
|
||||
}
|
||||
|
||||
/// Helper for reading separators in an indexed loop. Reads the separator
|
||||
/// character iff index > 0, then runs the parser. When used in a loop,
|
||||
/// the separator character will only be read on index > 0 (see
|
||||
/// read_ipv4_addr for an example)
|
||||
fn read_separator<T, F>(&mut self, sep: char, index: usize, inner: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce(&mut Parser<'_>) -> Option<T>,
|
||||
{
|
||||
self.read_atomically(move |p| {
|
||||
let a = pa(p);
|
||||
let b = if a.is_some() { pb(p) } else { None };
|
||||
let c = if b.is_some() { pc(p) } else { None };
|
||||
match (a, b, c) {
|
||||
(Some(a), Some(b), Some(c)) => Some((a, b, c)),
|
||||
_ => None,
|
||||
if index > 0 {
|
||||
let _ = p.read_given_char(sep)?;
|
||||
}
|
||||
inner(p)
|
||||
})
|
||||
}
|
||||
|
||||
// Read next char
|
||||
fn read_char(&mut self) -> Option<char> {
|
||||
if self.is_eof() {
|
||||
None
|
||||
} else {
|
||||
let r = self.s[self.pos] as char;
|
||||
self.pos += 1;
|
||||
Some(r)
|
||||
}
|
||||
// Read a single digit in the given radix. For instance, 0-9 in radix 10;
|
||||
// 0-9A-F in radix 16.
|
||||
fn read_digit(&mut self, radix: u32) -> Option<u32> {
|
||||
self.read_atomically(move |p| p.read_char()?.to_digit(radix))
|
||||
}
|
||||
|
||||
// Return char and advance iff next char is equal to requested
|
||||
fn read_given_char(&mut self, c: char) -> Option<char> {
|
||||
self.read_atomically(|p| match p.read_char() {
|
||||
Some(next) if next == c => Some(next),
|
||||
_ => None,
|
||||
// Read a number off the front of the input in the given radix, stopping
|
||||
// at the first non-digit character or eof. Fails if the number has more
|
||||
// digits than max_digits, or the value is >= upto, or if there is no number.
|
||||
fn read_number(&mut self, radix: u32, max_digits: u32, upto: u32) -> Option<u32> {
|
||||
self.read_atomically(move |p| {
|
||||
let mut result = 0;
|
||||
let mut digit_count = 0;
|
||||
|
||||
while let Some(digit) = p.read_digit(radix) {
|
||||
result = (result * radix) + digit;
|
||||
digit_count += 1;
|
||||
if digit_count > max_digits || result >= upto {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
if digit_count == 0 { None } else { Some(result) }
|
||||
})
|
||||
}
|
||||
|
||||
// Read digit
|
||||
fn read_digit(&mut self, radix: u8) -> Option<u8> {
|
||||
fn parse_digit(c: char, radix: u8) -> Option<u8> {
|
||||
let c = c as u8;
|
||||
// assuming radix is either 10 or 16
|
||||
if c >= b'0' && c <= b'9' {
|
||||
Some(c - b'0')
|
||||
} else if radix > 10 && c >= b'a' && c < b'a' + (radix - 10) {
|
||||
Some(c - b'a' + 10)
|
||||
} else if radix > 10 && c >= b'A' && c < b'A' + (radix - 10) {
|
||||
Some(c - b'A' + 10)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
self.read_atomically(|p| p.read_char().and_then(|c| parse_digit(c, radix)))
|
||||
}
|
||||
|
||||
fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {
|
||||
let mut r = 0;
|
||||
let mut digit_count = 0;
|
||||
loop {
|
||||
match self.read_digit(radix) {
|
||||
Some(d) => {
|
||||
r = r * (radix as u32) + (d as u32);
|
||||
digit_count += 1;
|
||||
if digit_count > max_digits || r >= upto {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if digit_count == 0 {
|
||||
return None;
|
||||
} else {
|
||||
return Some(r);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Read number, failing if max_digits of number value exceeded
|
||||
fn read_number(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {
|
||||
self.read_atomically(|p| p.read_number_impl(radix, max_digits, upto))
|
||||
}
|
||||
|
||||
fn read_ipv4_addr_impl(&mut self) -> Option<Ipv4Addr> {
|
||||
let mut bs = [0; 4];
|
||||
let mut i = 0;
|
||||
while i < 4 {
|
||||
if i != 0 && self.read_given_char('.').is_none() {
|
||||
return None;
|
||||
}
|
||||
|
||||
bs[i] = self.read_number(10, 3, 0x100).map(|n| n as u8)?;
|
||||
i += 1;
|
||||
}
|
||||
Some(Ipv4Addr::new(bs[0], bs[1], bs[2], bs[3]))
|
||||
}
|
||||
|
||||
// Read IPv4 address
|
||||
/// Read an IPv4 address
|
||||
fn read_ipv4_addr(&mut self) -> Option<Ipv4Addr> {
|
||||
self.read_atomically(|p| p.read_ipv4_addr_impl())
|
||||
self.read_atomically(|p| {
|
||||
let mut groups = [0; 4];
|
||||
|
||||
for (i, slot) in groups.iter_mut().enumerate() {
|
||||
*slot = p.read_separator('.', i, |p| p.read_number(10, 3, 0x100))? as u8;
|
||||
}
|
||||
|
||||
Some(groups.into())
|
||||
})
|
||||
}
|
||||
|
||||
fn read_ipv6_addr_impl(&mut self) -> Option<Ipv6Addr> {
|
||||
fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> Ipv6Addr {
|
||||
assert!(head.len() + tail.len() <= 8);
|
||||
let mut gs = [0; 8];
|
||||
gs[..head.len()].copy_from_slice(head);
|
||||
gs[(8 - tail.len())..8].copy_from_slice(tail);
|
||||
Ipv6Addr::new(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7])
|
||||
}
|
||||
/// Read an IPv6 address
|
||||
fn read_ipv6_addr(&mut self) -> Option<Ipv6Addr> {
|
||||
/// Read a chunk of an ipv6 address into `groups`. Returns the number
|
||||
/// of groups read, along with a bool indicating if an embedded
|
||||
/// trailing ipv4 address was read. Specifically, read a series of
|
||||
/// colon-separated ipv6 groups (0x0000 - 0xFFFF), with an optional
|
||||
/// trailing embedded ipv4 address.
|
||||
fn read_groups(p: &mut Parser<'_>, groups: &mut [u16]) -> (usize, bool) {
|
||||
let limit = groups.len();
|
||||
|
||||
fn read_groups(p: &mut Parser<'_>, groups: &mut [u16; 8], limit: usize) -> (usize, bool) {
|
||||
let mut i = 0;
|
||||
while i < limit {
|
||||
for (i, slot) in groups.iter_mut().enumerate() {
|
||||
// Try to read a trailing embedded ipv4 address. There must be
|
||||
// at least two groups left.
|
||||
if i < limit - 1 {
|
||||
let ipv4 = p.read_atomically(|p| {
|
||||
if i == 0 || p.read_given_char(':').is_some() {
|
||||
p.read_ipv4_addr()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
let ipv4 = p.read_separator(':', i, |p| p.read_ipv4_addr());
|
||||
|
||||
if let Some(v4_addr) = ipv4 {
|
||||
let octets = v4_addr.octets();
|
||||
groups[i + 0] = ((octets[0] as u16) << 8) | (octets[1] as u16);
|
||||
@ -175,83 +144,85 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
let group = p.read_atomically(|p| {
|
||||
if i == 0 || p.read_given_char(':').is_some() {
|
||||
p.read_number(16, 4, 0x10000).map(|n| n as u16)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
let group = p.read_separator(':', i, |p| p.read_number(16, 4, 0x10000));
|
||||
|
||||
match group {
|
||||
Some(g) => groups[i] = g,
|
||||
Some(g) => *slot = g as u16,
|
||||
None => return (i, false),
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
(i, false)
|
||||
(groups.len(), false)
|
||||
}
|
||||
|
||||
let mut head = [0; 8];
|
||||
let (head_size, head_ipv4) = read_groups(self, &mut head, 8);
|
||||
self.read_atomically(|p| {
|
||||
// Read the front part of the address; either the whole thing, or up
|
||||
// to the first ::
|
||||
let mut head = [0; 8];
|
||||
let (head_size, head_ipv4) = read_groups(p, &mut head);
|
||||
|
||||
if head_size == 8 {
|
||||
return Some(Ipv6Addr::new(
|
||||
head[0], head[1], head[2], head[3], head[4], head[5], head[6], head[7],
|
||||
));
|
||||
}
|
||||
if head_size == 8 {
|
||||
return Some(head.into());
|
||||
}
|
||||
|
||||
// IPv4 part is not allowed before `::`
|
||||
if head_ipv4 {
|
||||
return None;
|
||||
}
|
||||
// IPv4 part is not allowed before `::`
|
||||
if head_ipv4 {
|
||||
return None;
|
||||
}
|
||||
|
||||
// read `::` if previous code parsed less than 8 groups
|
||||
if self.read_given_char(':').is_none() || self.read_given_char(':').is_none() {
|
||||
return None;
|
||||
}
|
||||
// read `::` if previous code parsed less than 8 groups
|
||||
// `::` indicates one or more groups of 16 bits of zeros
|
||||
let _ = p.read_given_char(':')?;
|
||||
let _ = p.read_given_char(':')?;
|
||||
|
||||
let mut tail = [0; 8];
|
||||
// `::` indicates one or more groups of 16 bits of zeros
|
||||
let limit = 8 - (head_size + 1);
|
||||
let (tail_size, _) = read_groups(self, &mut tail, limit);
|
||||
Some(ipv6_addr_from_head_tail(&head[..head_size], &tail[..tail_size]))
|
||||
}
|
||||
|
||||
fn read_ipv6_addr(&mut self) -> Option<Ipv6Addr> {
|
||||
self.read_atomically(|p| p.read_ipv6_addr_impl())
|
||||
// Read the back part of the address. The :: must contain at least one
|
||||
// set of zeroes, so our max length is 7.
|
||||
let mut tail = [0; 7];
|
||||
let limit = 8 - (head_size + 1);
|
||||
let (tail_size, _) = read_groups(p, &mut tail[..limit]);
|
||||
|
||||
// Concat the head and tail of the IP address
|
||||
head[(8 - tail_size)..8].copy_from_slice(&tail[..tail_size]);
|
||||
|
||||
Some(head.into())
|
||||
})
|
||||
}
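As a worked illustration of the head/tail concatenation performed above, here is a standalone sketch (the `join_head_tail` helper is hypothetical, not code from this diff):

```rust
// Groups read before the `::` fill from the left, groups read after it fill
// from the right, and everything in between stays zero.
fn join_head_tail(head: &[u16], tail: &[u16]) -> [u16; 8] {
    assert!(head.len() + tail.len() <= 8);
    let mut groups = [0u16; 8];
    groups[..head.len()].copy_from_slice(head);
    groups[8 - tail.len()..].copy_from_slice(tail);
    groups
}

fn main() {
    // "2001:db8::c0a8:1": head = [0x2001, 0x0db8], tail = [0xc0a8, 0x0001]
    let groups = join_head_tail(&[0x2001, 0x0db8], &[0xc0a8, 0x0001]);
    assert_eq!(groups, [0x2001, 0x0db8, 0, 0, 0, 0, 0xc0a8, 0x0001]);
}
```

This is exactly the layout the parser builds for "2001:db8::c0a8:1": two groups before the `::`, two after, and zeros in between.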
|
||||
|
||||
/// Read an IP address, either IPv4 or IPv6.
|
||||
fn read_ip_addr(&mut self) -> Option<IpAddr> {
|
||||
self.read_ipv4_addr().map(IpAddr::V4).or_else(|| self.read_ipv6_addr().map(IpAddr::V6))
|
||||
self.read_ipv4_addr().map(IpAddr::V4).or_else(move || self.read_ipv6_addr().map(IpAddr::V6))
|
||||
}
|
||||
|
||||
/// Read a : followed by a port in base 10
|
||||
fn read_port(&mut self) -> Option<u16> {
|
||||
self.read_atomically(|p| {
|
||||
let _ = p.read_given_char(':')?;
|
||||
let port = p.read_number(10, 5, 0x10000)?;
|
||||
Some(port as u16)
|
||||
})
|
||||
}
|
||||
|
||||
/// Read an IPv4 address with a port
|
||||
fn read_socket_addr_v4(&mut self) -> Option<SocketAddrV4> {
|
||||
let ip_addr = |p: &mut Parser<'_>| p.read_ipv4_addr();
|
||||
let colon = |p: &mut Parser<'_>| p.read_given_char(':');
|
||||
let port = |p: &mut Parser<'_>| p.read_number(10, 5, 0x10000).map(|n| n as u16);
|
||||
|
||||
self.read_seq_3(ip_addr, colon, port).map(|t| {
|
||||
let (ip, _, port): (Ipv4Addr, char, u16) = t;
|
||||
SocketAddrV4::new(ip, port)
|
||||
self.read_atomically(|p| {
|
||||
let ip = p.read_ipv4_addr()?;
|
||||
let port = p.read_port()?;
|
||||
Some(SocketAddrV4::new(ip, port))
|
||||
})
|
||||
}
|
||||
|
||||
/// Read an IPv6 address with a port
|
||||
fn read_socket_addr_v6(&mut self) -> Option<SocketAddrV6> {
|
||||
let ip_addr = |p: &mut Parser<'_>| {
|
||||
let open_br = |p: &mut Parser<'_>| p.read_given_char('[');
|
||||
let ip_addr = |p: &mut Parser<'_>| p.read_ipv6_addr();
|
||||
let clos_br = |p: &mut Parser<'_>| p.read_given_char(']');
|
||||
p.read_seq_3(open_br, ip_addr, clos_br).map(|t| t.1)
|
||||
};
|
||||
let colon = |p: &mut Parser<'_>| p.read_given_char(':');
|
||||
let port = |p: &mut Parser<'_>| p.read_number(10, 5, 0x10000).map(|n| n as u16);
|
||||
self.read_atomically(|p| {
|
||||
let _ = p.read_given_char('[')?;
|
||||
let ip = p.read_ipv6_addr()?;
|
||||
let _ = p.read_given_char(']')?;
|
||||
|
||||
self.read_seq_3(ip_addr, colon, port).map(|t| {
|
||||
let (ip, _, port): (Ipv6Addr, char, u16) = t;
|
||||
SocketAddrV6::new(ip, port, 0, 0)
|
||||
let port = p.read_port()?;
|
||||
Some(SocketAddrV6::new(ip, port, 0, 0))
|
||||
})
|
||||
}
|
||||
|
||||
/// Read an IP address with a port
|
||||
fn read_socket_addr(&mut self) -> Option<SocketAddr> {
|
||||
self.read_socket_addr_v4()
|
||||
.map(SocketAddr::V4)
|
||||
@ -263,10 +234,7 @@ impl<'a> Parser<'a> {
|
||||
impl FromStr for IpAddr {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<IpAddr, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_ip_addr()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_ip_addr())
|
||||
}
|
||||
}
|
||||
|
||||
@ -274,10 +242,7 @@ impl FromStr for IpAddr {
|
||||
impl FromStr for Ipv4Addr {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<Ipv4Addr, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_ipv4_addr()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_ipv4_addr())
|
||||
}
|
||||
}
|
||||
|
||||
@ -285,10 +250,7 @@ impl FromStr for Ipv4Addr {
|
||||
impl FromStr for Ipv6Addr {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<Ipv6Addr, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_ipv6_addr()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_ipv6_addr())
|
||||
}
|
||||
}
|
||||
|
||||
@ -296,10 +258,7 @@ impl FromStr for Ipv6Addr {
|
||||
impl FromStr for SocketAddrV4 {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<SocketAddrV4, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_socket_addr_v4()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_socket_addr_v4())
|
||||
}
|
||||
}
|
||||
|
||||
@ -307,10 +266,7 @@ impl FromStr for SocketAddrV4 {
|
||||
impl FromStr for SocketAddrV6 {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<SocketAddrV6, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_socket_addr_v6()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_socket_addr_v6())
|
||||
}
|
||||
}
|
||||
|
||||
@ -318,10 +274,7 @@ impl FromStr for SocketAddrV6 {
|
||||
impl FromStr for SocketAddr {
|
||||
type Err = AddrParseError;
|
||||
fn from_str(s: &str) -> Result<SocketAddr, AddrParseError> {
|
||||
match Parser::new(s).read_till_eof(|p| p.read_socket_addr()) {
|
||||
Some(s) => Ok(s),
|
||||
None => Err(AddrParseError(())),
|
||||
}
|
||||
Parser::new(s).parse_with(|p| p.read_socket_addr())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -376,3 +329,146 @@ impl Error for AddrParseError {
        "invalid IP address syntax"
    }
}

#[cfg(test)]
mod tests {
    // FIXME: These tests are all excellent candidates for AFL fuzz testing
    use crate::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6};
    use crate::str::FromStr;

    const PORT: u16 = 8080;

    const IPV4: Ipv4Addr = Ipv4Addr::new(192, 168, 0, 1);
    const IPV4_STR: &str = "192.168.0.1";
    const IPV4_STR_PORT: &str = "192.168.0.1:8080";

    const IPV6: Ipv6Addr = Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0xc0a8, 0x1);
    const IPV6_STR_FULL: &str = "2001:db8:0:0:0:0:c0a8:1";
    const IPV6_STR_COMPRESS: &str = "2001:db8::c0a8:1";
    const IPV6_STR_V4: &str = "2001:db8::192.168.0.1";
    const IPV6_STR_PORT: &str = "[2001:db8::c0a8:1]:8080";

    #[test]
    fn parse_ipv4() {
        let result: Ipv4Addr = IPV4_STR.parse().unwrap();
        assert_eq!(result, IPV4);

        assert!(Ipv4Addr::from_str(IPV4_STR_PORT).is_err());
        assert!(Ipv4Addr::from_str(IPV6_STR_FULL).is_err());
        assert!(Ipv4Addr::from_str(IPV6_STR_COMPRESS).is_err());
        assert!(Ipv4Addr::from_str(IPV6_STR_V4).is_err());
        assert!(Ipv4Addr::from_str(IPV6_STR_PORT).is_err());
    }

    #[test]
    fn parse_ipv6() {
        let result: Ipv6Addr = IPV6_STR_FULL.parse().unwrap();
        assert_eq!(result, IPV6);

        let result: Ipv6Addr = IPV6_STR_COMPRESS.parse().unwrap();
        assert_eq!(result, IPV6);

        let result: Ipv6Addr = IPV6_STR_V4.parse().unwrap();
        assert_eq!(result, IPV6);

        assert!(Ipv6Addr::from_str(IPV4_STR).is_err());
        assert!(Ipv6Addr::from_str(IPV4_STR_PORT).is_err());
        assert!(Ipv6Addr::from_str(IPV6_STR_PORT).is_err());
    }

    #[test]
    fn parse_ip() {
        let result: IpAddr = IPV4_STR.parse().unwrap();
        assert_eq!(result, IpAddr::from(IPV4));

        let result: IpAddr = IPV6_STR_FULL.parse().unwrap();
        assert_eq!(result, IpAddr::from(IPV6));

        let result: IpAddr = IPV6_STR_COMPRESS.parse().unwrap();
        assert_eq!(result, IpAddr::from(IPV6));

        let result: IpAddr = IPV6_STR_V4.parse().unwrap();
        assert_eq!(result, IpAddr::from(IPV6));

        assert!(IpAddr::from_str(IPV4_STR_PORT).is_err());
        assert!(IpAddr::from_str(IPV6_STR_PORT).is_err());
    }

    #[test]
    fn parse_socket_v4() {
        let result: SocketAddrV4 = IPV4_STR_PORT.parse().unwrap();
        assert_eq!(result, SocketAddrV4::new(IPV4, PORT));

        assert!(SocketAddrV4::from_str(IPV4_STR).is_err());
        assert!(SocketAddrV4::from_str(IPV6_STR_FULL).is_err());
        assert!(SocketAddrV4::from_str(IPV6_STR_COMPRESS).is_err());
        assert!(SocketAddrV4::from_str(IPV6_STR_V4).is_err());
        assert!(SocketAddrV4::from_str(IPV6_STR_PORT).is_err());
    }

    #[test]
    fn parse_socket_v6() {
        let result: SocketAddrV6 = IPV6_STR_PORT.parse().unwrap();
        assert_eq!(result, SocketAddrV6::new(IPV6, PORT, 0, 0));

        assert!(SocketAddrV6::from_str(IPV4_STR).is_err());
        assert!(SocketAddrV6::from_str(IPV4_STR_PORT).is_err());
        assert!(SocketAddrV6::from_str(IPV6_STR_FULL).is_err());
        assert!(SocketAddrV6::from_str(IPV6_STR_COMPRESS).is_err());
        assert!(SocketAddrV6::from_str(IPV6_STR_V4).is_err());
    }

    #[test]
    fn parse_socket() {
        let result: SocketAddr = IPV4_STR_PORT.parse().unwrap();
        assert_eq!(result, SocketAddr::from((IPV4, PORT)));

        let result: SocketAddr = IPV6_STR_PORT.parse().unwrap();
        assert_eq!(result, SocketAddr::from((IPV6, PORT)));

        assert!(SocketAddr::from_str(IPV4_STR).is_err());
        assert!(SocketAddr::from_str(IPV6_STR_FULL).is_err());
        assert!(SocketAddr::from_str(IPV6_STR_COMPRESS).is_err());
        assert!(SocketAddr::from_str(IPV6_STR_V4).is_err());
    }

    #[test]
    fn ipv6_corner_cases() {
        let result: Ipv6Addr = "1::".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(1, 0, 0, 0, 0, 0, 0, 0));

        let result: Ipv6Addr = "1:1::".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(1, 1, 0, 0, 0, 0, 0, 0));

        let result: Ipv6Addr = "::1".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1));

        let result: Ipv6Addr = "::1:1".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 1, 1));

        let result: Ipv6Addr = "::".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0));

        let result: Ipv6Addr = "::192.168.0.1".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0xc0a8, 0x1));

        let result: Ipv6Addr = "::1:192.168.0.1".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(0, 0, 0, 0, 0, 1, 0xc0a8, 0x1));

        let result: Ipv6Addr = "1:1:1:1:1:1:192.168.0.1".parse().unwrap();
        assert_eq!(result, Ipv6Addr::new(1, 1, 1, 1, 1, 1, 0xc0a8, 0x1));
    }

    // Things that might not seem like failures but are
    #[test]
    fn ipv6_corner_failures() {
        // No IP address before the ::
        assert!(Ipv6Addr::from_str("1:192.168.0.1::").is_err());

        // :: must have at least 1 set of zeroes
        assert!(Ipv6Addr::from_str("1:1:1:1::1:1:1:1").is_err());

        // Need brackets for a port
        assert!(SocketAddrV6::from_str("1:1:1:1:1:1:1:1:8080").is_err());
    }
}
@@ -61,6 +61,7 @@ pub fn decode_error_kind(errno: i32) -> ErrorKind {
        c::ERROR_FILE_NOT_FOUND => return ErrorKind::NotFound,
        c::ERROR_PATH_NOT_FOUND => return ErrorKind::NotFound,
        c::ERROR_NO_DATA => return ErrorKind::BrokenPipe,
        c::ERROR_INVALID_PARAMETER => return ErrorKind::InvalidInput,
        c::ERROR_SEM_TIMEOUT
        | c::WAIT_TIMEOUT
        | c::ERROR_DRIVER_CANCEL_TIMEOUT
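As a hedged illustration of the new `ERROR_INVALID_PARAMETER` arm (assuming the conventional Windows error code 87 for that constant), an invalid-parameter OS error should now report `ErrorKind::InvalidInput` rather than `Other`:

```rust
#[cfg(windows)]
fn main() {
    use std::io::{Error, ErrorKind};

    // 87 is the usual value of ERROR_INVALID_PARAMETER on Windows (assumption).
    let err = Error::from_raw_os_error(87);
    assert_eq!(err.kind(), ErrorKind::InvalidInput);
}

#[cfg(not(windows))]
fn main() {
    // The remapping above is Windows-specific; nothing to demonstrate elsewhere.
}
```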
@@ -9,7 +9,7 @@ help: if this is a `self` type, give it a parameter name
|
LL | fn foo(self: i32);
| ^^^^^^^^^
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn foo(i32: TypeName);
| ^^^^^^^^^^^^^

@@ -29,7 +29,7 @@ help: if this is a `self` type, give it a parameter name
|
LL | fn bar_with_default_impl(self: String, String) {}
| ^^^^^^^^^^^^
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn bar_with_default_impl(String: TypeName, String) {}
| ^^^^^^^^^^^^^^^^

@@ -45,7 +45,7 @@ LL | fn bar_with_default_impl(String, String) {}
| ^ expected one of `:`, `@`, or `|`
|
= note: anonymous parameters are removed in the 2018 edition (see RFC 1685)
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn bar_with_default_impl(String, String: TypeName) {}
| ^^^^^^^^^^^^^^^^

@@ -61,7 +61,7 @@ LL | fn baz(a:usize, b, c: usize) -> usize {
| ^ expected one of `:`, `@`, or `|`
|
= note: anonymous parameters are removed in the 2018 edition (see RFC 1685)
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn baz(a:usize, b: TypeName, c: usize) -> usize {
| ^^^^^^^^^^^
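The hunks above only reword compiler suggestions; the underlying rule is unchanged. A small sketch of what the updated help asks for in the 2018 edition (illustrative trait and names, not taken from the diff):

```rust
#[allow(dead_code)]
trait Foo {
    fn named(x: i32);   // give the parameter a name and a type
    fn ignored(_: i32); // or explicitly ignore the name
}

fn main() {}
```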
@@ -3,7 +3,7 @@
// only-linux
// run-pass

#![feature(asm, track_caller, thread_local)]
#![feature(asm, thread_local)]

extern "C" fn f1() -> i32 {
    111
16
src/test/ui/async-await/issue-67765-async-diagnostic.rs
Normal file
@@ -0,0 +1,16 @@
// edition:2018
//
// Regression test for issue #67765
// Tests that we point at the proper location when giving
// a lifetime error.
fn main() {}

async fn func<'a>() -> Result<(), &'a str> {
    let s = String::new();

    let b = &s[..];

    Err(b)?; //~ ERROR cannot return value referencing local variable `s`

    Ok(())
}
12
src/test/ui/async-await/issue-67765-async-diagnostic.stderr
Normal file
@@ -0,0 +1,12 @@
error[E0515]: cannot return value referencing local variable `s`
--> $DIR/issue-67765-async-diagnostic.rs:13:11
|
LL | let b = &s[..];
| - `s` is borrowed here
LL |
LL | Err(b)?;
| ^ returns a value referencing data owned by the current function

error: aborting due to previous error

For more information about this error, try `rustc --explain E0515`.
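The new test above pins down where the E0515 diagnostic points. For context, a hedged sketch of one way such code avoids the error, by returning owned data instead of a borrow of the local `s` (names are illustrative, not from the diff):

```rust
// edition 2018 or later
async fn func() -> Result<(), String> {
    let s = String::new();
    let b = &s[..];
    // Owning the data before returning it avoids borrowing the local `s`.
    return Err(b.to_owned());
}

fn main() {
    // Illustrative only; the future is created but not polled here.
    let _ = func();
}
```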
@@ -1,5 +0,0 @@
#[track_caller]
fn f() {}
//~^^ ERROR the `#[track_caller]` attribute is an experimental feature

fn main() {}
@@ -1,12 +0,0 @@
error[E0658]: the `#[track_caller]` attribute is an experimental feature
--> $DIR/feature-gate-track_caller.rs:1:1
|
LL | #[track_caller]
| ^^^^^^^^^^^^^^^
|
= note: see issue #47809 <https://github.com/rust-lang/rust/issues/47809> for more information
= help: add `#![feature(track_caller)]` to the crate attributes to enable

error: aborting due to previous error

For more information about this error, try `rustc --explain E0658`.
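These deletions follow from #72445 stabilizing `#[track_caller]`, so the feature-gate test and its stderr fixture are no longer needed. A minimal sketch of the now-stable attribute (not taken from the diff):

```rust
use std::panic::Location;

#[track_caller]
fn whereami() -> &'static Location<'static> {
    // With `#[track_caller]`, this reports the caller's source location.
    Location::caller()
}

fn main() {
    let loc = whereami();
    println!("called from {}:{}", loc.file(), loc.line());
}
```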
@@ -24,3 +24,4 @@ LL | fn bar(&self, x: fn(&'a u32)) {}

error: aborting due to 4 previous errors

For more information about this error, try `rustc --explain E0687`.

@@ -12,3 +12,4 @@ LL | fn baz(x: &impl Fn(&'a u32)) {}

error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0687`.
@@ -1,3 +1,3 @@
fn main() {
    [0].iter().flat_map(|a| [0].iter().map(|_| &a)); //~ ERROR `a` does not live long enough
    [0].iter().flat_map(|a| [0].iter().map(|_| &a)); //~ ERROR closure may outlive
}
@@ -1,15 +1,21 @@
error[E0597]: `a` does not live long enough
--> $DIR/unnamed-closure-doesnt-life-long-enough-issue-67634.rs:2:49
error[E0373]: closure may outlive the current function, but it borrows `a`, which is owned by the current function
--> $DIR/unnamed-closure-doesnt-life-long-enough-issue-67634.rs:2:44
|
LL | [0].iter().flat_map(|a| [0].iter().map(|_| &a));
| - ^- ...but `a` will be dropped here, when the enclosing closure returns
| | |
| | `a` would have to be valid for `'_`...
| has type `&i32`
| ^^^ - `a` is borrowed here
| |
| may outlive borrowed value `a`
|
= note: functions cannot return a borrow to data owned within the function's scope, functions can only return borrows to data passed as arguments
= note: to learn more, visit <https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html#dangling-references>
note: closure is returned here
--> $DIR/unnamed-closure-doesnt-life-long-enough-issue-67634.rs:2:29
|
LL | [0].iter().flat_map(|a| [0].iter().map(|_| &a));
| ^^^^^^^^^^^^^^^^^^^^^^
help: to force the closure to take ownership of `a` (and any other referenced variables), use the `move` keyword
|
LL | [0].iter().flat_map(|a| [0].iter().map(move |_| &a));
| ^^^^^^^^

error: aborting due to previous error

For more information about this error, try `rustc --explain E0597`.
For more information about this error, try `rustc --explain E0373`.
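The new E0373 output suggests `move`. A hedged sketch of the pattern that suggestion leads to, with the arrays bound to locals so the resulting references have somewhere to live (illustrative code, not the test itself):

```rust
fn main() {
    let outer = [0];
    let inner = [0];
    // `a` is a `&i32`, which is `Copy`; `move` copies it into the inner closure,
    // so the iterator returned from `flat_map` no longer borrows closure-local state.
    let collected: Vec<&i32> = outer
        .iter()
        .flat_map(|a| inner.iter().map(move |_| a))
        .collect();
    assert_eq!(collected, vec![&0]);
}
```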
@@ -1,5 +1,3 @@
#![feature(track_caller)]

fn main() {
    (0..)
        .map(
@@ -1,5 +1,5 @@
error[E0658]: `#[target_feature(..)]` can only be applied to `unsafe` functions
--> $DIR/issue-68060.rs:6:13
--> $DIR/issue-68060.rs:4:13
|
LL | #[target_feature(enable = "")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -11,13 +11,13 @@ LL | |_| (),
= help: add `#![feature(target_feature_11)]` to the crate attributes to enable

error: the feature named `` is not valid for this target
--> $DIR/issue-68060.rs:6:30
--> $DIR/issue-68060.rs:4:30
|
LL | #[target_feature(enable = "")]
| ^^^^^^^^^^^ `` is not valid for this target

error[E0737]: `#[track_caller]` requires Rust ABI
--> $DIR/issue-68060.rs:9:13
--> $DIR/issue-68060.rs:7:13
|
LL | #[track_caller]
| ^^^^^^^^^^^^^^^
@@ -9,7 +9,6 @@
// merged.

#![feature(test, stmt_expr_attributes)]
#![feature(track_caller)]
#![deny(overflowing_literals)]
extern crate test;

@@ -20,7 +20,7 @@ fn pattern((i32, i32) (a, b)) {}

fn fizz(i32) {}
//~^ ERROR expected one of `:`, `@`
//~| HELP if this was a parameter name, give it a type
//~| HELP if this is a parameter name, give it a type
//~| HELP if this is a `self` type, give it a parameter name
//~| HELP if this is a type, explicitly ignore the parameter name

@@ -39,7 +39,7 @@ help: if this is a `self` type, give it a parameter name
|
LL | fn fizz(self: i32) {}
| ^^^^^^^^^
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn fizz(i32: TypeName) {}
| ^^^^^^^^^^^^^
@@ -9,7 +9,7 @@ help: if this is a `self` type, give it a parameter name
|
LL | fn foo(self: x) {
| ^^^^^^^
help: if this was a parameter name, give it a type
help: if this is a parameter name, give it a type
|
LL | fn foo(x: TypeName) {
| ^^^^^^^^^^^
7
src/test/ui/return-disjoint-regions.rs
Normal file
@@ -0,0 +1,7 @@
// See https://github.com/rust-lang/rust/pull/67911#issuecomment-576023915
fn f<'a, 'b>(x: i32) -> (&'a i32, &'b i32) {
    let y = &x;
    (y, y) //~ ERROR cannot return
}

fn main() {}
11
src/test/ui/return-disjoint-regions.stderr
Normal file
@@ -0,0 +1,11 @@
error[E0515]: cannot return value referencing function parameter `x`
--> $DIR/return-disjoint-regions.rs:4:5
|
LL | let y = &x;
| -- `x` is borrowed here
LL | (y, y)
| ^^^^^^ returns a value referencing data owned by the current function

error: aborting due to previous error

For more information about this error, try `rustc --explain E0515`.
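The new test documents that E0515 fires even when the returned references target disjoint regions. For contrast, a hedged sketch of a signature that does compile, borrowing from the parameter rather than from a local owned by the function (illustrative, not part of the diff):

```rust
fn f<'a>(x: &'a i32) -> (&'a i32, &'a i32) {
    let y = x;
    // `y` is just a copy of the incoming reference, so both returned
    // borrows point into the caller's data, not into `f`'s own frame.
    (y, y)
}

fn main() {
    let value = 5;
    assert_eq!(f(&value), (&5, &5));
}
```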
Some files were not shown because too many files have changed in this diff.