Mirror of https://github.com/rust-lang/rust.git (synced 2025-02-18 01:44:04 +00:00)
Auto merge of #88265 - m-ou-se:rollup-soymv20, r=m-ou-se
Rollup of 6 pull requests

Successful merges:

- #87976 (Account for tabs when highlighting multiline code suggestions)
- #88174 (Clarify some wording in Rust 2021 lint docs)
- #88188 (Greatly improve limitation handling on parallel rustdoc GUI test run)
- #88230 (Fix typos “a”→“an”)
- #88232 (Add notes to macro-not-found diagnostics to point out how things with the same name were not a match.)
- #88259 (Do not mark `-Z thir-unsafeck` as unsound anymore)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Commit a49e38e672
@@ -495,7 +495,7 @@ impl Token {
 self.lifetime().is_some()
 }

-/// Returns `true` if the token is a identifier whose name is the given
+/// Returns `true` if the token is an identifier whose name is the given
 /// string slice.
 pub fn is_ident_named(&self, name: Symbol) -> bool {
 self.ident().map_or(false, |(ident, _)| ident.name == name)
@@ -1265,7 +1265,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
 ty,
 );

-// Construct a AnonConst where the expr is the "ty"'s path.
+// Construct an AnonConst where the expr is the "ty"'s path.

 let parent_def_id = self.current_hir_id_owner.0;
 let node_id = self.resolver.next_node_id();
@@ -2690,7 +2690,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
 }

 /// Report an error on illegal use of `'_` or a `&T` with no explicit lifetime;
-/// return a "error lifetime".
+/// return an "error lifetime".
 fn new_error_lifetime(&mut self, id: Option<NodeId>, span: Span) -> hir::Lifetime {
 let (id, msg, label) = match id {
 Some(id) => (id, "`'_` cannot be used here", "`'_` is a reserved lifetime name"),
@@ -1700,7 +1700,7 @@ where
 /// One or more fields: call the base case function on the first value (which depends on
 /// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the
 /// fields.
-/// When the `substructure` is a `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
+/// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
 /// is returned. Statics may not be folded over.
 /// See `cs_op` in `partial_ord.rs` for a model example.
 pub fn cs_fold1<F, B>(
@@ -558,7 +558,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {

 fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
 if self.sess().target.arch == "amdgpu" {
-// amdgpu/LLVM does something weird and thinks a i64 value is
+// amdgpu/LLVM does something weird and thinks an i64 value is
 // split into a v2i32, halving the bitwidth LLVM expects,
 // tripping an assertion. So, for now, just disable this
 // optimization.
@@ -137,9 +137,9 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
 match scalar.value {
 Primitive::Int(..) => {
 if self.cx().size_of(ret_ty).bytes() < 4 {
-// `va_arg` should not be called on a integer type
+// `va_arg` should not be called on an integer type
 // less than 4 bytes in length. If it is, promote
-// the integer to a `i32` and truncate the result
+// the integer to an `i32` and truncate the result
 // back to the smaller type.
 let promoted_result = emit_va_arg(self, args[0], tcx.types.i32);
 self.trunc(promoted_result, llret_ty)
@@ -1031,7 +1031,7 @@ fn generic_simd_intrinsic(
 // vector mask and returns an unsigned integer containing the most
 // significant bit (MSB) of each lane.

-// If the vector has less than 8 lanes, an u8 is returned with zeroed
+// If the vector has less than 8 lanes, a u8 is returned with zeroed
 // trailing bits.
 let expected_int_bits = in_len.max(8);
 match ret_ty.kind() {
@@ -901,7 +901,7 @@ fn cast_float_to_int<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 //
 // Performance note: Unordered comparison can be lowered to a "flipped"
 // comparison and a negation, and the negation can be merged into the
-// select. Therefore, it not necessarily any more expensive than a
+// select. Therefore, it not necessarily any more expensive than an
 // ordered ("normal") comparison. Whether these optimizations will be
 // performed is ultimately up to the backend, but at least x86 does
 // perform them.
@@ -321,7 +321,7 @@ pub unsafe trait IntoErasedSendSync<'a> {
 /////////////////////////////////////////////////////////////////////////////

 impl<O, T: ?Sized> OwningRef<O, T> {
-/// Creates a new owning reference from a owner
+/// Creates a new owning reference from an owner
 /// initialized to the direct dereference of it.
 ///
 /// # Example
@@ -368,7 +368,7 @@ impl<O, T: ?Sized> OwningRef<O, T> {
 /// fn main() {
 /// let owning_ref = OwningRef::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref = owning_ref.map(|array| &array[2]);
 /// assert_eq!(*owning_ref, 3);
@@ -396,7 +396,7 @@ impl<O, T: ?Sized> OwningRef<O, T> {
 /// fn main() {
 /// let owning_ref = OwningRef::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref = owning_ref.try_map(|array| {
 /// if array[2] == 3 { Ok(&array[2]) } else { Err(()) }
@@ -430,7 +430,7 @@ impl<O, T: ?Sized> OwningRef<O, T> {
 /// in an additional `Box<O>`.
 ///
 /// This can be used to safely erase the owner of any `OwningRef<O, T>`
-/// to a `OwningRef<Box<Erased>, T>`.
+/// to an `OwningRef<Box<Erased>, T>`.
 pub fn map_owner_box(self) -> OwningRef<Box<O>, T> {
 OwningRef { reference: self.reference, owner: Box::new(self.owner) }
 }
@@ -511,7 +511,7 @@ impl<O, T: ?Sized> OwningRef<O, T> {
 }

 impl<O, T: ?Sized> OwningRefMut<O, T> {
-/// Creates a new owning reference from a owner
+/// Creates a new owning reference from an owner
 /// initialized to the direct dereference of it.
 ///
 /// # Example
@@ -558,7 +558,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// fn main() {
 /// let owning_ref_mut = OwningRefMut::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref = owning_ref_mut.map(|array| &array[2]);
 /// assert_eq!(*owning_ref, 3);
@@ -586,7 +586,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// fn main() {
 /// let owning_ref_mut = OwningRefMut::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref_mut = owning_ref_mut.map_mut(|array| &mut array[2]);
 /// assert_eq!(*owning_ref_mut, 3);
@@ -614,7 +614,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// fn main() {
 /// let owning_ref_mut = OwningRefMut::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref = owning_ref_mut.try_map(|array| {
 /// if array[2] == 3 { Ok(&array[2]) } else { Err(()) }
@@ -644,7 +644,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// fn main() {
 /// let owning_ref_mut = OwningRefMut::new(Box::new([1, 2, 3, 4]));
 ///
-/// // create a owning reference that points at the
+/// // create an owning reference that points at the
 /// // third element of the array.
 /// let owning_ref_mut = owning_ref_mut.try_map_mut(|array| {
 /// if array[2] == 3 { Ok(&mut array[2]) } else { Err(()) }
@@ -678,7 +678,7 @@ impl<O, T: ?Sized> OwningRefMut<O, T> {
 /// in an additional `Box<O>`.
 ///
 /// This can be used to safely erase the owner of any `OwningRefMut<O, T>`
-/// to a `OwningRefMut<Box<Erased>, T>`.
+/// to an `OwningRefMut<Box<Erased>, T>`.
 pub fn map_owner_box(self) -> OwningRefMut<Box<O>, T> {
 OwningRefMut { reference: self.reference, owner: Box::new(self.owner) }
 }
@@ -970,7 +970,7 @@ where
 }
 }

-// ^ FIXME: Is a Into impl for calling into_inner() possible as well?
+// ^ FIXME: Is an Into impl for calling into_inner() possible as well?

 impl<O, T: ?Sized> Debug for OwningRef<O, T>
 where
@@ -1139,27 +1139,27 @@ impl<T: 'static> ToHandleMut for RefCell<T> {
 // about which handle creation to use (i.e., read() vs try_read()) as well as
 // what to do with error results.

-/// Typedef of a owning reference that uses a `Box` as the owner.
+/// Typedef of an owning reference that uses a `Box` as the owner.
 pub type BoxRef<T, U = T> = OwningRef<Box<T>, U>;
-/// Typedef of a owning reference that uses a `Vec` as the owner.
+/// Typedef of an owning reference that uses a `Vec` as the owner.
 pub type VecRef<T, U = T> = OwningRef<Vec<T>, U>;
-/// Typedef of a owning reference that uses a `String` as the owner.
+/// Typedef of an owning reference that uses a `String` as the owner.
 pub type StringRef = OwningRef<String, str>;

-/// Typedef of a owning reference that uses a `Rc` as the owner.
+/// Typedef of an owning reference that uses a `Rc` as the owner.
 pub type RcRef<T, U = T> = OwningRef<Rc<T>, U>;
-/// Typedef of a owning reference that uses a `Arc` as the owner.
+/// Typedef of an owning reference that uses an `Arc` as the owner.
 pub type ArcRef<T, U = T> = OwningRef<Arc<T>, U>;

-/// Typedef of a owning reference that uses a `Ref` as the owner.
+/// Typedef of an owning reference that uses a `Ref` as the owner.
 pub type RefRef<'a, T, U = T> = OwningRef<Ref<'a, T>, U>;
-/// Typedef of a owning reference that uses a `RefMut` as the owner.
+/// Typedef of an owning reference that uses a `RefMut` as the owner.
 pub type RefMutRef<'a, T, U = T> = OwningRef<RefMut<'a, T>, U>;
-/// Typedef of a owning reference that uses a `MutexGuard` as the owner.
+/// Typedef of an owning reference that uses a `MutexGuard` as the owner.
 pub type MutexGuardRef<'a, T, U = T> = OwningRef<MutexGuard<'a, T>, U>;
-/// Typedef of a owning reference that uses a `RwLockReadGuard` as the owner.
+/// Typedef of an owning reference that uses a `RwLockReadGuard` as the owner.
 pub type RwLockReadGuardRef<'a, T, U = T> = OwningRef<RwLockReadGuard<'a, T>, U>;
-/// Typedef of a owning reference that uses a `RwLockWriteGuard` as the owner.
+/// Typedef of an owning reference that uses a `RwLockWriteGuard` as the owner.
 pub type RwLockWriteGuardRef<'a, T, U = T> = OwningRef<RwLockWriteGuard<'a, T>, U>;

 /// Typedef of a mutable owning reference that uses a `Box` as the owner.
@@ -1219,11 +1219,11 @@ unsafe impl<'a, T: Send + Sync + 'a> IntoErasedSendSync<'a> for Arc<T> {
 }
 }

-/// Typedef of a owning reference that uses an erased `Box` as the owner.
+/// Typedef of an owning reference that uses an erased `Box` as the owner.
 pub type ErasedBoxRef<U> = OwningRef<Box<dyn Erased>, U>;
-/// Typedef of a owning reference that uses an erased `Rc` as the owner.
+/// Typedef of an owning reference that uses an erased `Rc` as the owner.
 pub type ErasedRcRef<U> = OwningRef<Rc<dyn Erased>, U>;
-/// Typedef of a owning reference that uses an erased `Arc` as the owner.
+/// Typedef of an owning reference that uses an erased `Arc` as the owner.
 pub type ErasedArcRef<U> = OwningRef<Arc<dyn Erased>, U>;

 /// Typedef of a mutable owning reference that uses an erased `Box` as the owner.
@@ -220,7 +220,7 @@ impl SelfProfilerRef {
 VerboseTimingGuard::start(message, self.generic_activity(event_label))
 }

-/// Start profiling a extra verbose generic activity. Profiling continues until the
+/// Start profiling an extra verbose generic activity. Profiling continues until the
 /// VerboseTimingGuard returned from this call is dropped. In addition to recording
 /// a measureme event, "extra verbose" generic activities also print a timing entry to
 /// stdout if the compiler is invoked with -Ztime-passes.
@@ -14,7 +14,7 @@
 //!
 //! `MTRef` is an immutable reference if cfg!(parallel_compiler), and a mutable reference otherwise.
 //!
-//! `rustc_erase_owner!` erases a OwningRef owner into Erased or Erased + Send + Sync
+//! `rustc_erase_owner!` erases an OwningRef owner into Erased or Erased + Send + Sync
 //! depending on the value of cfg!(parallel_compiler).

 use crate::owning_ref::{Erased, OwningRef};
@@ -5,7 +5,7 @@
 //!
 //! - If you have a list that rarely stores more than one element, then this
 //! data-structure can store the element without allocating and only uses as
-//! much space as a `Option<(T, usize)>`. If T can double as the `Option`
+//! much space as an `Option<(T, usize)>`. If T can double as the `Option`
 //! discriminant, it will even only be as large as `T, usize`.
 //!
 //! If you expect to store more than 1 element in the common case, steer clear
@@ -1623,7 +1623,7 @@ impl EmitterWriter {
 let line_start = sm.lookup_char_pos(parts[0].span.lo()).line;
 draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
 let mut lines = complete.lines();
-for (line_pos, (line, parts)) in
+for (line_pos, (line, highlight_parts)) in
 lines.by_ref().zip(highlights).take(MAX_SUGGESTION_HIGHLIGHT_LINES).enumerate()
 {
 // Print the span column to avoid confusion
@@ -1658,7 +1658,7 @@ impl EmitterWriter {
 );
 buffer.puts(row_num, max_line_num_len + 1, "+ ", Style::Addition);
 } else if is_multiline {
-match &parts[..] {
+match &highlight_parts[..] {
 [SubstitutionHighlight { start: 0, end }] if *end == line.len() => {
 buffer.puts(row_num, max_line_num_len + 1, "+ ", Style::Addition);
 }
@@ -1676,16 +1676,24 @@ impl EmitterWriter {
 // print the suggestion
 buffer.append(row_num, &replace_tabs(line), Style::NoStyle);

-if is_multiline {
-    for SubstitutionHighlight { start, end } in parts {
-        buffer.set_style_range(
-            row_num,
-            max_line_num_len + 3 + start,
-            max_line_num_len + 3 + end,
-            Style::Addition,
-            true,
-        );
-    }
+// Colorize addition/replacements with green.
+for &SubstitutionHighlight { start, end } in highlight_parts {
+    // Account for tabs when highlighting (#87972).
+    let tabs: usize = line
+        .chars()
+        .take(start)
+        .map(|ch| match ch {
+            '\t' => 3,
+            _ => 0,
+        })
+        .sum();
+    buffer.set_style_range(
+        row_num,
+        max_line_num_len + 3 + start + tabs,
+        max_line_num_len + 3 + end + tabs,
+        Style::Addition,
+        true,
+    );
 }
 row_num += 1;
 }
@@ -1723,13 +1731,6 @@ impl EmitterWriter {
 assert!(underline_start >= 0 && underline_end >= 0);
 let padding: usize = max_line_num_len + 3;
 for p in underline_start..underline_end {
-    // Colorize addition/replacements with green.
-    buffer.set_style(
-        row_num - 1,
-        (padding as isize + p) as usize,
-        Style::Addition,
-        true,
-    );
 if !show_diff {
 // If this is a replacement, underline with `^`, if this is an addition
 // underline with `+`.
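The heart of #87976 in the hunk above is the tab adjustment: the emitter renders each tab as four spaces, so every tab that appears before a highlight's `start` column shifts the highlighted span three display columns to the right. A minimal, self-contained sketch of that offset arithmetic (plain Rust; the function name `highlight_range` and the sample line are illustrative assumptions, not part of rustc):

fn highlight_range(line: &str, start: usize, end: usize) -> (usize, usize) {
    // Each '\t' among the first `start` characters is rendered as four
    // spaces, i.e. it contributes three extra display columns.
    let tabs: usize = line
        .chars()
        .take(start)
        .map(|ch| if ch == '\t' { 3 } else { 0 })
        .sum();
    (start + tabs, end + tabs)
}

fn main() {
    // "foo" occupies character positions 1..4, but display columns 4..7
    // once the leading tab is expanded to four spaces.
    let line = "\tfoo = bar;";
    assert_eq!(highlight_range(line, 1, 4), (4, 7));
}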
@@ -283,6 +283,9 @@ impl CodeSuggestion {
 let mut buf = String::new();

 let mut line_highlight = vec![];
+// We need to keep track of the difference between the existing code and the added
+// or deleted code in order to point at the correct column *after* substitution.
+let mut acc = 0;
 for part in &substitution.parts {
 let cur_lo = sm.lookup_char_pos(part.span.lo());
 if prev_hi.line == cur_lo.line {
@@ -290,9 +293,11 @@ impl CodeSuggestion {
 push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, Some(&cur_lo));
 while count > 0 {
 highlights.push(std::mem::take(&mut line_highlight));
+acc = 0;
 count -= 1;
 }
 } else {
+acc = 0;
 highlights.push(std::mem::take(&mut line_highlight));
 let mut count = push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, None);
 while count > 0 {
@@ -316,18 +321,43 @@ impl CodeSuggestion {
 }
 }
 // Add a whole line highlight per line in the snippet.
+let len: isize = part
+    .snippet
+    .split('\n')
+    .next()
+    .unwrap_or(&part.snippet)
+    .chars()
+    .map(|c| match c {
+        '\t' => 4,
+        _ => 1,
+    })
+    .sum();
 line_highlight.push(SubstitutionHighlight {
-    start: cur_lo.col.0,
-    end: cur_lo.col.0
-        + part.snippet.split('\n').next().unwrap_or(&part.snippet).len(),
+    start: (cur_lo.col.0 as isize + acc) as usize,
+    end: (cur_lo.col.0 as isize + acc + len) as usize,
 });
-for line in part.snippet.split('\n').skip(1) {
-    highlights.push(std::mem::take(&mut line_highlight));
-    line_highlight.push(SubstitutionHighlight { start: 0, end: line.len() });
-}
 buf.push_str(&part.snippet);
-prev_hi = sm.lookup_char_pos(part.span.hi());
+let cur_hi = sm.lookup_char_pos(part.span.hi());
+if prev_hi.line == cur_lo.line {
+    // Account for the difference between the width of the current code and the
+    // snippet being suggested, so that the *later* suggestions are correctly
+    // aligned on the screen.
+    acc += len as isize - (cur_hi.col.0 - cur_lo.col.0) as isize;
+}
+prev_hi = cur_hi;
 prev_line = sf.get_line(prev_hi.line - 1);
+for line in part.snippet.split('\n').skip(1) {
+    acc = 0;
+    highlights.push(std::mem::take(&mut line_highlight));
+    let end: usize = line
+        .chars()
+        .map(|c| match c {
+            '\t' => 4,
+            _ => 1,
+        })
+        .sum();
+    line_highlight.push(SubstitutionHighlight { start: 0, end });
+}
 }
 highlights.push(std::mem::take(&mut line_highlight));
 let only_capitalization = is_case_difference(sm, &buf, bounding_span);
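The `len` and `end` computations added above measure a snippet's display width rather than its character count, treating a tab as four columns; the running `acc` then records how much wider or narrower the suggested text is than the original so later highlights on the same line stay aligned. A small hedged illustration of just the width rule (standalone Rust, independent of rustc's types; `first_line_display_width` is a made-up name for this sketch):

fn first_line_display_width(snippet: &str) -> usize {
    // Width of the first line only: tabs count as four columns,
    // every other character as one.
    snippet
        .split('\n')
        .next()
        .unwrap_or(snippet)
        .chars()
        .map(|c| if c == '\t' { 4 } else { 1 })
        .sum()
}

fn main() {
    assert_eq!(first_line_display_width("foo"), 3);
    assert_eq!(first_line_display_width("\tfoo\nbar"), 7); // tab (4) + "foo" (3)
}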
@@ -974,7 +974,7 @@ pub enum PatKind<'hir> {
 /// Invariant: `pats.len() >= 2`.
 Or(&'hir [Pat<'hir>]),

-/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
+/// A path pattern for a unit struct/variant or a (maybe-associated) constant.
 Path(QPath<'hir>),

 /// A tuple pattern (e.g., `(a, b)`).
@@ -2323,7 +2323,7 @@ pub enum TyKind<'hir> {
 ///
 /// Type parameters may be stored in each `PathSegment`.
 Path(QPath<'hir>),
-/// A opaque type definition itself. This is currently only used for the
+/// An opaque type definition itself. This is currently only used for the
 /// `opaque type Foo: Trait` item that `impl Trait` in desugars to.
 ///
 /// The generic argument list contains the lifetimes (and in the future
@@ -313,7 +313,7 @@ pub trait Visitor<'v>: Sized {
 }

 /// When invoking `visit_all_item_likes()`, you need to supply an
-/// item-like visitor. This method converts a "intra-visit"
+/// item-like visitor. This method converts an "intra-visit"
 /// visitor into an item-like visitor that walks the entire tree.
 /// If you use this, you probably don't want to process the
 /// contents of nested item-like things, since the outer loop will
@@ -1697,7 +1697,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 }

 // In some (most?) cases cause.body_id points to actual body, but in some cases
-// it's a actual definition. According to the comments (e.g. in
+// it's an actual definition. According to the comments (e.g. in
 // librustc_typeck/check/compare_method.rs:compare_predicate_entailment) the latter
 // is relied upon by some other code. This might (or might not) need cleanup.
 let body_owner_def_id =
@@ -307,7 +307,7 @@ where
 /// relations between `'0` and `'a`).
 ///
 /// The variable `pair` can be either a `(vid, ty)` or `(ty, vid)`
-/// -- in other words, it is always a (unresolved) inference
+/// -- in other words, it is always an (unresolved) inference
 /// variable `vid` and a type `ty` that are being related, but the
 /// vid may appear either as the "a" type or the "b" type,
 /// depending on where it appears in the tuple. The trait
@@ -389,7 +389,7 @@ where
 }
 }

-/// When we instantiate a inference variable with a value in
+/// When we instantiate an inference variable with a value in
 /// `relate_ty_var`, we always have the pair of a `TyVid` and a `Ty`,
 /// but the ordering may vary (depending on whether the inference
 /// variable was found on the `a` or `b` sides). Therefore, this trait
@@ -186,7 +186,7 @@ pub enum GenericKind<'tcx> {
 /// ('a: min) || ('b: min)
 /// }
 ///
-/// This is described with a `AnyRegion('a, 'b)` node.
+/// This is described with an `AnyRegion('a, 'b)` node.
 #[derive(Debug, Clone)]
 pub enum VerifyBound<'tcx> {
 /// Given a kind K and a bound B, expands to a function like the
@@ -96,7 +96,7 @@ impl Default for InferCtxtUndoLogs<'_> {
 }

 /// The UndoLogs trait defines how we undo a particular kind of action (of type T). We can undo any
-/// action that is convertable into a UndoLog (per the From impls above).
+/// action that is convertable into an UndoLog (per the From impls above).
 impl<'tcx, T> UndoLogs<T> for InferCtxtUndoLogs<'tcx>
 where
 UndoLog<'tcx>: From<T>,
@@ -28,7 +28,7 @@ pub use self::project::{
 pub use rustc_middle::traits::*;

 /// An `Obligation` represents some trait reference (e.g., `i32: Eq`) for
-/// which the "impl_source" must be found. The process of finding a "impl_source" is
+/// which the "impl_source" must be found. The process of finding an "impl_source" is
 /// called "resolving" the `Obligation`. This process consists of
 /// either identifying an `impl` (e.g., `impl Eq for i32`) that
 /// satisfies the obligation, or else finding a bound that is in
@@ -489,7 +489,7 @@ impl Cursor<'_> {
 // Start is already eaten, eat the rest of identifier.
 self.eat_while(is_id_continue);
 // Known prefixes must have been handled earlier. So if
-// we see a prefix here, it is definitely a unknown prefix.
+// we see a prefix here, it is definitely an unknown prefix.
 match self.first() {
 '#' | '"' | '\'' => UnknownPrefix,
 _ => Ident,
@@ -118,7 +118,7 @@ fn to_camel_case(s: &str) -> String {
 })
 .fold((String::new(), None), |(acc, prev): (String, Option<String>), next| {
 // separate two components with an underscore if their boundary cannot
-// be distinguished using a uppercase/lowercase case distinction
+// be distinguished using an uppercase/lowercase case distinction
 let join = if let Some(prev) = prev {
 let l = prev.chars().last().unwrap();
 let f = next.chars().next().unwrap();
@@ -1171,7 +1171,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
 ty::Projection(..) => {
 let ty = self.cx.tcx.normalize_erasing_regions(self.cx.param_env, ty);

-// If `ty` is a opaque type directly then `super_visit_with` won't invoke
+// If `ty` is an opaque type directly then `super_visit_with` won't invoke
 // this function again.
 if ty.has_opaque_types() {
 self.visit_ty(ty)
@@ -3070,16 +3070,19 @@ declare_lint! {

 declare_lint! {
 /// The `rust_2021_incompatible_closure_captures` lint detects variables that aren't completely
-/// captured in Rust 2021 and affect the Drop order of at least one path starting at this variable.
-/// It can also detect when a variable implements a trait, but one of its field does not and
-/// the field is captured by a closure and used with the assumption that said field implements
+/// captured in Rust 2021, such that the `Drop` order of their fields may differ between
+/// Rust 2018 and 2021.
+///
+/// It can also detect when a variable implements a trait like `Send`, but one of its fields does not,
+/// and the field is captured by a closure and used with the assumption that said field implements
 /// the same trait as the root variable.
 ///
 /// ### Example of drop reorder
 ///
 /// ```rust,compile_fail
-/// # #![deny(rust_2021_incompatible_closure_captures)]
+/// #![deny(rust_2021_incompatible_closure_captures)]
 /// # #![allow(unused)]
+///
 /// struct FancyInteger(i32);
 ///
 /// impl Drop for FancyInteger {
@@ -3133,8 +3136,8 @@ declare_lint! {
 /// ### Explanation
 ///
 /// In the above example, only `fptr.0` is captured in Rust 2021.
-/// The field is of type *mut i32 which doesn't implement Send, making the code invalid as the
-/// field cannot be sent between thread safely.
+/// The field is of type `*mut i32`, which doesn't implement `Send`,
+/// making the code invalid as the field cannot be sent between threads safely.
 pub RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES,
 Allow,
 "detects closures affected by Rust 2021 changes",
@@ -3254,6 +3257,7 @@ declare_lint! {
 ///
 /// ```rust,compile_fail
 /// #![deny(rust_2021_incompatible_or_patterns)]
+///
 /// macro_rules! match_any {
 /// ( $expr:expr , $( $( $pat:pat )|+ => $expr_arm:expr ),+ ) => {
 /// match $expr {
@@ -3275,7 +3279,7 @@ declare_lint! {
 ///
 /// ### Explanation
 ///
-/// In Rust 2021, the pat matcher will match new patterns, which include the | character.
+/// In Rust 2021, the `pat` matcher will match additional patterns, which include the `|` character.
 pub RUST_2021_INCOMPATIBLE_OR_PATTERNS,
 Allow,
 "detects usage of old versions of or-patterns",
@@ -3320,8 +3324,8 @@ declare_lint! {
 /// In Rust 2021, one of the important introductions is the [prelude changes], which add
 /// `TryFrom`, `TryInto`, and `FromIterator` into the standard library's prelude. Since this
 /// results in an ambiguity as to which method/function to call when an existing `try_into`
 /// method is called via dot-call syntax or a `try_from`/`from_iter` associated function
 /// is called directly on a type.
 ///
 /// [prelude changes]: https://blog.rust-lang.org/inside-rust/2021/03/04/planning-rust-2021.html#prelude-changes
 pub RUST_2021_PRELUDE_COLLISIONS,
@@ -3371,7 +3375,7 @@ declare_lint! {
 }

 declare_lint! {
-/// The `unsupported_calling_conventions` lint is output whenever there is an use of the
+/// The `unsupported_calling_conventions` lint is output whenever there is a use of the
 /// `stdcall`, `fastcall`, `thiscall`, `vectorcall` calling conventions (or their unwind
 /// variants) on targets that cannot meaningfully be supported for the requested target.
 ///
@@ -62,7 +62,7 @@ crate struct CrateMetadata {
 // --- Some data pre-decoded from the metadata blob, usually for performance ---
 /// Properties of the whole crate.
 /// NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
-/// lifetime is only used behind `Lazy`, and therefore acts like an
+/// lifetime is only used behind `Lazy`, and therefore acts like a
 /// universal (`for<'tcx>`), that is paired up with whichever `TyCtxt`
 /// is being used to decode those values.
 root: CrateRoot<'static>,
@@ -63,7 +63,7 @@ impl MaybeFnLike for hir::Expr<'_> {
 }
 }

-/// Carries either an FnLikeNode or a Expr, as these are the two
+/// Carries either an FnLikeNode or an Expr, as these are the two
 /// constructs that correspond to "code" (as in, something from which
 /// we can construct a control-flow graph).
 #[derive(Copy, Clone)]
@@ -151,7 +151,7 @@ rustc_index::newtype_index! {
 static_assert_size!(ScopeData, 4);

 impl Scope {
-/// Returns a item-local ID associated with this scope.
+/// Returns an item-local ID associated with this scope.
 ///
 /// N.B., likely to be replaced as API is refined; e.g., pnkfelix
 /// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
@@ -46,7 +46,7 @@ static_assert_size!(InterpErrorInfo<'_>, 8);
 /// Packages the kind of error we got from the const code interpreter
 /// up with a Rust-level backtrace of where the error occurred.
 /// These should always be constructed by calling `.into()` on
-/// a `InterpError`. In `rustc_mir::interpret`, we have `throw_err_*`
+/// an `InterpError`. In `rustc_mir::interpret`, we have `throw_err_*`
 /// macros for this.
 #[derive(Debug)]
 pub struct InterpErrorInfo<'tcx>(Box<InterpErrorInfoInner<'tcx>>);
@@ -17,7 +17,7 @@ use super::{
 /// Represents the result of const evaluation via the `eval_to_allocation` query.
 #[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
 pub struct ConstAlloc<'tcx> {
-// the value lives here, at offset 0, and that allocation definitely is a `AllocKind::Memory`
+// the value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
 // (so you can use `AllocMap::unwrap_memory`).
 pub alloc_id: AllocId,
 pub ty: Ty<'tcx>,
@@ -113,7 +113,7 @@ impl<'tcx> ConstValue<'tcx> {
 }

 /// A `Scalar` represents an immediate, primitive value existing outside of a
-/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 16 bytes in
+/// `memory::Allocation`. It is in many ways like a small chunk of an `Allocation`, up to 16 bytes in
 /// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
 /// of a simple value or a pointer into another `Allocation`
 ///
@@ -376,27 +376,27 @@ impl<'tcx, Tag: Provenance> Scalar<Tag> {
 self.to_bits(sz)
 }

-/// Converts the scalar to produce an `u8`. Fails if the scalar is a pointer.
+/// Converts the scalar to produce a `u8`. Fails if the scalar is a pointer.
 pub fn to_u8(self) -> InterpResult<'static, u8> {
 self.to_unsigned_with_bit_width(8).map(|v| u8::try_from(v).unwrap())
 }

-/// Converts the scalar to produce an `u16`. Fails if the scalar is a pointer.
+/// Converts the scalar to produce a `u16`. Fails if the scalar is a pointer.
 pub fn to_u16(self) -> InterpResult<'static, u16> {
 self.to_unsigned_with_bit_width(16).map(|v| u16::try_from(v).unwrap())
 }

-/// Converts the scalar to produce an `u32`. Fails if the scalar is a pointer.
+/// Converts the scalar to produce a `u32`. Fails if the scalar is a pointer.
 pub fn to_u32(self) -> InterpResult<'static, u32> {
 self.to_unsigned_with_bit_width(32).map(|v| u32::try_from(v).unwrap())
 }

-/// Converts the scalar to produce an `u64`. Fails if the scalar is a pointer.
+/// Converts the scalar to produce a `u64`. Fails if the scalar is a pointer.
 pub fn to_u64(self) -> InterpResult<'static, u64> {
 self.to_unsigned_with_bit_width(64).map(|v| u64::try_from(v).unwrap())
 }

-/// Converts the scalar to produce an `u128`. Fails if the scalar is a pointer.
+/// Converts the scalar to produce a `u128`. Fails if the scalar is a pointer.
 pub fn to_u128(self) -> InterpResult<'static, u128> {
 self.to_unsigned_with_bit_width(128)
 }
@@ -265,7 +265,7 @@ impl BorrowKind {
 BorrowKind::Shared => hir::Mutability::Not,

 // We have no type corresponding to a unique imm borrow, so
-// use `&mut`. It gives all the capabilities of an `&uniq`
+// use `&mut`. It gives all the capabilities of a `&uniq`
 // and hence is a safe "over approximation".
 BorrowKind::Unique => hir::Mutability::Mut,

@@ -1202,7 +1202,7 @@ pub enum NonUseContext {
 StorageDead,
 /// User type annotation assertions for NLL.
 AscribeUserTy,
-/// The data of an user variable, for debug info.
+/// The data of a user variable, for debug info.
 VarDebugInfo,
 }

@@ -648,7 +648,7 @@ rustc_queries! {
 }
 }

-/// HACK: when evaluated, this reports a "unsafe derive on repr(packed)" error.
+/// HACK: when evaluated, this reports an "unsafe derive on repr(packed)" error.
 ///
 /// Unsafety checking is executed for each method separately, but we only want
 /// to emit this error once per derive. As there are some impls with multiple
@@ -1733,7 +1733,7 @@ rustc_queries! {
 }

 /// Performs an HIR-based well-formed check on the item with the given `HirId`. If
-/// we get an `Umimplemented` error that matches the provided `Predicate`, return
+/// we get an `Unimplemented` error that matches the provided `Predicate`, return
 /// the cause of the newly created obligation.
 ///
 /// This is only used by error-reporting code to get a better cause (in particular, a better
@@ -509,7 +509,7 @@ pub enum ImplSource<'tcx, N> {
 TraitUpcasting(ImplSourceTraitUpcastingData<'tcx, N>),

 /// ImplSource automatically generated for a closure. The `DefId` is the ID
-/// of the closure expression. This is a `ImplSource::UserDefined` in spirit, but the
+/// of the closure expression. This is an `ImplSource::UserDefined` in spirit, but the
 /// impl is generated by the compiler and does not appear in the source.
 Closure(ImplSourceClosureData<'tcx, N>),

@@ -111,7 +111,7 @@ pub enum SelectionCandidate<'tcx> {
 ProjectionCandidate(usize),

 /// Implementation of a `Fn`-family trait by one of the anonymous types
-/// generated for a `||` expression.
+/// generated for an `||` expression.
 ClosureCandidate,

 /// Implementation of a `Generator` trait by one of the anonymous types
@@ -209,7 +209,7 @@ impl<'tcx> AdtDef {
 self.flags.contains(AdtFlags::IS_UNION)
 }

-/// Returns `true` if this is a enum.
+/// Returns `true` if this is an enum.
 #[inline]
 pub fn is_enum(&self) -> bool {
 self.flags.contains(AdtFlags::IS_ENUM)
@@ -434,7 +434,7 @@ impl BorrowKind {
 ImmBorrow => hir::Mutability::Not,

 // We have no type corresponding to a unique imm borrow, so
-// use `&mut`. It gives all the capabilities of an `&uniq`
+// use `&mut`. It gives all the capabilities of a `&uniq`
 // and hence is a safe "over approximation".
 UniqueImmBorrow => hir::Mutability::Mut,
 }
@@ -209,7 +209,7 @@ where
 impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for Ty<'tcx> {
 #[allow(rustc::usage_of_ty_tykind)]
 fn decode(decoder: &mut D) -> Result<Ty<'tcx>, D::Error> {
-// Handle shorthands first, if we have an usize > 0x80.
+// Handle shorthands first, if we have a usize > 0x80.
 if decoder.positioned_at_shorthand() {
 let pos = decoder.read_usize()?;
 assert!(pos >= SHORTHAND_OFFSET);
@@ -228,7 +228,7 @@ impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for Ty<'tcx> {
 impl<'tcx, D: TyDecoder<'tcx>> Decodable<D> for ty::Binder<'tcx, ty::PredicateKind<'tcx>> {
 fn decode(decoder: &mut D) -> Result<ty::Binder<'tcx, ty::PredicateKind<'tcx>>, D::Error> {
 let bound_vars = Decodable::decode(decoder)?;
-// Handle shorthands first, if we have an usize > 0x80.
+// Handle shorthands first, if we have a usize > 0x80.
 Ok(ty::Binder::bind_with_vars(
 if decoder.positioned_at_shorthand() {
 let pos = decoder.read_usize()?;
@@ -1792,7 +1792,7 @@ pub mod tls {
 if context == 0 {
 f(None)
 } else {
-// We could get a `ImplicitCtxt` pointer from another thread.
+// We could get an `ImplicitCtxt` pointer from another thread.
 // Ensure that `ImplicitCtxt` is `Sync`.
 sync::assert_sync::<ImplicitCtxt<'_, '_>>();

@@ -9,7 +9,7 @@ use crate::ty::{DefId, SubstsRef};

 mod def_id_forest;

-// The methods in this module calculate `DefIdForest`s of modules in which a
+// The methods in this module calculate `DefIdForest`s of modules in which an
 // `AdtDef`/`VariantDef`/`FieldDef` is visibly uninhabited.
 //
 // # Example

@@ -2571,14 +2571,14 @@ where
 /// Compute a `FnAbi` suitable for indirect calls, i.e. to `fn` pointers.
 ///
 /// NB: this doesn't handle virtual calls - those should use `FnAbi::of_instance`
-/// instead, where the instance is a `InstanceDef::Virtual`.
+/// instead, where the instance is an `InstanceDef::Virtual`.
 fn of_fn_ptr(cx: &C, sig: ty::PolyFnSig<'tcx>, extra_args: &[Ty<'tcx>]) -> Self;

 /// Compute a `FnAbi` suitable for declaring/defining an `fn` instance, and for
 /// direct calls to an `fn`.
 ///
 /// NB: that includes virtual calls, which are represented by "direct calls"
-/// to a `InstanceDef::Virtual` instance (of `<dyn Trait as Trait>::fn`).
+/// to an `InstanceDef::Virtual` instance (of `<dyn Trait as Trait>::fn`).
 fn of_instance(cx: &C, instance: ty::Instance<'tcx>, extra_args: &[Ty<'tcx>]) -> Self;

 fn new_internal(
@@ -865,7 +865,7 @@ impl<'tcx> Predicate<'tcx> {

 /// Represents the bounds declared on a particular set of type
 /// parameters. Should eventually be generalized into a flag list of
-/// where-clauses. You can obtain a `InstantiatedPredicates` list from a
+/// where-clauses. You can obtain an `InstantiatedPredicates` list from a
 /// `GenericPredicates` by using the `instantiate` method. Note that this method
 /// reflects an important semantic invariant of `InstantiatedPredicates`: while
 /// the `GenericPredicates` are expressed in terms of the bound type

@@ -1371,7 +1371,7 @@ bitflags! {
 }
 }

-/// Definition of a variant -- a struct's fields or a enum variant.
+/// Definition of a variant -- a struct's fields or an enum variant.
 #[derive(Debug, HashStable)]
 pub struct VariantDef {
 /// `DefId` that identifies the variant itself.

@@ -239,7 +239,7 @@ static_assert_size!(TyKind<'_>, 32);
 /// implements `CK<(u32, u32), Output = u32>`, where `CK` is the trait
 /// specified above.
 /// - U is a type parameter representing the types of its upvars, tupled up
-/// (borrowed, if appropriate; that is, if an U field represents a by-ref upvar,
+/// (borrowed, if appropriate; that is, if a U field represents a by-ref upvar,
 /// and the up-var has the type `Foo`, then that field of U will be `&Foo`).
 ///
 /// So, for example, given this function:
@@ -1323,7 +1323,7 @@ pub type Region<'tcx> = &'tcx RegionKind;
 /// These are regions that are stored behind a binder and must be substituted
 /// with some concrete region before being used. There are two kind of
 /// bound regions: early-bound, which are bound in an item's `Generics`,
-/// and are substituted by a `InternalSubsts`, and late-bound, which are part of
+/// and are substituted by an `InternalSubsts`, and late-bound, which are part of
 /// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by
 /// the likes of `liberate_late_bound_regions`. The distinction exists
 /// because higher-ranked lifetimes aren't supported in all places. See [1][2].

@@ -1471,7 +1471,7 @@ pub type PolyExistentialProjection<'tcx> = Binder<'tcx, ExistentialProjection<'t
 impl<'tcx> ExistentialProjection<'tcx> {
 /// Extracts the underlying existential trait reference from this projection.
 /// For example, if this is a projection of `exists T. <T as Iterator>::Item == X`,
-/// then this function would return a `exists T. T: Iterator` existential trait
+/// then this function would return an `exists T. T: Iterator` existential trait
 /// reference.
 pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::ExistentialTraitRef<'tcx> {
 let def_id = tcx.associated_item(self.item_def_id).container.id();

@@ -22,7 +22,7 @@ use std::ops::ControlFlow;

 /// An entity in the Rust type system, which can be one of
 /// several kinds (types, lifetimes, and consts).
-/// To reduce memory usage, a `GenericArg` is a interned pointer,
+/// To reduce memory usage, a `GenericArg` is an interned pointer,
 /// with the lowest 2 bits being reserved for a tag to
 /// indicate the type (`Ty`, `Region`, or `Const`) it points to.
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
@@ -204,12 +204,12 @@ impl<'a, 'tcx> InternalSubsts<'tcx> {
 GeneratorSubsts { substs: self }
 }

-/// Creates a `InternalSubsts` that maps each generic parameter to itself.
+/// Creates an `InternalSubsts` that maps each generic parameter to itself.
 pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> {
 Self::for_item(tcx, def_id, |param, _| tcx.mk_param_from_def(param))
 }

-/// Creates a `InternalSubsts` for generic parameter definitions,
+/// Creates an `InternalSubsts` for generic parameter definitions,
 /// by calling closures to obtain each kind.
 /// The closures get to observe the `InternalSubsts` as they're
 /// being built, which can be used to correctly

@@ -681,7 +681,7 @@ impl<'tcx> ty::TyS<'tcx> {
 }

 /// Checks whether values of this type `T` implement the `Freeze`
-/// trait -- frozen types are those that do not contain a
+/// trait -- frozen types are those that do not contain an
 /// `UnsafeCell` anywhere. This is a language concept used to
 /// distinguish "true immutability", which is relevant to
 /// optimization as well as the rules around static values. Note
@@ -825,7 +825,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 // We're going to want to traverse the first borrowed place to see if we can find
 // field access to a union. If we find that, then we will keep the place of the
 // union being accessed and the field that was being accessed so we can check the
-// second borrowed place for the same union and a access to a different field.
+// second borrowed place for the same union and an access to a different field.
 for (place_base, elem) in first_borrowed_place.iter_projections().rev() {
 match elem {
 ProjectionElem::Field(field, _) if union_ty(place_base).is_some() => {

@@ -838,7 +838,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 })
 .and_then(|(target_base, target_field)| {
 // With the place of a union and a field access into it, we traverse the second
-// borrowed place and look for a access to a different field of the same union.
+// borrowed place and look for an access to a different field of the same union.
 for (place_base, elem) in second_borrowed_place.iter_projections().rev() {
 if let ProjectionElem::Field(field, _) = elem {
 if let Some(union_ty) = union_ty(place_base) {

@@ -606,7 +606,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {

 /// Checks if a borrowed value was captured by a trait object. We do this by
 /// looking forward in the MIR from the reserve location and checking if we see
-/// a unsized cast to a trait object on our data.
+/// an unsized cast to a trait object on our data.
 fn was_captured_by_trait_object(&self, borrow: &BorrowData<'tcx>) -> bool {
 // Start at the reserve location, find the place that we want to see cast to a trait object.
 let location = borrow.reserve_location;

@@ -666,7 +666,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 }
 _ => {}
 },
-// If we see a unsized cast, then if it is our data we should check
+// If we see an unsized cast, then if it is our data we should check
 // whether it is being cast to a trait object.
 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), operand, ty) => {
 match operand {
@@ -72,7 +72,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {

 // If the place is immutable then:
 //
-// - Either we deref a immutable ref to get to our final place.
+// - Either we deref an immutable ref to get to our final place.
 // - We don't capture derefs of raw ptrs
 // - Or the final place is immut because the root variable of the capture
 // isn't marked mut and we should suggest that to the user.

@@ -601,7 +601,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 let reason = if let PlaceBase::Upvar(upvar_id) = closure_kind_origin.base {
 let upvar = ty::place_to_string_for_capture(tcx, closure_kind_origin);
 let root_hir_id = upvar_id.var_path.hir_id;
-// we have a origin for this closure kind starting at this root variable so it's safe to unwrap here
+// we have an origin for this closure kind starting at this root variable so it's safe to unwrap here
 let captured_places = tables.closure_min_captures[id].get(&root_hir_id).unwrap();

 let origin_projection = closure_kind_origin

@@ -423,7 +423,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 diag
 }

-/// Reports a error specifically for when data is escaping a closure.
+/// Reports an error specifically for when data is escaping a closure.
 ///
 /// ```text
 /// error: borrowed data escapes outside of function

@@ -566,7 +566,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 diag
 }

-/// Adds a suggestion to errors where a `impl Trait` is returned.
+/// Adds a suggestion to errors where an `impl Trait` is returned.
 ///
 /// ```text
 /// help: to allow this `impl Trait` to capture borrowed data with lifetime `'1`, add `'_` as
|
@ -1241,7 +1241,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Special case: you can assign a immutable local variable
|
// Special case: you can assign an immutable local variable
|
||||||
// (e.g., `x = ...`) so long as it has never been initialized
|
// (e.g., `x = ...`) so long as it has never been initialized
|
||||||
// before (at this point in the flow).
|
// before (at this point in the flow).
|
||||||
if let Some(local) = place_span.0.as_local() {
|
if let Some(local) = place_span.0.as_local() {
|
||||||
@ -1702,7 +1702,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
// initialization state of `a.b` is all we need to inspect to
|
// initialization state of `a.b` is all we need to inspect to
|
||||||
// know if `a.b.c` is valid (and from that we infer that the
|
// know if `a.b.c` is valid (and from that we infer that the
|
||||||
// dereference and `.d` access is also valid, since we assume
|
// dereference and `.d` access is also valid, since we assume
|
||||||
// `a.b.c` is assigned a reference to a initialized and
|
// `a.b.c` is assigned a reference to an initialized and
|
||||||
// well-formed record structure.)
|
// well-formed record structure.)
|
||||||
|
|
||||||
// Therefore, if we seek out the *closest* prefix for which we
|
// Therefore, if we seek out the *closest* prefix for which we
|
||||||
@ -1889,7 +1889,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
ProjectionElem::Downcast(_/*adt_def*/, _/*variant_idx*/) =>
|
ProjectionElem::Downcast(_/*adt_def*/, _/*variant_idx*/) =>
|
||||||
// assigning to (P->variant) is okay if assigning to `P` is okay
|
// assigning to (P->variant) is okay if assigning to `P` is okay
|
||||||
//
|
//
|
||||||
// FIXME: is this true even if P is a adt with a dtor?
|
// FIXME: is this true even if P is an adt with a dtor?
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
// assigning to (*P) requires P to be initialized
|
// assigning to (*P) requires P to be initialized
|
||||||
@ -2003,8 +2003,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let Some((prefix, mpi)) = shortest_uninit_seen {
|
if let Some((prefix, mpi)) = shortest_uninit_seen {
|
||||||
// Check for a reassignment into a uninitialized field of a union (for example,
|
// Check for a reassignment into an uninitialized field of a union (for example,
|
||||||
// after a move out). In this case, do not report a error here. There is an
|
// after a move out). In this case, do not report an error here. There is an
|
||||||
// exception, if this is the first assignment into the union (that is, there is
|
// exception, if this is the first assignment into the union (that is, there is
|
||||||
// no move out from an earlier location) then this is an attempt at initialization
|
// no move out from an earlier location) then this is an attempt at initialization
|
||||||
// of the union - we should error in that case.
|
// of the union - we should error in that case.
|
||||||
|
@@ -169,7 +169,7 @@ struct UniversalRegionIndices<'tcx> {
 /// used because trait matching and type-checking will feed us
 /// region constraints that reference those regions and we need to
 /// be able to map them our internal `RegionVid`. This is
-/// basically equivalent to a `InternalSubsts`, except that it also
+/// basically equivalent to an `InternalSubsts`, except that it also
 /// contains an entry for `ReStatic` -- it might be nice to just
 /// use a substs, and then handle `ReStatic` another way.
 indices: FxHashMap<ty::Region<'tcx>, RegionVid>,

@@ -295,7 +295,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 /// we can find the data.
 /// Note that for a given layout, this operation will either always fail or always
 /// succeed! Whether it succeeds depends on whether the layout can be represented
-/// in a `Immediate`, not on which data is stored there currently.
+/// in an `Immediate`, not on which data is stored there currently.
 pub(crate) fn try_read_immediate(
 &self,
 src: &OpTy<'tcx, M::PointerTag>,

@@ -857,7 +857,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
 // types above, in `visit_primitive`.
 // In run-time mode, we accept pointers in here. This is actually more
 // permissive than a per-element check would be, e.g., we accept
-// an &[u8] that contains a pointer even though bytewise checking would
+// a &[u8] that contains a pointer even though bytewise checking would
 // reject it. However, that's good: We don't inherently want
 // to reject those pointers, we just do not have the machinery to
 // talk about parts of a pointer.
@@ -455,7 +455,7 @@ fn mono_item_visibility(
 def_id
 } else {
 return if export_generics && is_generic {
-// If it is a upstream monomorphization and we export generics, we must make
+// If it is an upstream monomorphization and we export generics, we must make
 // it available to downstream crates.
 *can_be_internalized = false;
 default_visibility(tcx, def_id, true)

@@ -86,7 +86,7 @@ impl NonConstOp for FnCallNonConst {
 }
 }

-/// A call to a `#[unstable]` const fn or `#[rustc_const_unstable]` function.
+/// A call to an `#[unstable]` const fn or `#[rustc_const_unstable]` function.
 ///
 /// Contains the name of the feature that would allow the use of this function.
 #[derive(Debug)]

@@ -145,7 +145,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp {

 // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
 // constants, instead of just checking for const-folding succeeding.
-// That would require an uniform one-def no-mutation analysis
+// That would require a uniform one-def no-mutation analysis
 // and RPO (or recursing when needing the value of a local).
 let mut optimization_finder = ConstPropagator::new(body, dummy_body, tcx);
 optimization_finder.visit_body(body);
@@ -100,7 +100,7 @@ impl CoverageCounters {
 CounterValueReference::from(next)
 }

-/// Expression IDs start from u32::MAX and go down because a Expression can reference
+/// Expression IDs start from u32::MAX and go down because an Expression can reference
 /// (add or subtract counts) of both Counter regions and Expression regions. The counter
 /// expression operand IDs must be unique across both types.
 fn next_expression(&mut self) -> InjectedExpressionId {
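The numbering scheme that doc comment describes -- counter IDs counting up from zero while expression IDs count down from `u32::MAX`, so the two operand spaces never collide -- can be sketched in isolation roughly as follows. This is a simplified illustration, not the actual `CoverageCounters` type.

```
// Two ID spaces carved out of one u32 range: counters grow upward from 0,
// expressions grow downward from u32::MAX, so any operand ID is unambiguous.
struct IdAllocator {
    next_counter_id: u32,
    next_expression_id: u32,
}

impl IdAllocator {
    fn new() -> Self {
        Self { next_counter_id: 0, next_expression_id: u32::MAX }
    }

    fn next_counter(&mut self) -> u32 {
        let id = self.next_counter_id;
        self.next_counter_id += 1;
        id
    }

    fn next_expression(&mut self) -> u32 {
        let id = self.next_expression_id;
        self.next_expression_id -= 1;
        id
    }
}

fn main() {
    let mut ids = IdAllocator::new();
    assert_eq!(ids.next_counter(), 0);
    assert_eq!(ids.next_counter(), 1);
    assert_eq!(ids.next_expression(), u32::MAX);
    assert_eq!(ids.next_expression(), u32::MAX - 1);
}
```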
@@ -334,7 +334,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
 /// process (via `take_counter()`).
 ///
 /// Any other counter associated with a `BasicCoverageBlock`, or its incoming edge, but not
-/// associated with a `CoverageSpan`, should only exist if the counter is a `Expression`
+/// associated with a `CoverageSpan`, should only exist if the counter is an `Expression`
 /// dependency (one of the expression operands). Collect them, and inject the additional
 /// counters into the MIR, without a reportable coverage span.
 fn inject_indirect_counters(

@@ -88,11 +88,11 @@ impl<'tcx> MirPass<'tcx> for SanityCheck {
 /// For each such call, determines what the dataflow bit-state is for
 /// the L-value corresponding to `expr`; if the bit-state is a 1, then
 /// that call to `rustc_peek` is ignored by the sanity check. If the
-/// bit-state is a 0, then this pass emits a error message saying
+/// bit-state is a 0, then this pass emits an error message saying
 /// "rustc_peek: bit not set".
 ///
 /// The intention is that one can write unit tests for dataflow by
-/// putting code into an UI test and using `rustc_peek` to
+/// putting code into a UI test and using `rustc_peek` to
 /// make observations about the results of dataflow static analyses.
 ///
 /// (If there are any calls to `rustc_peek` that do not match the
@@ -61,7 +61,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 unpack!(this.expr_into_dest(destination, else_blk, &this.thir[else_opt]))
 } else {
 // Body of the `if` expression without an `else` clause must return `()`, thus
-// we implicitly generate a `else {}` if it is not specified.
+// we implicitly generate an `else {}` if it is not specified.
 let correct_si = this.source_info(expr_span.shrink_to_hi());
 this.cfg.push_assign_unit(else_blk, correct_si, destination, this.tcx);
 else_blk

@@ -208,7 +208,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 );
 this.diverge_from(loop_block);

-// The “return” value of the loop body must always be an unit. We therefore
+// The “return” value of the loop body must always be a unit. We therefore
 // introduce a unit temporary as the destination for the loop body.
 let tmp = this.get_unit_temp();
 // Execute the body, branching back to the test.

@@ -1936,7 +1936,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 // }
 // ```
 //
-// would yield a `arm_block` something like:
+// would yield an `arm_block` something like:
 //
 // ```
 // StorageLive(_4); // _4 is `x`
@@ -700,7 +700,7 @@ fn construct_const<'a, 'tcx>(
 builder.finish()
 }

-/// Construct MIR for a item that has had errors in type checking.
+/// Construct MIR for an item that has had errors in type checking.
 ///
 /// This is required because we may still want to run MIR passes on an item
 /// with type errors, but normal MIR construction can't handle that in general.

@@ -885,7 +885,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 let tcx_hir = tcx.hir();
 let hir_typeck_results = self.typeck_results;

-// In analyze_closure() in upvar.rs we gathered a list of upvars used by a
+// In analyze_closure() in upvar.rs we gathered a list of upvars used by an
 // indexed closure and we stored in a map called closure_min_captures in TypeckResults
 // with the closure's DefId. Here, we run through that vec of UpvarIds for
 // the given closure and use the necessary information to create upvar

@@ -788,7 +788,7 @@ impl<'tcx> Cx<'tcx> {
 self.user_substs_applied_to_ty_of_hir_id(hir_id)
 }

-// `Self` is used in expression as a tuple struct constructor or an unit struct constructor
+// `Self` is used in expression as a tuple struct constructor or a unit struct constructor
 Res::SelfCtor(_) => self.user_substs_applied_to_ty_of_hir_id(hir_id),

 _ => bug!("user_substs_applied_to_res: unexpected res {:?} at {:?}", res, hir_id),
@@ -762,7 +762,7 @@ impl<'p, 'tcx> SubPatSet<'p, 'tcx> {
 for i in 0..*alt_count {
 let sub_set = subpats.get(&i).unwrap_or(&SubPatSet::Empty);
 if sub_set.is_empty() {
-// Found a unreachable subpattern.
+// Found an unreachable subpattern.
 spans.push(expanded[i].span);
 } else {
 fill_spans(sub_set, spans);

@@ -301,7 +301,7 @@ impl<'a> Parser<'a> {
 // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
 // then extend the range of captured tokens to include it, since the parser
 // was not actually bumped past it. When the `LazyTokenStream` gets converted
-// into a `AttrAnnotatedTokenStream`, we will create the proper token.
+// into an `AttrAnnotatedTokenStream`, we will create the proper token.
 if self.token_cursor.break_last_token {
 assert_eq!(
 trailing,

@@ -320,7 +320,7 @@ impl<'a> Parser<'a> {
 } else {
 // Grab any replace ranges that occur *inside* the current AST node.
 // We will perform the actual replacement when we convert the `LazyTokenStream`
-// to a `AttrAnnotatedTokenStream`
+// to an `AttrAnnotatedTokenStream`
 let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
 self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
 .iter()
@@ -486,7 +486,7 @@ fn make_token_stream(
 if let AttrAnnotatedTokenTree::Token(last_token) = last_token {
 let unglued_first = last_token.kind.break_two_token_op().unwrap().0;

-// A 'unglued' token is always two ASCII characters
+// An 'unglued' token is always two ASCII characters
 let mut first_span = last_token.span.shrink_to_lo();
 first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));

@@ -1092,7 +1092,7 @@ impl<'a> Parser<'a> {
 // added to the return value after the fact.
 //
 // Therefore, prevent sub-parser from parsing
-// attributes by giving them a empty "already-parsed" list.
+// attributes by giving them an empty "already-parsed" list.
 let attrs = AttrVec::new();

 // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.

@@ -152,7 +152,7 @@ pub struct Parser<'a> {
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
 /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
-/// on a `AttrAnnotatedTokenStream`
+/// on an `AttrAnnotatedTokenStream`
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
 /// remove inner attributes from the token stream entirely, and
@@ -165,7 +165,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);

 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
-/// we will never need a `AttrAnnotatedTokenStream`
+/// we will never need an `AttrAnnotatedTokenStream`
 #[derive(Copy, Clone)]
 pub enum Capturing {
 /// We aren't performing any capturing - this is the default mode.

@@ -1362,10 +1362,10 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
 }
 }

-/// A helper struct used when building a `AttrAnnotatedTokenStream` from
+/// A helper struct used when building an `AttrAnnotatedTokenStream` from
 /// a `LazyTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
-/// is then 'parsed' to build up a `AttrAnnotatedTokenStream` with nested
+/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
 /// `AttrAnnotatedTokenTree::Delimited` tokens
 #[derive(Debug, Clone)]
 pub enum FlatToken {
@@ -1375,10 +1375,10 @@ pub enum FlatToken {
 /// Holds the `AttributesData` for an AST node. The
 /// `AttributesData` is inserted directly into the
 /// constructed `AttrAnnotatedTokenStream` as
-/// a `AttrAnnotatedTokenTree::Attributes`
+/// an `AttrAnnotatedTokenTree::Attributes`
 AttrTarget(AttributesData),
 /// A special 'empty' token that is ignored during the conversion
-/// to a `AttrAnnotatedTokenStream`. This is used to simplify the
+/// to an `AttrAnnotatedTokenStream`. This is used to simplify the
 /// handling of replace ranges.
 Empty,
 }

@@ -575,7 +575,7 @@ impl EmbargoVisitor<'tcx> {
 }
 }

-/// Given the path segments of a `ItemKind::Use`, then we need
+/// Given the path segments of an `ItemKind::Use`, then we need
 /// to update the visibility of the intermediate use so that it isn't linted
 /// by `unreachable_pub`.
 ///
|
@ -956,9 +956,61 @@ impl<'a> Resolver<'a> {
|
|||||||
if macro_kind == MacroKind::Derive && (ident.name == sym::Send || ident.name == sym::Sync) {
|
if macro_kind == MacroKind::Derive && (ident.name == sym::Send || ident.name == sym::Sync) {
|
||||||
let msg = format!("unsafe traits like `{}` should be implemented explicitly", ident);
|
let msg = format!("unsafe traits like `{}` should be implemented explicitly", ident);
|
||||||
err.span_note(ident.span, &msg);
|
err.span_note(ident.span, &msg);
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
if self.macro_names.contains(&ident.normalize_to_macros_2_0()) {
|
if self.macro_names.contains(&ident.normalize_to_macros_2_0()) {
|
||||||
err.help("have you added the `#[macro_use]` on the module/import?");
|
err.help("have you added the `#[macro_use]` on the module/import?");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
for ns in [Namespace::MacroNS, Namespace::TypeNS, Namespace::ValueNS] {
|
||||||
|
if let Ok(binding) = self.early_resolve_ident_in_lexical_scope(
|
||||||
|
ident,
|
||||||
|
ScopeSet::All(ns, false),
|
||||||
|
&parent_scope,
|
||||||
|
false,
|
||||||
|
false,
|
||||||
|
ident.span,
|
||||||
|
) {
|
||||||
|
let desc = match binding.res() {
|
||||||
|
Res::Def(DefKind::Macro(MacroKind::Bang), _) => {
|
||||||
|
"a function-like macro".to_string()
|
||||||
|
}
|
||||||
|
Res::Def(DefKind::Macro(MacroKind::Attr), _) | Res::NonMacroAttr(..) => {
|
||||||
|
format!("an attribute: `#[{}]`", ident)
|
||||||
|
}
|
||||||
|
Res::Def(DefKind::Macro(MacroKind::Derive), _) => {
|
||||||
|
format!("a derive macro: `#[derive({})]`", ident)
|
||||||
|
}
|
||||||
|
Res::ToolMod => {
|
||||||
|
// Don't confuse the user with tool modules.
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Res::Def(DefKind::Trait, _) if macro_kind == MacroKind::Derive => {
|
||||||
|
"only a trait, without a derive macro".to_string()
|
||||||
|
}
|
||||||
|
res => format!(
|
||||||
|
"{} {}, not {} {}",
|
||||||
|
res.article(),
|
||||||
|
res.descr(),
|
||||||
|
macro_kind.article(),
|
||||||
|
macro_kind.descr_expected(),
|
||||||
|
),
|
||||||
|
};
|
||||||
|
if let crate::NameBindingKind::Import { import, .. } = binding.kind {
|
||||||
|
if !import.span.is_dummy() {
|
||||||
|
err.span_note(
|
||||||
|
import.span,
|
||||||
|
&format!("`{}` is imported here, but it is {}", ident, desc),
|
||||||
|
);
|
||||||
|
// Silence the 'unused import' warning we might get,
|
||||||
|
// since this diagnostic already covers that import.
|
||||||
|
self.record_use(ident, binding, false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err.note(&format!("`{}` is in scope, but it is {}", ident, desc));
|
||||||
|
return;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
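A hedged illustration of the kind of code the extra notes above are aimed at. The input intentionally fails to compile (that is the point -- it is the case the diagnostic handles), and the messages in the comments are an approximation of the wording produced by the added notes, not verbatim compiler output.

```
#[derive(Default)]
struct Config {
    retries: u32,
}

fn main() {
    // error: cannot find macro `Default` in this scope
    // note (added by the change above, wording approximate):
    //   `Default` is in scope, but it is a derive macro: `#[derive(Default)]`
    let config = Default!();
}
```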
@@ -1826,7 +1826,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
 err.emit();
 }

-// FIXME(const_generics): This patches over a ICE caused by non-'static lifetimes in const
+// FIXME(const_generics): This patches over an ICE caused by non-'static lifetimes in const
 // generics. We are disallowing this until we can decide on how we want to handle non-'static
 // lifetimes in const generics. See issue #74052 for discussion.
 crate fn emit_non_static_lt_in_const_generic_error(&self, lifetime_ref: &hir::Lifetime) {

@@ -668,7 +668,7 @@ enum NameBindingKind<'a> {
 }

 impl<'a> NameBindingKind<'a> {
-/// Is this a name binding of a import?
+/// Is this a name binding of an import?
 fn is_import(&self) -> bool {
 matches!(*self, NameBindingKind::Import { .. })
 }

@@ -3065,7 +3065,7 @@ impl<'a> Resolver<'a> {
 self.extern_prelude.get(&ident).map_or(true, |entry| entry.introduced_by_item);
 // Only suggest removing an import if both bindings are to the same def, if both spans
 // aren't dummy spans. Further, if both bindings are imports, then the ident must have
-// been introduced by a item.
+// been introduced by an item.
 let should_remove_import = duplicate
 && !has_dummy_span
 && ((new_binding.is_extern_crate() || old_binding.is_extern_crate()) || from_item);

@@ -3160,7 +3160,7 @@ impl<'a> Resolver<'a> {
 }
 }

-/// This function adds a suggestion to remove a unnecessary binding from an import that is
+/// This function adds a suggestion to remove an unnecessary binding from an import that is
 /// nested. In the following example, this function will be invoked to remove the `a` binding
 /// in the second use statement:
 ///

@@ -3212,7 +3212,7 @@ impl<'a> Resolver<'a> {
 Applicability::MaybeIncorrect,
 );
 } else {
-// Remove the entire line if we cannot extend the span back, this indicates a
+// Remove the entire line if we cannot extend the span back, this indicates an
 // `issue_52891::{self}` case.
 err.span_suggestion(
 import.use_span_with_attributes,
@@ -1202,7 +1202,7 @@ impl Json {
 matches!(*self, Json::I64(_) | Json::U64(_) | Json::F64(_))
 }

-/// Returns `true` if the Json value is a `i64`.
+/// Returns `true` if the Json value is an `i64`.
 pub fn is_i64(&self) -> bool {
 matches!(*self, Json::I64(_))
 }

@@ -1217,7 +1217,7 @@ impl Json {
 matches!(*self, Json::F64(_))
 }

-/// If the Json value is a number, returns or cast it to a `i64`;
+/// If the Json value is a number, returns or cast it to an `i64`;
 /// returns `None` otherwise.
 pub fn as_i64(&self) -> Option<i64> {
 match *self {
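For context, the accessor pattern these two hunks document can be reproduced on a pared-down stand-in enum. This is a self-contained sketch, not the `rustc_serialize` implementation itself, and its exact conversion rules may differ from the real `as_i64`.

```
use std::convert::TryFrom;

// Minimal stand-in for the Json enum above, with the same accessor shape.
enum Json {
    I64(i64),
    U64(u64),
    F64(f64),
    String(String),
}

impl Json {
    /// Returns `true` if the Json value is an `i64`.
    fn is_i64(&self) -> bool {
        matches!(*self, Json::I64(_))
    }

    /// If the Json value is an integer, returns or casts it to an `i64`;
    /// returns `None` otherwise.
    fn as_i64(&self) -> Option<i64> {
        match *self {
            Json::I64(n) => Some(n),
            Json::U64(n) => i64::try_from(n).ok(),
            _ => None,
        }
    }
}

fn main() {
    assert!(Json::I64(-3).is_i64());
    assert_eq!(Json::U64(7).as_i64(), Some(7));
    assert_eq!(Json::F64(2.5).as_i64(), None);
    assert_eq!(Json::String("x".into()).as_i64(), None);
}
```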
@@ -644,7 +644,7 @@ impl<D: Decoder, T: Decodable<D> + Copy> Decodable<D> for Cell<T> {
 }

 // FIXME: #15036
-// Should use `try_borrow`, returning a
+// Should use `try_borrow`, returning an
 // `encoder.error("attempting to Encode borrowed RefCell")`
 // from `encode` when `try_borrow` returns `None`.

@@ -1287,7 +1287,7 @@ options! {
 thinlto: Option<bool> = (None, parse_opt_bool, [TRACKED],
 "enable ThinLTO when possible"),
 thir_unsafeck: bool = (false, parse_bool, [TRACKED],
-"use the work-in-progress THIR unsafety checker. NOTE: this is unsound (default: no)"),
+"use the THIR unsafety checker (default: no)"),
 /// We default to 1 here since we want to behave like
 /// a sequential compiler for now. This'll likely be adjusted
 /// in the future. Note that -Zthreads=0 is the way to get

@@ -100,7 +100,7 @@ pub trait FileLoader {
 /// Query the existence of a file.
 fn file_exists(&self, path: &Path) -> bool;

-/// Read the contents of an UTF-8 file into memory.
+/// Read the contents of a UTF-8 file into memory.
 fn read_file(&self, path: &Path) -> io::Result<String>;
 }

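Since this hunk shows the full two-method surface of `FileLoader`, a minimal standalone implementation of the same shape could look like the following. The trait is re-declared here so the sketch compiles on its own, and `DiskLoader` is an invented name rather than anything from `rustc_span`.

```
use std::io;
use std::path::Path;

// Re-declaration of the trait shape shown in the hunk above, so this
// example is self-contained.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;
    /// Read the contents of a UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}

/// The obvious filesystem-backed implementation.
struct DiskLoader;

impl FileLoader for DiskLoader {
    fn file_exists(&self, path: &Path) -> bool {
        path.exists()
    }

    fn read_file(&self, path: &Path) -> io::Result<String> {
        std::fs::read_to_string(path)
    }
}

fn main() -> io::Result<()> {
    let loader = DiskLoader;
    if loader.file_exists(Path::new("Cargo.toml")) {
        println!("{}", loader.read_file(Path::new("Cargo.toml"))?);
    }
    Ok(())
}
```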
@@ -469,7 +469,7 @@ impl InlineAsmRegClass {
 }
 }

-/// Returns a list of supported types for this register class, each with a
+/// Returns a list of supported types for this register class, each with an
 /// options target feature required to use this type.
 pub fn supported_types(
 self,

@@ -77,7 +77,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
 /// ?0: Iterator<Item = ?1>
 /// ?1: Debug
 ///
-/// Moreover, it returns a `OpaqueTypeMap` that would map `?0` to
+/// Moreover, it returns an `OpaqueTypeMap` that would map `?0` to
 /// info about the `impl Iterator<..>` type and `?1` to info about
 /// the `impl Debug` type.
 ///

@@ -886,7 +886,7 @@ impl<'a, 'tcx> Instantiator<'a, 'tcx> {
 // }
 // ```
 //
-// Here, the return type of `foo` references a
+// Here, the return type of `foo` references an
 // `Opaque` indeed, but not one whose value is
 // presently being inferred. You can get into a
 // similar situation with closure return types

@@ -12,7 +12,7 @@ use rustc_errors::ErrorReported;
 use rustc_middle::ty::fold::TypeFoldable;
 use rustc_middle::ty::{self, TyCtxt};

-/// Attempts to resolve an obligation to a `ImplSource`. The result is
+/// Attempts to resolve an obligation to an `ImplSource`. The result is
 /// a shallow `ImplSource` resolution, meaning that we do not
 /// (necessarily) resolve all nested obligations on the impl. Note
 /// that type check should guarantee to us that all nested
@@ -2009,7 +2009,7 @@ pub enum ArgKind {
 Arg(String, String),

 /// An argument of tuple type. For a "found" argument, the span is
-/// the location in the source of the pattern. For a "expected"
+/// the location in the source of the pattern. For an "expected"
 /// argument, it will be None. The vector is a list of (name, ty)
 /// strings for the components of the tuple.
 Tuple(Option<Span>, Vec<(String, String)>),

@@ -1365,7 +1365,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
 // When a future does not implement a trait because of a captured type in one of the
 // generators somewhere in the call stack, then the result is a chain of obligations.
 //
-// Given a `async fn` A that calls a `async fn` B which captures a non-send type and that
+// Given an `async fn` A that calls an `async fn` B which captures a non-send type and that
 // future is passed as an argument to a function C which requires a `Send` type, then the
 // chain looks something like this:
 //

@@ -62,7 +62,7 @@ enum ProjectionTyCandidate<'tcx> {
 /// Bounds specified on an object type
 Object(ty::PolyProjectionPredicate<'tcx>),

-/// From a "impl" (or a "pseudo-impl" returned by select)
+/// From an "impl" (or a "pseudo-impl" returned by select)
 Select(Selection<'tcx>),
 }

@@ -1011,7 +1011,7 @@ fn prune_cache_value_obligations<'a, 'tcx>(
 /// Note that we used to return `Error` here, but that was quite
 /// dubious -- the premise was that an error would *eventually* be
 /// reported, when the obligation was processed. But in general once
-/// you see a `Error` you are supposed to be able to assume that an
+/// you see an `Error` you are supposed to be able to assume that an
 /// error *has been* reported, so that you can take whatever heuristic
 /// paths you want to take. To make things worse, it was possible for
 /// cycles to arise, where you basically had a setup like `<MyType<$0>
@@ -24,7 +24,7 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> {
 ) -> Fallible<CanonicalizedQueryResponse<'tcx, Self::QueryResponse>> {
 // FIXME this `unchecked_map` is only necessary because the
 // query is defined as taking a `ParamEnvAnd<Ty>`; it should
-// take a `ImpliedOutlivesBounds` instead
+// take an `ImpliedOutlivesBounds` instead
 let canonicalized = canonicalized.unchecked_map(|ParamEnvAnd { param_env, value }| {
 let ImpliedOutlivesBounds { ty } = value;
 param_env.and(ty)

@@ -257,7 +257,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 ImplSourceBuiltinData { nested: obligations }
 }

-/// This handles the case where a `auto trait Foo` impl is being used.
+/// This handles the case where an `auto trait Foo` impl is being used.
 /// The idea is that the impl applies to `X : Foo` if the following conditions are met:
 ///
 /// 1. For each constituent type `Y` in `X`, `Y : Foo` holds

@@ -149,7 +149,7 @@ pub(super) fn specializes(tcx: TyCtxt<'_>, (impl1_def_id, impl2_def_id): (DefId,
 let penv = tcx.param_env(impl1_def_id);
 let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap();

-// Create a infcx, taking the predicates of impl1 as assumptions:
+// Create an infcx, taking the predicates of impl1 as assumptions:
 tcx.infer_ctxt().enter(|infcx| {
 // Normalize the trait reference. The WF rules ought to ensure
 // that this always succeeds.

@@ -718,7 +718,7 @@ impl<'tcx> chalk_ir::UnificationDatabase<RustInterner<'tcx>> for RustIrDatabase<
 }
 }

-/// Creates a `InternalSubsts` that maps each generic parameter to a higher-ranked
+/// Creates an `InternalSubsts` that maps each generic parameter to a higher-ranked
 /// var bound at index `0`. For types, we use a `BoundVar` index equal to
 /// the type parameter index. For regions, we use the `BoundRegionKind::BrNamed`
 /// variant (which has a `DefId`).
@ -185,7 +185,7 @@ fn impl_constness(tcx: TyCtxt<'_>, def_id: DefId) -> hir::Constness {
|
|||||||
/// - a type parameter or projection whose Sizedness can't be known
|
/// - a type parameter or projection whose Sizedness can't be known
|
||||||
/// - a tuple of type parameters or projections, if there are multiple
|
/// - a tuple of type parameters or projections, if there are multiple
|
||||||
/// such.
|
/// such.
|
||||||
/// - a Error, if a type contained itself. The representability
|
/// - an Error, if a type contained itself. The representability
|
||||||
/// check should catch this case.
|
/// check should catch this case.
|
||||||
fn adt_sized_constraint(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AdtSizedConstraint<'_> {
|
fn adt_sized_constraint(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AdtSizedConstraint<'_> {
|
||||||
let def = tcx.adt_def(def_id);
|
let def = tcx.adt_def(def_id);
|
||||||
@@ -253,7 +253,7 @@ fn param_env(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamEnv<'_> {
 // `<i32 as Foo>::Bar` where `i32` does not implement `Foo`. We
 // report these errors right here; this doesn't actually feel
 // right to me, because constructing the environment feels like a
-// kind of a "idempotent" action, but I'm not sure where would be
+// kind of an "idempotent" action, but I'm not sure where would be
 // a better place. In practice, we construct environments for
 // every fn once during type checking, and we'll abort if there
 // are any errors at that point, so after type checking you can be
@@ -2350,7 +2350,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 }
 hir::TyKind::Infer => {
 // Infer also appears as the type of arguments or return
-// values in a ExprKind::Closure, or as
+// values in an ExprKind::Closure, or as
 // the type of local variables. Both of these cases are
 // handled specially and will not descend into this routine.
 self.ty_infer(None, ast_ty.span)
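The two cases the comment mentions (inferred closure argument/return types and inferred local types) look like this at the source level; a minimal sketch, not compiler code:

fn main() {
    // The closure's argument and return types are never written down; both
    // are inferred, which is the first case mentioned in the comment.
    let add_one = |n| n + 1;

    // A local variable with an inferred type, the second case.
    let x = add_one(41);
    assert_eq!(x, 42);
}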
@@ -617,7 +617,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// Invoked when we are translating the generator that results
 /// from desugaring an `async fn`. Returns the "sugared" return
 /// type of the `async fn` -- that is, the return type that the
-/// user specified. The "desugared" return type is a `impl
+/// user specified. The "desugared" return type is an `impl
 /// Future<Output = T>`, so we do this by searching through the
 /// obligations to extract the `T`.
 fn deduce_future_output_from_obligations(&self, expr_def_id: DefId) -> Option<Ty<'tcx>> {
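A rough picture of the sugared vs. desugared return types being discussed; `answer` and `answer_desugared` are illustrative, and the second function is only an approximation of the real lowering:

use std::future::Future;

// The "sugared" return type the user writes:
async fn answer() -> u32 {
    42
}

// Roughly the "desugared" shape: a future whose `Output` is the written type.
fn answer_desugared() -> impl Future<Output = u32> {
    async { 42 }
}

fn main() {
    let _ = answer();
    let _ = answer_desugared();
}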
@@ -743,7 +743,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // possibly be referring to the current closure,
 // because we haven't produced the `Closure` for
 // this closure yet; this is exactly why the other
-// code is looking for a self type of a unresolved
+// code is looking for a self type of an unresolved
 // inference variable.
 ty::PredicateKind::ClosureKind(..) => None,
 ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
@@ -96,7 +96,7 @@ pub struct FnCtxt<'a, 'tcx> {
 /// `foo(return)`; we warn on the `foo()` expression. (We then
 /// update the flag to `WarnedAlways` to suppress duplicate
 /// reports.) Similarly, if we traverse to a fresh statement (or
-/// tail expression) from a `Always` setting, we will issue a
+/// tail expression) from an `Always` setting, we will issue a
 /// warning. This corresponds to something like `{return;
 /// foo();}` or `{return; 22}`, where we would warn on the
 /// `foo()` or `22`.
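The `foo(return)` situation this doc comment refers to can be reproduced with a few lines; `foo` and `bar` are illustrative names, not from the diff:

fn foo(n: i32) -> i32 {
    n
}

fn bar() -> i32 {
    // `return 1` diverges, so `foo` is never called and the compiler emits
    // an unreachable-code warning on the call, as described above.
    foo(return 1)
}

fn main() {
    assert_eq!(bar(), 1);
}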
@@ -1456,7 +1456,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // an immut-ref after on top of this.
 ty::Ref(.., hir::Mutability::Mut) => is_mutbl = hir::Mutability::Mut,

-// The place isn't mutable once we dereference a immutable reference.
+// The place isn't mutable once we dereference an immutable reference.
 ty::Ref(.., hir::Mutability::Not) => return hir::Mutability::Not,

 // Dereferencing a box doesn't change mutability
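The three cases in this match correspond to the following user-facing behaviour; a sketch only, not the checker's own code:

fn main() {
    let mut value = 1;

    // Dereferencing a mutable reference keeps the place mutable.
    let mut_ref = &mut value;
    *mut_ref += 1;

    // Dereferencing an immutable reference makes the place immutable.
    let shared_ref = &value;
    // *shared_ref += 1; // would not compile
    assert_eq!(*shared_ref, 2);

    // Dereferencing a box does not change mutability.
    let mut boxed = Box::new(10);
    *boxed += 1;
    assert_eq!(*boxed, 11);
}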
@@ -155,7 +155,7 @@ fn unused_crates_lint(tcx: TyCtxt<'_>) {
 }

 // If the extern crate isn't in the extern prelude,
-// there is no way it can be written as an `use`.
+// there is no way it can be written as a `use`.
 let orig_name = extern_crate.orig_name.unwrap_or(item.ident.name);
 if !extern_prelude.get(&orig_name).map_or(false, |from_item| !from_item) {
 continue;
@@ -819,7 +819,7 @@ fn copy_or_move<'a, 'tcx>(
 }

 // - If a place is used in a `ByValue` context then move it if it's not a `Copy` type.
-// - If the place that is a `Copy` type consider it a `ImmBorrow`.
+// - If the place that is a `Copy` type consider it an `ImmBorrow`.
 fn delegate_consume<'a, 'tcx>(
 mc: &mc::MemCategorizationContext<'a, 'tcx>,
 delegate: &mut (dyn Delegate<'tcx> + 'a),
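At the surface level, the move-vs-copy distinction these comments describe plays out roughly like this for a `move` closure; a sketch of the user-visible effect, not of `delegate_consume` itself, and the variable names are made up:

fn main() {
    let name = String::from("ferris"); // not `Copy`
    let count = 3;                     // `Copy`

    let print = move || {
        // `name` is moved into the closure (a by-value use of a non-`Copy` type);
        // `count` is just copied, so the original stays usable afterwards.
        println!("{} x{}", name, count);
    };
    print();

    // println!("{}", name); // would not compile: `name` was moved
    println!("{}", count);
}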
@@ -297,7 +297,7 @@ pub fn check_explicit_predicates<'tcx>(
 // to apply the substs, and not filter this predicate, we might then falsely
 // conclude that e.g., `X: 'x` was a reasonable inferred requirement.
 //
-// Another similar case is where we have a inferred
+// Another similar case is where we have an inferred
 // requirement like `<Self as Trait>::Foo: 'b`. We presently
 // ignore such requirements as well (cc #54467)-- though
 // conceivably it might be better if we could extract the `Foo
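Inferred outlives requirements of the kind discussed here show up in ordinary code like the following; `Ref` is a made-up example type, and this only illustrates the general feature, not this function's logic:

// The `T: 'a` requirement implied by the reference field is inferred; it does
// not need to be written as an explicit predicate.
struct Ref<'a, T> {
    value: &'a T,
}

fn main() {
    let x = 5;
    let r = Ref { value: &x };
    assert_eq!(*r.value, 5);
}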
@@ -387,7 +387,7 @@ pub mod __alloc_error_handler {
 panic!("memory allocation of {} bytes failed", size)
 }

-// if there is a `#[alloc_error_handler]`
+// if there is an `#[alloc_error_handler]`
 #[rustc_std_internal_symbol]
 pub unsafe extern "C" fn __rg_oom(size: usize, align: usize) -> ! {
 let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
@@ -46,7 +46,7 @@
 //! }
 //! }
 //!
-//! // Each node is represented as an `usize`, for a shorter implementation.
+//! // Each node is represented as a `usize`, for a shorter implementation.
 //! struct Edge {
 //! node: usize,
 //! cost: usize,
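Since this hunk touches the `BinaryHeap` Dijkstra example, here is a tiny self-contained reminder of how the max-heap is turned into the min-heap such a search needs; this is a sketch using `Reverse`, not the module's own example:

use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // `BinaryHeap` is a max-heap; wrapping entries in `Reverse` turns it into
    // a min-heap. Entries here are `(cost, node)` pairs compared by cost first.
    let mut frontier = BinaryHeap::new();
    frontier.push(Reverse((4usize, 2usize)));
    frontier.push(Reverse((1, 1)));
    frontier.push(Reverse((3, 3)));

    // Pops in increasing cost order.
    assert_eq!(frontier.pop(), Some(Reverse((1, 1))));
    assert_eq!(frontier.pop(), Some(Reverse((3, 3))));
    assert_eq!(frontier.pop(), Some(Reverse((4, 2))));
}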
@@ -45,7 +45,7 @@ impl<T, const N: usize> IntoIter<T, N> {
 /// use std::array;
 ///
 /// for value in array::IntoIter::new([1, 2, 3, 4, 5]) {
-/// // The type of `value` is a `i32` here, instead of `&i32`
+/// // The type of `value` is an `i32` here, instead of `&i32`
 /// let _: i32 = value;
 /// }
 /// ```
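The by-value vs. by-reference contrast in that doc example, written out as a runnable sketch; note that on toolchains where arrays implement `IntoIterator` by value (1.53+), the function-call form below avoids the now-deprecated `array::IntoIter::new`:

fn main() {
    let a = [1, 2, 3, 4, 5];

    // By-value iteration: each item is an `i32`, not an `&i32`.
    for value in IntoIterator::into_iter(a) {
        let _: i32 = value;
    }

    // `.iter()` borrows instead, yielding `&i32`.
    for value in a.iter() {
        let _: &i32 = value;
    }
}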
@@ -958,7 +958,7 @@ impl char {
 /// Returns an iterator that yields the uppercase mapping of this `char` as one or more
 /// `char`s.
 ///
-/// If this `char` does not have a uppercase mapping, the iterator yields the same `char`.
+/// If this `char` does not have an uppercase mapping, the iterator yields the same `char`.
 ///
 /// If this `char` has a one-to-one uppercase mapping given by the [Unicode Character
 /// Database][ucd] [`UnicodeData.txt`], the iterator yields that `char`.
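The three behaviours this doc comment describes, as a short runnable check (a sketch, not the documentation's own example):

fn main() {
    // One-to-one mapping: a single `char` is yielded.
    assert_eq!('a'.to_uppercase().collect::<String>(), "A");

    // No uppercase mapping: the same `char` is yielded back.
    assert_eq!('1'.to_uppercase().collect::<String>(), "1");

    // One-to-many mapping: 'ß' uppercases to "SS".
    assert_eq!('ß'.to_uppercase().collect::<String>(), "SS");
}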
@@ -1013,7 +1013,7 @@ extern "rust-intrinsic" {
 /// let val_casts = unsafe { &mut *(ptr as *mut i32 as *mut u32) };
 /// ```
 ///
-/// Turning an `&str` into an `&[u8]`:
+/// Turning an `&str` into a `&[u8]`:
 ///
 /// ```
 /// // this is not a good way to do this.
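For the `&str` to `&[u8]` conversion mentioned here, the supported route is `str::as_bytes`; a short sketch, with the transmute variant the docs warn about left commented out:

fn main() {
    let s = "Rust";

    // The supported conversion:
    let bytes: &[u8] = s.as_bytes();
    assert_eq!(bytes, b"Rust");

    // The transmute route would be:
    // let bytes: &[u8] = unsafe { std::mem::transmute(s) };
}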
@@ -563,7 +563,7 @@ where
 }

 unsafe trait SpecTrustedRandomAccess: Iterator {
-/// If `Self: TrustedRandomAccess`, it must be safe to call a
+/// If `Self: TrustedRandomAccess`, it must be safe to call
 /// `Iterator::__iterator_get_unchecked(self, index)`.
 unsafe fn try_get_unchecked(&mut self, index: usize) -> Self::Item;
 }
@@ -25,7 +25,7 @@ impl<I: FusedIterator + ?Sized> FusedIterator for &mut I {}
 /// (lower bound is equal to upper bound), or the upper bound is [`None`].
 /// The upper bound must only be [`None`] if the actual iterator length is
 /// larger than [`usize::MAX`]. In that case, the lower bound must be
-/// [`usize::MAX`], resulting in a [`Iterator::size_hint()`] of
+/// [`usize::MAX`], resulting in an [`Iterator::size_hint()`] of
 /// `(usize::MAX, None)`.
 ///
 /// The iterator must produce exactly the number of elements it reported
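The `(usize::MAX, None)` shape discussed above can be observed directly; a small sketch:

fn main() {
    // An exact-size iterator reports matching lower and upper bounds.
    assert_eq!((0..10).size_hint(), (10, Some(10)));

    // An endless iterator cannot report a finite upper bound, so it uses the
    // `(usize::MAX, None)` shape described in the docs.
    assert_eq!(std::iter::repeat(0u8).size_hint(), (usize::MAX, None));
}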
@@ -660,7 +660,7 @@ pub unsafe fn zeroed<T>() -> T {
 #[rustc_diagnostic_item = "mem_uninitialized"]
 #[track_caller]
 pub unsafe fn uninitialized<T>() -> T {
-// SAFETY: the caller must guarantee that an unitialized value is valid for `T`.
+// SAFETY: the caller must guarantee that an uninitialized value is valid for `T`.
 unsafe {
 intrinsics::assert_uninit_valid::<T>();
 MaybeUninit::uninit().assume_init()
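For readers of this deprecated function, the supported replacement is `MaybeUninit`; a minimal sketch, assuming a toolchain with `MaybeUninit::write` (stable since 1.55):

use std::mem::MaybeUninit;

fn main() {
    // Start with an explicitly uninitialized slot...
    let mut slot: MaybeUninit<u32> = MaybeUninit::uninit();
    // ...initialize it...
    slot.write(42);
    // ...and only then assert that it is initialized.
    // SAFETY: the value was written just above.
    let value = unsafe { slot.assume_init() };
    assert_eq!(value, 42);
}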
Some files were not shown because too many files have changed in this diff.