mirror of https://github.com/rust-lang/rust.git, synced 2024-11-25 08:13:41 +00:00
Match unmatched backticks in comments in compiler/
parent ff2c609d66
commit 6df5ae4fb0
@@ -225,7 +225,7 @@ impl AssocOp {
 AssignOp(_) | // `{ 42 } +=`
 As | // `{ 42 } as usize`
 // Equal | // `{ 42 } == { 42 }` Accepting these here would regress incorrect
-// NotEqual | // `{ 42 } != { 42 } struct literals parser recovery.
+// NotEqual | // `{ 42 } != { 42 }` struct literals parser recovery.
 Colon, // `{ 42 }: usize`
 )
 }
@@ -192,7 +192,7 @@ impl<'a> AstValidator<'a> {
 // We allow these:
 // - `Option<impl Trait>`
 // - `option::Option<impl Trait>`
-// - `option::Option<T>::Foo<impl Trait>
+// - `option::Option<T>::Foo<impl Trait>`
 //
 // But not these:
 // - `<impl Trait>::Foo`
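To illustrate the first allowed form from that comment, here is a small, self-contained sketch (not taken from the commit; the function name and iterator are invented for the example):

```rust
// Allowed: `impl Trait` nested in a generic argument of the return type.
fn make() -> Option<impl Iterator<Item = u32>> {
    Some(0u32..3)
}

// Not allowed (per the same comment): `impl Trait` as the self type of a
// qualified path such as `<impl Trait>::Foo`.

fn main() {
    let total: u32 = make().into_iter().flatten().sum();
    assert_eq!(total, 3);
}
```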
@@ -1873,7 +1873,7 @@ pub(super) fn check_type_bounds<'tcx>(
 // type Bar<C> =...
 // }
 //
-// - `impl_trait_ref` would be `<(A, B) as Foo<u32>>
+// - `impl_trait_ref` would be `<(A, B) as Foo<u32>>`
 // - `impl_ty_substs` would be `[A, B, ^0.0]` (`^0.0` here is the bound var with db 0 and index 0)
 // - `rebased_substs` would be `[(A, B), u32, ^0.0]`, combining the substs from
 // the *trait* with the generic associated type parameters (as bound vars).
@@ -437,7 +437,7 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
 }

 // Here we are considering a case of converting
-// `S<P0...Pn>` to S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+// `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
 // which acts like a pointer to `U`, but carries along some extra data of type `T`:
 //
 // struct Foo<T, U> {
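The `Foo<T, U>` the comment imagines can be written out as a standalone, nightly-only sketch (the `coerce_unsized` and `unsize` features are unstable; the field names and `main` are invented for illustration):

```rust
#![feature(coerce_unsized, unsize)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

// Acts like a pointer to `U`, but carries along some extra data of type `T`.
struct Foo<T, U: ?Sized> {
    extra: T,
    ptr: *const U,
}

// Exactly one field changes type between `Foo<T, U>` and `Foo<T, V>`, and that
// field's types (`*const U` and `*const V`) are themselves related by
// `CoerceUnsized` when `U: Unsize<V>`.
impl<T, U: ?Sized, V: ?Sized> CoerceUnsized<Foo<T, V>> for Foo<T, U> where U: Unsize<V> {}

fn main() {
    let array = [1u8, 2, 3];
    let thin: Foo<&str, [u8; 3]> = Foo { extra: "metadata", ptr: &array };
    // The unsizing coercion `S<P0...Pn>` -> `S<Q0...Qn>` from the comment:
    let wide: Foo<&str, [u8]> = thin;
    assert_eq!(unsafe { (*wide.ptr).len() }, 3);
    assert_eq!(wide.extra, "metadata");
}
```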
@@ -636,7 +636,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
 // `&&Some(x,)` `place_foo`
 // `&Some(x,)` `deref { place_foo}`
 // `Some(x,)` `deref { deref { place_foo }}`
-// (x,)` `field0 { deref { deref { place_foo }}}` <- resulting place
+// `(x,)` `field0 { deref { deref { place_foo }}}` <- resulting place
 //
 // The above example has no adjustments. If the code were instead the (after adjustments,
 // equivalent) version
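The surface Rust that produces the nesting described in that comment looks roughly like this (variable names invented for the sketch):

```rust
fn main() {
    // `place_foo` has type `&&Option<(i32,)>`.
    let place_foo: &&Option<(i32,)> = &&Some((5,));
    // Matching `&&Some((x,))` peels two derefs and then a field projection,
    // mirroring `field0 { deref { deref { place_foo }}}`.
    if let &&Some((x,)) = place_foo {
        println!("{x}");
    }
}
```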
@@ -1621,7 +1621,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 #[instrument(level = "debug", skip(self))]
 fn encode_info_for_closure(&mut self, def_id: LocalDefId) {
 // NOTE(eddyb) `tcx.type_of(def_id)` isn't used because it's fully generic,
-// including on the signature, which is inferred in `typeck.
+// including on the signature, which is inferred in `typeck`.
 let typeck_result: &'tcx ty::TypeckResults<'tcx> = self.tcx.typeck(def_id);
 let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id);
 let ty = typeck_result.node_type(hir_id);
@@ -2908,7 +2908,7 @@ fn pretty_print_const_value<'tcx>(
 // the `destructure_const` query with an empty `ty::ParamEnv` without
 // introducing ICEs (e.g. via `layout_of`) from missing bounds.
 // E.g. `transmute([0usize; 2]): (u8, *mut T)` needs to know `T: Sized`
-// to be able to destructure the tuple into `(0u8, *mut T)
+// to be able to destructure the tuple into `(0u8, *mut T)`
 //
 // FIXME(eddyb) for `--emit=mir`/`-Z dump-mir`, we should provide the
 // correct `ty::ParamEnv` to allow printing *all* constant values.
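A runtime analogue of the value in that example, with `*mut u8` standing in for `*mut T` so the tuple's layout is fully known (purely illustrative, not part of the commit):

```rust
use std::mem;

fn main() {
    // Sixteen (or, on 32-bit targets, eight) zero bytes reinterpreted as the
    // tuple from the comment; every field reads back as zero.
    let (byte, ptr): (u8, *mut u8) = unsafe { mem::transmute([0usize; 2]) };
    assert_eq!(byte, 0);
    assert!(ptr.is_null());
}
```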
@@ -11,7 +11,7 @@ use std::io::{self, Write};
 pub const TOOLTIP_INDENT: &str = " ";

 const CARET: char = '\u{2038}'; // Unicode `CARET`
-const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET
+const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET`
 const ANNOTATION_RIGHT_BRACKET: char = '\u{2989}'; // Unicode `Z NOTATION LEFT BINDING BRACKET`
 const NEW_LINE_SPAN: &str = "</span>\n<span class=\"line\">";
 const HEADER: &str = r#"<!DOCTYPE html>
@@ -2187,7 +2187,7 @@ impl<'tcx> TyCtxt<'tcx> {
 // Actually intern type lists as lists of `GenericArg`s.
 //
 // Transmuting from `Ty<'tcx>` to `GenericArg<'tcx>` is sound
-// as explained in ty_slice_as_generic_arg`. With this,
+// as explained in `ty_slice_as_generic_arg`. With this,
 // we guarantee that even when transmuting between `List<Ty<'tcx>>`
 // and `List<GenericArg<'tcx>>`, the uniqueness requirement for
 // lists is upheld.
@@ -105,7 +105,7 @@ impl<'tcx> VariantDef {
 impl<'tcx> Ty<'tcx> {
 pub fn inhabited_predicate(self, tcx: TyCtxt<'tcx>) -> InhabitedPredicate<'tcx> {
 match self.kind() {
-// For now, union`s are always considered inhabited
+// For now, unions are always considered inhabited
 Adt(adt, _) if adt.is_union() => InhabitedPredicate::True,
 // Non-exhaustive ADTs from other crates are always considered inhabited
 Adt(adt, _) if adt.is_variant_list_non_exhaustive() && !adt.did().is_local() => {
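A small, hypothetical example of the kind of type that first arm covers (the union is invented for illustration): even though its only field's type is uninhabited, the union itself still falls under the "always considered inhabited" rule.

```rust
use std::convert::Infallible;

// Its only field is uninhabited, yet the union is treated as inhabited for now.
union OnlyNever {
    never: Infallible,
}

fn main() {
    println!("size = {}", std::mem::size_of::<OnlyNever>());
}
```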
@@ -710,7 +710,7 @@ impl<'tcx> Predicate<'tcx> {
 // The substitution from the input trait-ref is therefore going to be
 // `'a => 'x` (where `'x` has a DB index of 1).
 // - The supertrait-ref is `for<'b> Bar1<'a,'b>`, where `'a` is an
-// early-bound parameter and `'b' is a late-bound parameter with a
+// early-bound parameter and `'b` is a late-bound parameter with a
 // DB index of 1.
 // - If we replace `'a` with `'x` from the input, it too will have
 // a DB index of 1, and thus we'll have `for<'x,'b> Bar1<'x,'b>`
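The trait shape that comment reasons about can be written out as a minimal sketch (the trait and type names are assumptions for illustration, not taken from the commit):

```rust
// `Bar1` appears as a higher-ranked supertrait of `Foo1`: `'b` is bound by the
// `for<'b>` binder (late-bound), while `'a` is an ordinary early-bound
// parameter of `Foo1` itself.
trait Bar1<'a, 'b> {}

trait Foo1<'a>: for<'b> Bar1<'a, 'b> {}

struct S;

impl<'a, 'b> Bar1<'a, 'b> for S {}
impl<'a> Foo1<'a> for S {}

fn main() {}
```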
@@ -74,7 +74,7 @@ impl<'tcx> ConstMutationChecker<'_, 'tcx> {
 //
 // `unsafe { *FOO = 0; *BAR.field = 1; }`
 // `unsafe { &mut *FOO }`
-// `unsafe { (*ARRAY)[0] = val; }
+// `unsafe { (*ARRAY)[0] = val; }`
 if !place.projection.iter().any(|p| matches!(p, PlaceElem::Deref)) {
 let source_info = self.body.source_info(location);
 let lint_root = self.body.source_scopes[source_info.scope]
@@ -407,7 +407,7 @@ impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
 if self.prev().is_macro_expansion() && self.curr().is_macro_expansion() {
 // Macros that expand to include branching (such as
 // `assert_eq!()`, `assert_ne!()`, `info!()`, `debug!()`, or
-// `trace!()) typically generate callee spans with identical
+// `trace!()`) typically generate callee spans with identical
 // ranges (typically the full span of the macro) for all
 // `BasicBlocks`. This makes it impossible to distinguish
 // the condition (`if val1 != val2`) from the optional
@@ -248,7 +248,7 @@ fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) ->

 // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
 // cannot yet be stolen), because `mir_promoted()`, which steals
-// from `mir_const(), forces this query to execute before
+// from `mir_const()`, forces this query to execute before
 // performing the steal.
 let body = &tcx.mir_const(def).borrow();

@@ -68,8 +68,11 @@ fn lower_slice_len_call<'tcx>(
 ty::FnDef(fn_def_id, _) if fn_def_id == &slice_len_fn_item_def_id => {
 // perform modifications
 // from something like `_5 = core::slice::<impl [u8]>::len(move _6) -> bb1`
-// into `_5 = Len(*_6)
+// into:
+// ```
+// _5 = Len(*_6)
+// goto bb1
+// ```

 // make new RValue for Len
 let deref_arg = tcx.mk_place_deref(arg);
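At the surface level, the MIR this pass rewrites comes from ordinary code like the following (a sketch, not from the commit); the call to `<[u8]>::len` in its MIR is what gets replaced by the primitive `Len` operation:

```rust
fn byte_len(bytes: &[u8]) -> usize {
    // Compiles to a call such as `_5 = core::slice::<impl [u8]>::len(move _6) -> bb1`,
    // which the pass lowers to `_5 = Len(*_6); goto bb1`.
    bytes.len()
}

fn main() {
    assert_eq!(byte_len(b"hello"), 5);
}
```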
@@ -134,11 +134,11 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 // Process the replace ranges, starting from the highest start
 // position and working our way back. If have tokens like:
 //
-// `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
 //
 // Then we will generate replace ranges for both
 // the `#[cfg(FALSE)] field: bool` and the entire
-// `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
 //
 // By starting processing from the replace range with the greatest
 // start position, we ensure that any replace range which encloses
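Written out as an item, the comment's example is just a struct that is cfg'd out twice over, with the inner attribute's replace range nested inside the outer one (this compiles to nothing, since the `FALSE` cfg is never set):

```rust
// Outer replace range: the whole struct.
#[cfg(FALSE)]
struct Foo {
    // Inner replace range: just this field.
    #[cfg(FALSE)]
    field: bool,
}

fn main() {}
```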
@@ -335,7 +335,7 @@ impl TokenCursor {
 num_of_hashes = cmp::max(num_of_hashes, count);
 }

-// `/// foo` becomes `doc = r"foo".
+// `/// foo` becomes `doc = r"foo"`.
 let delim_span = DelimSpan::from_single(span);
 let body = TokenTree::Delimited(
 delim_span,
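The desugaring that comment describes can be seen from the surface: the two items below carry the same documentation (function names are invented for the sketch):

```rust
/// Adds one.
fn with_doc_comment(x: i32) -> i32 {
    x + 1
}

// What the doc comment above desugars to.
#[doc = r"Adds one."]
fn with_doc_attribute(x: i32) -> i32 {
    x + 1
}

fn main() {
    assert_eq!(with_doc_comment(1), with_doc_attribute(1));
}
```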
@@ -646,7 +646,7 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> {
 }

 // `UseTree` has one inline use (in `ast::ItemKind::Use`) and one
-// non-inline use (in `ast::UseTreeKind::Nested). The former case is more
+// non-inline use (in `ast::UseTreeKind::Nested`). The former case is more
 // common, so we don't implement `visit_use_tree` and tolerate the missed
 // coverage in the latter case.

@@ -333,7 +333,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
 },
 );

-// `Encode the file footer.
+// Encode the file footer.
 let footer_pos = encoder.position() as u64;
 encoder.encode_tagged(
 TAG_FILE_FOOTER,
@@ -878,7 +878,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
 );
 let is_assoc_fn = self.self_type_is_available();
 if let Some((fn_kind, span)) = &self.diagnostic_metadata.current_function {
-// The current function has a `self' parameter, but we were unable to resolve
+// The current function has a `self` parameter, but we were unable to resolve
 // a reference to `self`. This can only happen if the `self` identifier we
 // are resolving came from a different hygiene context.
 if fn_kind.decl().inputs.get(0).map_or(false, |p| p.is_self()) {
@@ -235,7 +235,7 @@ impl<D: Decoder> Decodable<D> for DefIndex {
 pub struct DefId {
 // cfg-ing the order of fields so that the `DefIndex` which is high entropy always ends up in
 // the lower bits no matter the endianness. This allows the compiler to turn that `Hash` impl
-// into a direct call to 'u64::hash(_)`.
+// into a direct call to `u64::hash(_)`.
 #[cfg(not(all(target_pointer_width = "64", target_endian = "big")))]
 pub index: DefIndex,
 pub krate: CrateNum,
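A standalone sketch of the same layout trick (this is not rustc's `DefId`; the type name, `repr(C)`, and the simplified endianness-only cfg are assumptions for illustration): order the two halves so the high-entropy one always occupies the low bits of the combined `u64`.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[cfg(not(target_endian = "big"))]
#[repr(C)]
#[derive(Clone, Copy)]
struct PackedId {
    index: u32, // low word on little-endian targets
    krate: u32,
}

#[cfg(target_endian = "big")]
#[repr(C)]
#[derive(Clone, Copy)]
struct PackedId {
    krate: u32,
    index: u32, // swapped so `index` still lands in the low bits
}

impl Hash for PackedId {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // With the field order fixed per-endianness, hashing the struct is the
        // same as hashing one u64 whose low half is `index`.
        let combined: u64 = unsafe { std::mem::transmute(*self) };
        combined.hash(state);
    }
}

fn main() {
    let id = PackedId { index: 7, krate: 1 };
    let mut hasher = DefaultHasher::new();
    id.hash(&mut hasher);
    println!("{:016x}", hasher.finish());
}
```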
@@ -1954,7 +1954,7 @@ impl Interner {
 let name = Symbol::new(inner.strings.len() as u32);

 // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena,
-// and immediately convert the clone back to `&[u8], all because there
+// and immediately convert the clone back to `&[u8]`, all because there
 // is no `inner.arena.alloc_str()` method. This is clearly safe.
 let string: &str =
 unsafe { str::from_utf8_unchecked(inner.arena.alloc_slice(string.as_bytes())) };
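A self-contained analogue of that round-trip, using a plain `Vec<u8>` in place of the interner's arena (names assumed for the sketch):

```rust
fn main() {
    // Clone the `&str` into a byte buffer, then convert the copy back; this is
    // sound because the bytes are an unchanged, valid UTF-8 sequence.
    let arena: Vec<u8> = "hello".as_bytes().to_vec();
    let copied: &str = unsafe { std::str::from_utf8_unchecked(&arena) };
    assert_eq!(copied, "hello");
}
```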
@@ -339,7 +339,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

 // Essentially any user-written impl will match with an error type,
 // so creating `ImplCandidates` isn't useful. However, we might
-// end up finding a candidate elsewhere (e.g. a `BuiltinCandidate` for `Sized)
+// end up finding a candidate elsewhere (e.g. a `BuiltinCandidate` for `Sized`)
 // This helps us avoid overflow: see issue #72839
 // Since compilation is already guaranteed to fail, this is just
 // to try to show the 'nicest' possible errors to the user.
@@ -727,7 +727,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // Otherwise, we can say that `T: NonAutoTrait` is
 // true.
 // Let's imagine we have a predicate stack like
-// `Foo: Bar -> WF(T) -> T: NonAutoTrait -> T: Auto
+// `Foo: Bar -> WF(T) -> T: NonAutoTrait -> T: Auto`
 // depth ^1 ^2 ^3
 // and the current predicate is `WF(T)`. `wf_args`
 // would contain `(T, 1)`. We want to check all