Rollup merge of #94960 - codehorseman:master, r=oli-obk
Fix many spelling mistakes

Signed-off-by: codehorseman <cricis@yeah.net>
commit 270a41c33e
@@ -26,7 +26,7 @@ pub(super) struct ItemLowerer<'a, 'lowering, 'hir> {
 }
 
 /// When we have a ty alias we *may* have two where clauses. To give the best diagnostics, we set the span
-/// to the where clause that is prefered, if it exists. Otherwise, it sets the span to the other where
+/// to the where clause that is preferred, if it exists. Otherwise, it sets the span to the other where
 /// clause if it exists.
 fn add_ty_alias_where_clause(
 generics: &mut ast::Generics,
@@ -3695,7 +3695,7 @@ declare_lint! {
 /// ### Explanation
 ///
 /// A duplicated attribute may erroneously originate from a copy-paste and the effect of it
-/// being duplicated may not be obvious or desireable.
+/// being duplicated may not be obvious or desirable.
 ///
 /// For instance, doubling the `#[test]` attributes registers the test to be run twice with no
 /// change to its environment.
@@ -99,7 +99,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
 /// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
 /// some global state.
 /// We don't actually care about this `Debug` bound (we use `Provenance::fmt` to format the entire
-/// pointer), but `derive` adds some unecessary bounds.
+/// pointer), but `derive` adds some unnecessary bounds.
 pub trait Provenance: Copy + fmt::Debug {
 /// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
 /// If `true, ptr-to-int casts work by simply discarding the provenance.
@@ -337,7 +337,7 @@ rustc_query_append! { [define_callbacks!][<'tcx>] }
 mod sealed {
 use super::{DefId, LocalDefId};
 
-/// An analogue of the `Into` trait that's intended only for query paramaters.
+/// An analogue of the `Into` trait that's intended only for query parameters.
 ///
 /// This exists to allow queries to accept either `DefId` or `LocalDefId` while requiring that the
 /// user call `to_def_id` to convert between them everywhere else.
@@ -61,7 +61,7 @@ pub fn ty_slice_as_generic_args<'a, 'tcx>(ts: &'a [Ty<'tcx>]) -> &'a [GenericArg
 }
 
 impl<'tcx> List<Ty<'tcx>> {
-/// Allows to freely switch betwen `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>`.
+/// Allows to freely switch between `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>`.
 ///
 /// As lists are interned, `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>` have
 /// be interned together, see `intern_type_list` for more details.
@@ -486,7 +486,7 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 
 /// Given the `DefId`, returns the `DefId` of the innermost item that
-/// has its own type-checking context or "inference enviornment".
+/// has its own type-checking context or "inference environment".
 ///
 /// For example, a closure has its own `DefId`, but it is type-checked
 /// with the containing item. Similarly, an inline const block has its
@@ -527,7 +527,7 @@ impl<'a, 'tcx> ConstToPat<'a, 'tcx> {
 ty::RawPtr(pointee) if pointee.ty.is_sized(tcx.at(span), param_env) => {
 PatKind::Constant { value: cv }
 }
-// FIXME: these can have very suprising behaviour where optimization levels or other
+// FIXME: these can have very surprising behaviour where optimization levels or other
 // compilation choices change the runtime behaviour of the match.
 // See https://github.com/rust-lang/rust/issues/70861 for examples.
 ty::FnPtr(..) | ty::RawPtr(..) => {
@@ -156,7 +156,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
 // temporary holding the static pointer to avoid duplicate errors
 // <https://github.com/rust-lang/rust/pull/78068#issuecomment-731753506>.
 if decl.internal && place.projection.first() == Some(&ProjectionElem::Deref) {
-// If the projection root is an artifical local that we introduced when
+// If the projection root is an artificial local that we introduced when
 // desugaring `static`, give a more specific error message
 // (avoid the general "raw pointer" clause below, that would only be confusing).
 if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
@@ -60,7 +60,7 @@ fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Consta
 }
 }
 
-let mut eligable_locals = Vec::new();
+let mut eligible_locals = Vec::new();
 for (local, mutating_uses) in visitor.local_mutating_uses.drain_enumerated(..) {
 if mutating_uses != 1 || !locals_to_debuginfo.contains(local) {
 continue;
@@ -78,13 +78,13 @@ fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Consta
 &bb.statements[location.statement_index].kind
 {
 if let Some(local) = p.as_local() {
-eligable_locals.push((local, *c));
+eligible_locals.push((local, *c));
 }
 }
 }
 }
 
-eligable_locals
+eligible_locals
 }
 
 impl Visitor<'_> for LocalUseVisitor {
@@ -359,7 +359,7 @@ fn verify_candidate_branch<'tcx>(
 if branch.statements.len() != 1 {
 return false;
 }
-// ...assign the descriminant of `place` in that statement
+// ...assign the discriminant of `place` in that statement
 let StatementKind::Assign(boxed) = &branch.statements[0].kind else {
 return false
 };
@@ -362,7 +362,7 @@ fn optimization_applies<'tcx>(
 return false;
 } else if last_assigned_to != opt_info.local_tmp_s1 {
 trace!(
-"NO: end of assignemnt chain does not match written enum temp: {:?} != {:?}",
+"NO: end of assignment chain does not match written enum temp: {:?} != {:?}",
 last_assigned_to,
 opt_info.local_tmp_s1
 );
@@ -1970,7 +1970,7 @@ impl<'a> Parser<'a> {
 // We use an over-approximation here.
 // `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
 // `pub` is added in case users got confused with the ordering like `async pub fn`,
-// only if it wasn't preceeded by `default` as `default pub` is invalid.
+// only if it wasn't preceded by `default` as `default pub` is invalid.
 let quals: &[Symbol] = if check_pub {
 &[kw::Pub, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
 } else {
@@ -2002,7 +2002,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
 if !matches!(opaque.origin, hir::OpaqueTyOrigin::AsyncFn(..)) {
 continue 'lifetimes;
 }
-// We want to do this only if the liftime identifier is already defined
+// We want to do this only if the lifetime identifier is already defined
 // in the async function that generated this. Otherwise it could be
 // an opaque type defined by the developer and we still want this
 // lint to fail compilation
@@ -223,7 +223,7 @@ top_level_options!(
 /// `true` if we're emitting a JSON blob containing the unused externs
 json_unused_externs: bool [UNTRACKED],
 
-/// `true` if we're emitting a JSON job containg a future-incompat report for lints
+/// `true` if we're emitting a JSON job containing a future-incompat report for lints
 json_future_incompat: bool [TRACKED],
 
 pretty: Option<PpMode> [UNTRACKED],
@@ -69,7 +69,7 @@ pub struct SymbolGallery {
 
 impl SymbolGallery {
 /// Insert a symbol and its span into symbol gallery.
-/// If the symbol has occurred before, ignore the new occurrance.
+/// If the symbol has occurred before, ignore the new occurrence.
 pub fn insert(&self, symbol: Symbol, span: Span) {
 self.symbols.lock().entry(symbol).or_insert(span);
 }
@@ -29,14 +29,14 @@ pub enum NativeLibKind {
 /// Dynamic library (e.g. `libfoo.so` on Linux)
 /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC).
 Dylib {
-/// Whether the dynamic library will be linked only if it satifies some undefined symbols
+/// Whether the dynamic library will be linked only if it satisfies some undefined symbols
 as_needed: Option<bool>,
 },
 /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library.
 RawDylib,
 /// A macOS-specific kind of dynamic libraries.
 Framework {
-/// Whether the framework will be linked only if it satifies some undefined symbols
+/// Whether the framework will be linked only if it satisfies some undefined symbols
 as_needed: Option<bool>,
 },
 /// The library kind wasn't specified, `Dylib` is currently used as a default.
@@ -226,7 +226,7 @@ fn compute_symbol_name<'tcx>(
 
 // If we're dealing with an instance of a function that's inlined from
 // another crate but we're marking it as globally shared to our
-// compliation (aka we're not making an internal copy in each of our
+// compilation (aka we're not making an internal copy in each of our
 // codegen units) then this symbol may become an exported (but hidden
 // visibility) symbol. This means that multiple crates may do the same
 // and we want to be sure to avoid any symbol conflicts here.
@@ -1105,7 +1105,7 @@ impl Niche {
 
 // Extend the range of valid values being reserved by moving either `v.start` or `v.end` bound.
 // Given an eventual `Option<T>`, we try to maximize the chance for `None` to occupy the niche of zero.
-// This is accomplished by prefering enums with 2 variants(`count==1`) and always taking the shortest path to niche zero.
+// This is accomplished by preferring enums with 2 variants(`count==1`) and always taking the shortest path to niche zero.
 // Having `None` in niche zero can enable some special optimizations.
 //
 // Bound selection criteria:
@@ -538,7 +538,7 @@ fn prepare_vtable_segments<'tcx, T>(
 
 // the main traversal loop:
 // basically we want to cut the inheritance directed graph into a few non-overlapping slices of nodes
-// that each node is emited after all its descendents have been emitted.
+// that each node is emitted after all its descendents have been emitted.
 // so we convert the directed graph into a tree by skipping all previously visted nodes using a visited set.
 // this is done on the fly.
 // Each loop run emits a slice - it starts by find a "childless" unvisited node, backtracking upwards, and it
@@ -553,10 +553,10 @@ fn prepare_vtable_segments<'tcx, T>(
 // Starting point 0 stack [D]
 // Loop run #0: Stack after diving in is [D B A], A is "childless"
 // after this point, all newly visited nodes won't have a vtable that equals to a prefix of this one.
-// Loop run #0: Emiting the slice [B A] (in reverse order), B has a next-sibling node, so this slice stops here.
+// Loop run #0: Emitting the slice [B A] (in reverse order), B has a next-sibling node, so this slice stops here.
 // Loop run #0: Stack after exiting out is [D C], C is the next starting point.
 // Loop run #1: Stack after diving in is [D C], C is "childless", since its child A is skipped(already emitted).
-// Loop run #1: Emiting the slice [D C] (in reverse order). No one has a next-sibling node.
+// Loop run #1: Emitting the slice [D C] (in reverse order). No one has a next-sibling node.
 // Loop run #1: Stack after exiting out is []. Now the function exits.
 
 loop {
@@ -719,7 +719,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // TraitA+Kx+'a -> TraitB+Ky+'b (trait upcasting coercion).
 (&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => {
 // See `assemble_candidates_for_unsizing` for more info.
-// We already checked the compatiblity of auto traits within `assemble_candidates_for_unsizing`.
+// We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`.
 let principal_a = data_a.principal().unwrap();
 source_trait_ref = principal_a.with_self_ty(tcx, source);
 upcast_trait_ref = util::supertraits(tcx, source_trait_ref).nth(idx).unwrap();
@@ -823,7 +823,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // Trait+Kx+'a -> Trait+Ky+'b (auto traits and lifetime subtyping).
 (&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => {
 // See `assemble_candidates_for_unsizing` for more info.
-// We already checked the compatiblity of auto traits within `assemble_candidates_for_unsizing`.
+// We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`.
 let iter = data_a
 .principal()
 .map(|b| b.map_bound(ty::ExistentialPredicate::Trait))
@@ -1084,7 +1084,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 | ty::Foreign(_) => {}
 
 // These types are built-in, so we can fast-track by registering
-// nested predicates for their constituient type(s)
+// nested predicates for their constituent type(s)
 ty::Array(ty, _) | ty::Slice(ty) => {
 stack.push(ty);
 }
@@ -553,7 +553,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 match project::poly_project_and_unify_type(self, &project_obligation) {
 Ok(Ok(Some(mut subobligations))) => {
 'compute_res: {
-// If we've previously marked this projection as 'complete', thne
+// If we've previously marked this projection as 'complete', then
 // use the final cached result (either `EvaluatedToOk` or
 // `EvaluatedToOkModuloRegions`), and skip re-evaluating the
 // sub-obligations.
@@ -181,12 +181,12 @@ enum DtorType {
 /// "significant" / "insignificant".
 Insignificant,
 
-/// Type has a `Drop` implentation.
+/// Type has a `Drop` implantation.
 Significant,
 }
 
 // This is a helper function for `adt_drop_tys` and `adt_significant_drop_tys`.
-// Depending on the implentation of `adt_has_dtor`, it is used to check if the
+// Depending on the implantation of `adt_has_dtor`, it is used to check if the
 // ADT has a destructor or if the ADT only has a significant destructor. For
 // understanding significant destructor look at `adt_significant_drop_tys`.
 fn drop_tys_helper<'tcx>(
@@ -295,7 +295,7 @@ fn adt_drop_tys<'tcx>(
 .map(|components| tcx.intern_type_list(&components))
 }
 // If `def_id` refers to a generic ADT, the queries above and below act as if they had been handed
-// a `tcx.make_ty(def, identity_substs)` and as such it is legal to substitue the generic parameters
+// a `tcx.make_ty(def, identity_substs)` and as such it is legal to substitute the generic parameters
 // of the ADT into the outputted `ty`s.
 fn adt_significant_drop_tys(
 tcx: TyCtxt<'_>,
@@ -126,7 +126,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 "the precise format of `Fn`-family traits' type parameters is subject to change",
 );
 // Do not suggest the other syntax if we are in trait impl:
-// the desugaring would contain an associated type constrait.
+// the desugaring would contain an associated type constraint.
 if !is_impl {
 let args = trait_segment
 .args
@@ -429,7 +429,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 let param_counts = gen_params.own_counts();
 
 // Subtracting from param count to ensure type params synthesized from `impl Trait`
-// cannot be explictly specified even with `explicit_generic_args_with_impl_trait`
+// cannot be explicitly specified even with `explicit_generic_args_with_impl_trait`
 // feature enabled.
 let synth_type_param_count = if tcx.features().explicit_generic_args_with_impl_trait {
 gen_params
@@ -486,11 +486,11 @@ pub struct SuspendCheckData<'a, 'tcx> {
 }
 
 // Returns whether it emitted a diagnostic or not
-// Note that this fn and the proceding one are based on the code
+// Note that this fn and the proceeding one are based on the code
 // for creating must_use diagnostics
 //
 // Note that this technique was chosen over things like a `Suspend` marker trait
-// as it is simpler and has precendent in the compiler
+// as it is simpler and has precedent in the compiler
 pub fn check_must_not_suspend_ty<'tcx>(
 fcx: &FnCtxt<'_, 'tcx>,
 ty: Ty<'tcx>,
@@ -379,7 +379,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 }
 
 /// Creates a string version of the `expr` that includes explicit adjustments.
-/// Returns the string and also a bool indicating whther this is a *precise*
+/// Returns the string and also a bool indicating whether this is a *precise*
 /// suggestion.
 fn adjust_expr(
 &self,
@@ -1371,7 +1371,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // represents the case of the path being completely captured by the variable.
 //
 // eg. If `a.b` is captured and we are processing `a.b`, then we can't have the closure also
-// capture `a.b.c`, because that voilates min capture.
+// capture `a.b.c`, because that violates min capture.
 let is_completely_captured = captured_by_move_projs.iter().any(|projs| projs.is_empty());
 
 assert!(!is_completely_captured || (captured_by_move_projs.len() == 1));
@@ -1411,7 +1411,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 ty::RawPtr(..) => unreachable!(),
 
 ty::Adt(def, substs) => {
-// Multi-varaint enums are captured in entirety,
+// Multi-variant enums are captured in entirety,
 // which would've been handled in the case of single empty slice in `captured_by_move_projs`.
 assert_eq!(def.variants().len(), 1);
 
@@ -2208,8 +2208,8 @@ fn determine_place_ancestry_relation<'tcx>(
 }
 }
 
-/// Reduces the precision of the captured place when the precision doesn't yeild any benefit from
-/// borrow checking prespective, allowing us to save us on the size of the capture.
+/// Reduces the precision of the captured place when the precision doesn't yield any benefit from
+/// borrow checking perspective, allowing us to save us on the size of the capture.
 ///
 ///
 /// Fields that are read through a shared reference will always be read via a shared ref or a copy,
@@ -1469,7 +1469,7 @@ fn check_fn_or_method<'fcx, 'tcx>(
 },
 )
 }));
-// Manually call `normalize_assocaited_types_in` on the other types
+// Manually call `normalize_associated_types_in` on the other types
 // in `FnSig`. This ensures that if the types of these fields
 // ever change to include projections, we will start normalizing
 // them automatically.
@@ -323,7 +323,7 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
 .skip(self.params_offset + self.num_provided_type_or_const_args())
 .take(num_params_to_take)
 .map(|param| match param.kind {
-// This is being infered from the item's inputs, no need to set it.
+// This is being inferred from the item's inputs, no need to set it.
 ty::GenericParamDefKind::Type { .. } if is_used_in_input(param.def_id) => {
 "_".to_string()
 }
@@ -68,7 +68,7 @@ pub macro unreachable_2015 {
 $crate::panicking::panic("internal error: entered unreachable code")
 ),
 // Use of `unreachable_display` for non_fmt_panic lint.
-// NOTE: the message ("internal error ...") is embeded directly in unreachable_display
+// NOTE: the message ("internal error ...") is embedded directly in unreachable_display
 ($msg:expr $(,)?) => (
 $crate::panicking::unreachable_display(&$msg)
 ),
@@ -28,7 +28,7 @@ pub const MIN_ALIGN: usize = 8;
 target_arch = "wasm64",
 )))]
 pub const MIN_ALIGN: usize = 16;
-// The allocator on the esp-idf platform guarentees 4 byte alignment.
+// The allocator on the esp-idf platform guarantees 4 byte alignment.
 #[cfg(all(any(
 all(target_arch = "riscv32", target_os = "espidf"),
 all(target_arch = "xtensa", target_os = "espidf"),
@@ -1005,7 +1005,7 @@ impl<'a> Builder<'a> {
 // the rustc_llvm cache. That will always work, even though it
 // may mean that on the next non-check build we'll need to rebuild
 // rustc_llvm. But if LLVM is stale, that'll be a tiny amount
-// of work comparitively, and we'd likely need to rebuild it anyway,
+// of work comparatively, and we'd likely need to rebuild it anyway,
 // so that's okay.
 if crate::native::prebuilt_llvm_config(self, target).is_err() {
 cargo.env("RUST_CHECK", "1");
@@ -1185,7 +1185,7 @@ impl Step for Assemble {
 for tool in LLVM_TOOLS {
 let tool_exe = exe(tool, target_compiler.host);
 let src_path = llvm_bin_dir.join(&tool_exe);
-// When using `donwload-ci-llvm`, some of the tools
+// When using `download-ci-llvm`, some of the tools
 // may not exist, so skip trying to copy them.
 if src_path.exists() {
 builder.copy(&src_path, &libdir_bin.join(&tool_exe));
@@ -2082,7 +2082,7 @@ impl Step for RustDev {
 }
 }
 
-/// Tarball containing a prebuilt version of the build-manifest tool, intented to be used by the
+/// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the
 /// release process to avoid cloning the monorepo and building stuff.
 ///
 /// Should not be considered stable by end users.
@@ -89,7 +89,7 @@ To cross compile, you'll need to:
 ```
 * Copy the binary to your target device and run.
 
-We specify `CC`, `CXX`, `AR`, `CFLAGS`, and `CXXFLAGS` environment variables because somtimes a project or a subproject requires the use of your `'C'` cross toolchain. Since Tomatoware has a modified sysroot we also pass via RUSTFLAGS the location of the dynamic-linker and rpath.
+We specify `CC`, `CXX`, `AR`, `CFLAGS`, and `CXXFLAGS` environment variables because sometimes a project or a subproject requires the use of your `'C'` cross toolchain. Since Tomatoware has a modified sysroot we also pass via RUSTFLAGS the location of the dynamic-linker and rpath.
 
 ### Test with QEMU
 
@@ -212,7 +212,7 @@ fn add_one(x: i32) -> i32 {
 
 #[naked]
 pub extern "C" fn add_two(x: i32) {
-// x + 2 preceeded by a landing pad/nop block
+// x + 2 preceded by a landing pad/nop block
 unsafe {
 asm!(
 "
@@ -2052,7 +2052,7 @@ crate struct Typedef {
 /// alias instead of the final type. This will always have the final type, regardless of whether
 /// `type_` came from HIR or from metadata.
 ///
-/// If `item_type.is_none()`, `type_` is guarenteed to come from metadata (and therefore hold the
+/// If `item_type.is_none()`, `type_` is guaranteed to come from metadata (and therefore hold the
 /// final type).
 crate item_type: Option<Type>,
 }
@@ -13,7 +13,7 @@ crate trait FormatRenderer<'tcx>: Sized {
 /// Gives a description of the renderer. Used for performance profiling.
 fn descr() -> &'static str;
 
-/// Whether to call `item` recursivly for modules
+/// Whether to call `item` recursively for modules
 ///
 /// This is true for html, and false for json. See #80664
 const RUN_ON_MODULE: bool;
@@ -2821,7 +2821,7 @@ fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
 let mut it = ordered_locations.into_iter().peekable();
 
 // An example may fail to write if its source can't be read for some reason, so this method
-// continues iterating until a write suceeds
+// continues iterating until a write succeeds
 let write_and_skip_failure = |w: &mut Buffer, it: &mut Peekable<_>| {
 while let Some(example) = it.next() {
 if write_example(&mut *w, example) {
@@ -1,6 +1,6 @@
 // compile-flags: -O
 
-// On x86 the closure is inlined in foo() producting something like
+// On x86 the closure is inlined in foo() producing something like
 // define i32 @foo() [...] {
 // tail call void @bar() [...]
 // ret i32 0
@@ -1,4 +1,4 @@
-// Verify that debuginfo column nubmers are 1-based byte offsets.
+// Verify that debuginfo column numbers are 1-based byte offsets.
 //
 // ignore-windows
 // compile-flags: -C debuginfo=2
@@ -4,7 +4,7 @@ struct ReallyBig {
 }
 
 // The limit for "too big for the current architecture" is dependent on the target pointer size
-// however it's artifically limited on 64 bits
+// however it's artificially limited on 64 bits
 // logic copied from rustc_target::abi::TargetDataLayout::obj_size_bound()
 const fn max_size() -> usize {
 #[cfg(target_pointer_width = "16")]
@@ -1,4 +1,4 @@
-// Test that multiple liftimes are allowed in impl trait types.
+// Test that multiple lifetimes are allowed in impl trait types.
 // build-pass (FIXME(62277): could be check-pass?)
 
 trait X<'x>: Sized {}
@@ -67,7 +67,7 @@ const TEST_REPOS: &[Test] = &[
 sha: "91493fe47175076f330ce5fc518f0196c0476f56",
 lock: None,
 packages: &[],
-// Test the embeded sqlite variant of diesel
+// Test the embedded sqlite variant of diesel
 // This does not require any dependency to be present,
 // sqlite will be compiled as part of the build process
 features: Some(&["sqlite", "libsqlite3-sys/bundled"]),
@@ -126,7 +126,7 @@ declare_clippy_lint! {
 /// Duplicate code is less maintainable.
 ///
 /// ### Known problems
-/// * The lint doesn't check if the moved expressions modify values that are beeing used in
+/// * The lint doesn't check if the moved expressions modify values that are being used in
 /// the if condition. The suggestion can in that case modify the behavior of the program.
 /// See [rust-clippy#7452](https://github.com/rust-lang/rust-clippy/issues/7452)
 ///
@@ -86,9 +86,9 @@ fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, impl_items: &[h
 
 // check for `unwrap`
 if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
-let reciever_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
-if is_type_diagnostic_item(self.lcx, reciever_ty, sym::Option)
-|| is_type_diagnostic_item(self.lcx, reciever_ty, sym::Result)
+let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+|| is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
 {
 self.result.push(expr.span);
 }
@@ -20,7 +20,7 @@ declare_clippy_lint! {
 ///
 /// ### Known problems
 /// If the user can ensure that b is larger than a, the `.abs()` is
-/// technically unneccessary. However, it will make the code more robust and doesn't have any
+/// technically unnecessary. However, it will make the code more robust and doesn't have any
 /// large performance implications. If the abs call was deliberately left out for performance
 /// reasons, it is probably better to state this explicitly in the code, which then can be done
 /// with an allow.
@@ -69,7 +69,7 @@ impl<'tcx> LateLintPass<'tcx> for FloatEqualityWithoutAbs {
 
 if_chain! {
 
-// left hand side is a substraction
+// left hand side is a subtraction
 if let ExprKind::Binary(
 Spanned {
 node: BinOpKind::Sub,
@@ -84,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for FloatEqualityWithoutAbs {
 if let Res::Def(DefKind::AssocConst, def_id) = cx.qpath_res(epsilon_path, rhs.hir_id);
 if match_def_path(cx, def_id, &paths::F32_EPSILON) || match_def_path(cx, def_id, &paths::F64_EPSILON);
 
-// values of the substractions on the left hand side are of the type float
+// values of the subtractions on the left hand side are of the type float
 let t_val_l = cx.typeck_results().expr_ty(val_l);
 let t_val_r = cx.typeck_results().expr_ty(val_r);
 if let ty::Float(_) = t_val_l.kind();
@@ -224,7 +224,7 @@ pub fn is_array(ty: Ty<'_>) -> bool {
 /// This builds the graph of side effect.
 /// The edge `a -> b` means if `a` has side effect, `b` will have side effect.
 ///
-/// There are some exmaple in following code:
+/// There are some example in following code:
 /// ```rust, ignore
 /// let b = 1;
 /// let a = b; // a -> b
@@ -290,7 +290,7 @@ fn ident_swap_sugg(
 // used instead, in these cases.
 *applicability = Applicability::MaybeIncorrect;
 
-// We arbitraily choose one side to suggest changing,
+// We arbitrarily choose one side to suggest changing,
 // since we don't have a better guess. If the user
 // ends up duplicating a clause, the `logic_bug` lint
 // should catch it.
@@ -374,19 +374,19 @@ fn strip_non_ident_wrappers(expr: &Expr) -> &Expr {
 }
 
 fn extract_related_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
-append_opt_vecs(chained_binops(kind), if_statment_binops(kind))
+append_opt_vecs(chained_binops(kind), if_statement_binops(kind))
 }
 
-fn if_statment_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
+fn if_statement_binops(kind: &ExprKind) -> Option<Vec<BinaryOp<'_>>> {
 match kind {
 ExprKind::If(ref condition, _, _) => chained_binops(&condition.kind),
-ExprKind::Paren(ref e) => if_statment_binops(&e.kind),
+ExprKind::Paren(ref e) => if_statement_binops(&e.kind),
 ExprKind::Block(ref block, _) => {
 let mut output = None;
 for stmt in &block.stmts {
 match stmt.kind {
 StmtKind::Expr(ref e) | StmtKind::Semi(ref e) => {
-output = append_opt_vecs(output, if_statment_binops(&e.kind));
+output = append_opt_vecs(output, if_statement_binops(&e.kind));
 },
 _ => {},
 }
@@ -10,7 +10,7 @@ declare_clippy_lint! {
 /// Displays a warning when a struct with a trailing zero-sized array is declared without a `repr` attribute.
 ///
 /// ### Why is this bad?
-/// Zero-sized arrays aren't very useful in Rust itself, so such a struct is likely being created to pass to C code or in some other situation where control over memory layout matters (for example, in conjuction with manual allocation to make it easy to compute the offset of the array). Either way, `#[repr(C)]` (or another `repr` attribute) is needed.
+/// Zero-sized arrays aren't very useful in Rust itself, so such a struct is likely being created to pass to C code or in some other situation where control over memory layout matters (for example, in conjunction with manual allocation to make it easy to compute the offset of the array). Either way, `#[repr(C)]` (or another `repr` attribute) is needed.
 ///
 /// ### Example
 /// ```rust
@@ -46,7 +46,7 @@ declare_clippy_lint! {
 ///
 /// ### Why is this bad?
 /// Duplicate bounds makes the code
-/// less readable than specifing them only once.
+/// less readable than specifying them only once.
 ///
 /// ### Example
 /// ```rust
@@ -83,9 +83,9 @@ impl<'a, 'tcx> Visitor<'tcx> for FindExpectUnwrap<'a, 'tcx> {
 fn visit_expr(&mut self, expr: &'tcx Expr<'_>) {
 // check for `expect`
 if let Some(arglists) = method_chain_args(expr, &["expect"]) {
-let reciever_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
-if is_type_diagnostic_item(self.lcx, reciever_ty, sym::Option)
-|| is_type_diagnostic_item(self.lcx, reciever_ty, sym::Result)
+let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+|| is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
 {
 self.result.push(expr.span);
 }
@@ -93,9 +93,9 @@ impl<'a, 'tcx> Visitor<'tcx> for FindExpectUnwrap<'a, 'tcx> {
 
 // check for `unwrap`
 if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
-let reciever_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
-if is_type_diagnostic_item(self.lcx, reciever_ty, sym::Option)
-|| is_type_diagnostic_item(self.lcx, reciever_ty, sym::Result)
+let receiver_ty = self.typeck_results.expr_ty(&arglists[0][0]).peel_refs();
+if is_type_diagnostic_item(self.lcx, receiver_ty, sym::Option)
+|| is_type_diagnostic_item(self.lcx, receiver_ty, sym::Result)
 {
 self.result.push(expr.span);
 }
@@ -16,7 +16,7 @@ impl Message {
 fn new(path: PathBuf) -> Self {
 let content: String = std::fs::read_to_string(&path).unwrap();
 // we don't want the first letter after "error: ", "help: " ... to be capitalized
-// also no puncutation (except for "?" ?) at the end of a line
+// also no punctuation (except for "?" ?) at the end of a line
 let regex_set: RegexSet = RegexSet::new(&[
 r"error: [A-Z]",
 r"help: [A-Z]",
@@ -59,7 +59,7 @@ pub fn manual_copy_with_counters(src: &[i32], dst: &mut [i32], dst2: &mut [i32])
 }
 
 // make sure parentheses are added properly to bitwise operators, which have lower precedence than
-// arithmetric ones
+// arithmetic ones
 let mut count = 0 << 1;
 for i in 0..1 << 1 {
 dst[count] = src[i + 2];
@@ -93,7 +93,7 @@ fn test_no_deps_ignores_path_deps_in_workspaces() {
 output
 };
 
-// Trigger a sucessful build, so Cargo would like to cache the build result.
+// Trigger a successful build, so Cargo would like to cache the build result.
 successful_build();
 
 // Make sure there's no spurious rebuild when nothing changes.
@@ -575,7 +575,7 @@ where
 pub(crate) fn extract_pre_comment(pre_snippet: &str) -> (Option<String>, ListItemCommentStyle) {
 let trimmed_pre_snippet = pre_snippet.trim();
 // Both start and end are checked to support keeping a block comment inline with
-// the item, even if there are preceeding line comments, while still supporting
+// the item, even if there are preceding line comments, while still supporting
 // a snippet that starts with a block comment but also contains one or more
 // trailing single line comments.
 // https://github.com/rust-lang/rustfmt/issues/3025
@@ -251,7 +251,7 @@ fn rewrite_segment(
 match **args {
 ast::GenericArgs::AngleBracketed(ref data) if !data.args.is_empty() => {
 // HACK: squeeze out the span between the identifier and the parameters.
-// The hack is requried so that we don't remove the separator inside macro calls.
+// The hack is required so that we don't remove the separator inside macro calls.
 // This does not work in the presence of comment, hoping that people are
 // sane about where to put their comment.
 let separator_snippet = context
@@ -132,7 +132,7 @@ fn check_cfgs(
 continue;
 }
 
-let preceeded_by_doc_comment = {
+let preceded_by_doc_comment = {
 let pre_contents = &contents[..idx];
 let pre_newline = pre_contents.rfind('\n');
 let pre_doc_comment = pre_contents.rfind("///");
@@ -143,7 +143,7 @@ fn check_cfgs(
 }
 };
 
-if preceeded_by_doc_comment {
+if preceded_by_doc_comment {
 continue;
 }
 
@@ -396,7 +396,7 @@ pub fn check(path: &Path, bad: &mut bool) {
 };
 suppressible_tidy_err!(err, skip_file_length, "");
 } else if lines > (LINES * 7) / 10 {
-// Just set it to something that doesn't trigger the "unneccessarily ignored" warning.
+// Just set it to something that doesn't trigger the "unnecessarily ignored" warning.
 skip_file_length = Directive::Ignore(true);
 }
 