Fix clippy::needless_borrow in the compiler

`x clippy compiler -Aclippy::all -Wclippy::needless_borrow --fix`.

Afterwards, I removed a few unnecessary parentheses and `mut`s that this change exposed.
This commit is contained in:
Nilstrieb 2023-11-21 20:07:32 +01:00
parent 0ff8610964
commit 21a870515b
304 changed files with 1101 additions and 1174 deletions

View File

@ -111,8 +111,8 @@ pub trait LayoutCalculator {
alt_tail_space, alt_tail_space,
layout.fields.count(), layout.fields.count(),
prefer_alt_layout, prefer_alt_layout,
format_field_niches(&layout, &fields, &dl), format_field_niches(layout, fields, dl),
format_field_niches(&alt_layout, &fields, &dl), format_field_niches(&alt_layout, fields, dl),
); );
if prefer_alt_layout { if prefer_alt_layout {
@ -1025,7 +1025,7 @@ fn univariant<
// At the bottom of this function, we invert `inverse_memory_index` to // At the bottom of this function, we invert `inverse_memory_index` to
// produce `memory_index` (see `invert_mapping`). // produce `memory_index` (see `invert_mapping`).
let mut sized = true; let mut sized = true;
let mut offsets = IndexVec::from_elem(Size::ZERO, &fields); let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
let mut offset = Size::ZERO; let mut offset = Size::ZERO;
let mut largest_niche = None; let mut largest_niche = None;
let mut largest_niche_available = 0; let mut largest_niche_available = 0;

View File

@ -391,7 +391,7 @@ impl MetaItemKind {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees()) MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
} }
Some(TokenTree::Token(token, _)) => { Some(TokenTree::Token(token, _)) => {
MetaItemLit::from_token(&token).map(MetaItemKind::NameValue) MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
} }
_ => None, _ => None,
} }

View File

@ -959,7 +959,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
e e
}); });
let coroutine_option = let coroutine_option =
this.coroutine_movability_for_fn(&decl, fn_decl_span, coroutine_kind, movability); this.coroutine_movability_for_fn(decl, fn_decl_span, coroutine_kind, movability);
this.current_item = prev; this.current_item = prev;
(body_id, coroutine_option) (body_id, coroutine_option)
}); });
@ -1057,7 +1057,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let body_id = this.lower_fn_body(&outer_decl, |this| { let body_id = this.lower_fn_body(&outer_decl, |this| {
let async_ret_ty = if let FnRetTy::Ty(ty) = &decl.output { let async_ret_ty = if let FnRetTy::Ty(ty) = &decl.output {
let itctx = ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock); let itctx = ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock);
Some(hir::FnRetTy::Return(this.lower_ty(&ty, &itctx))) Some(hir::FnRetTy::Return(this.lower_ty(ty, &itctx)))
} else { } else {
None None
}; };
@ -1156,7 +1156,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
.alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter())); .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
// Wrap everything in a block. // Wrap everything in a block.
hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None) hir::ExprKind::Block(self.block_all(whole_span, stmts, None), None)
} }
/// If the given expression is a path to a tuple struct, returns that path. /// If the given expression is a path to a tuple struct, returns that path.
@ -1413,7 +1413,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let fields = self.arena.alloc_from_iter( let fields = self.arena.alloc_from_iter(
e1.iter().map(|e| (sym::start, e)).chain(e2.iter().map(|e| (sym::end, e))).map( e1.iter().map(|e| (sym::start, e)).chain(e2.iter().map(|e| (sym::end, e))).map(
|(s, e)| { |(s, e)| {
let expr = self.lower_expr(&e); let expr = self.lower_expr(e);
let ident = Ident::new(s, self.lower_span(e.span)); let ident = Ident::new(s, self.lower_span(e.span));
self.expr_field(ident, expr, e.span) self.expr_field(ident, expr, e.span)
}, },

View File

@ -338,8 +338,8 @@ fn make_format_spec<'hir>(
| ((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4 | ((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4
| ((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5; | ((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5;
let flags = ctx.expr_u32(sp, flags); let flags = ctx.expr_u32(sp, flags);
let precision = make_count(ctx, sp, &precision, argmap); let precision = make_count(ctx, sp, precision, argmap);
let width = make_count(ctx, sp, &width, argmap); let width = make_count(ctx, sp, width, argmap);
let format_placeholder_new = ctx.arena.alloc(ctx.expr_lang_item_type_relative( let format_placeholder_new = ctx.arena.alloc(ctx.expr_lang_item_type_relative(
sp, sp,
hir::LangItem::FormatPlaceholder, hir::LangItem::FormatPlaceholder,

View File

@ -47,7 +47,7 @@ pub(super) fn index_hir<'hir>(
match item { match item {
OwnerNode::Crate(citem) => { OwnerNode::Crate(citem) => {
collector.visit_mod(&citem, citem.spans.inner_span, hir::CRATE_HIR_ID) collector.visit_mod(citem, citem.spans.inner_span, hir::CRATE_HIR_ID)
} }
OwnerNode::Item(item) => collector.visit_item(item), OwnerNode::Item(item) => collector.visit_item(item),
OwnerNode::TraitItem(item) => collector.visit_trait_item(item), OwnerNode::TraitItem(item) => collector.visit_trait_item(item),

View File

@ -276,19 +276,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
// only cares about the input argument patterns in the function // only cares about the input argument patterns in the function
// declaration (decl), not the return types. // declaration (decl), not the return types.
let asyncness = header.asyncness; let asyncness = header.asyncness;
let body_id = this.lower_maybe_async_body( let body_id =
span, this.lower_maybe_async_body(span, hir_id, decl, asyncness, body.as_deref());
hir_id,
&decl,
asyncness,
body.as_deref(),
);
let itctx = ImplTraitContext::Universal; let itctx = ImplTraitContext::Universal;
let (generics, decl) = let (generics, decl) =
this.lower_generics(generics, header.constness, id, &itctx, |this| { this.lower_generics(generics, header.constness, id, &itctx, |this| {
let ret_id = asyncness.opt_return_id(); let ret_id = asyncness.opt_return_id();
this.lower_fn_decl(&decl, id, *fn_sig_span, FnDeclKind::Fn, ret_id) this.lower_fn_decl(decl, id, *fn_sig_span, FnDeclKind::Fn, ret_id)
}); });
let sig = hir::FnSig { let sig = hir::FnSig {
decl, decl,
@ -744,7 +739,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let (generics, kind, has_default) = match &i.kind { let (generics, kind, has_default) = match &i.kind {
AssocItemKind::Const(box ConstItem { generics, ty, expr, .. }) => { AssocItemKind::Const(box ConstItem { generics, ty, expr, .. }) => {
let (generics, kind) = self.lower_generics( let (generics, kind) = self.lower_generics(
&generics, generics,
Const::No, Const::No,
i.id, i.id,
&ImplTraitContext::Disallowed(ImplTraitPosition::Generic), &ImplTraitContext::Disallowed(ImplTraitPosition::Generic),
@ -775,7 +770,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
AssocItemKind::Fn(box Fn { sig, generics, body: Some(body), .. }) => { AssocItemKind::Fn(box Fn { sig, generics, body: Some(body), .. }) => {
let asyncness = sig.header.asyncness; let asyncness = sig.header.asyncness;
let body_id = let body_id =
self.lower_maybe_async_body(i.span, hir_id, &sig.decl, asyncness, Some(&body)); self.lower_maybe_async_body(i.span, hir_id, &sig.decl, asyncness, Some(body));
let (generics, sig) = self.lower_method_sig( let (generics, sig) = self.lower_method_sig(
generics, generics,
sig, sig,
@ -857,7 +852,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let (generics, kind) = match &i.kind { let (generics, kind) = match &i.kind {
AssocItemKind::Const(box ConstItem { generics, ty, expr, .. }) => self.lower_generics( AssocItemKind::Const(box ConstItem { generics, ty, expr, .. }) => self.lower_generics(
&generics, generics,
Const::No, Const::No,
i.id, i.id,
&ImplTraitContext::Disallowed(ImplTraitPosition::Generic), &ImplTraitContext::Disallowed(ImplTraitPosition::Generic),

View File

@ -1157,7 +1157,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
itctx: &ImplTraitContext, itctx: &ImplTraitContext,
) -> hir::GenericArg<'hir> { ) -> hir::GenericArg<'hir> {
match arg { match arg {
ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)), ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(lt)),
ast::GenericArg::Type(ty) => { ast::GenericArg::Type(ty) => {
match &ty.kind { match &ty.kind {
TyKind::Infer if self.tcx.features().generic_arg_infer => { TyKind::Infer if self.tcx.features().generic_arg_infer => {
@ -1221,10 +1221,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
} }
_ => {} _ => {}
} }
GenericArg::Type(self.lower_ty(&ty, itctx)) GenericArg::Type(self.lower_ty(ty, itctx))
} }
ast::GenericArg::Const(ct) => GenericArg::Const(ConstArg { ast::GenericArg::Const(ct) => GenericArg::Const(ConstArg {
value: self.lower_anon_const(&ct), value: self.lower_anon_const(ct),
span: self.lower_span(ct.value.span), span: self.lower_span(ct.value.span),
is_desugared_from_effects: false, is_desugared_from_effects: false,
}), }),
@ -1267,7 +1267,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let lifetime_bound = this.elided_dyn_bound(t.span); let lifetime_bound = this.elided_dyn_bound(t.span);
(bounds, lifetime_bound) (bounds, lifetime_bound)
}); });
let kind = hir::TyKind::TraitObject(bounds, &lifetime_bound, TraitObjectSyntax::None); let kind = hir::TyKind::TraitObject(bounds, lifetime_bound, TraitObjectSyntax::None);
return hir::Ty { kind, span: self.lower_span(t.span), hir_id: self.next_id() }; return hir::Ty { kind, span: self.lower_span(t.span), hir_id: self.next_id() };
} }
@ -1551,7 +1551,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// in fn return position, like the `fn test<'a>() -> impl Debug + 'a` // in fn return position, like the `fn test<'a>() -> impl Debug + 'a`
// example, we only need to duplicate lifetimes that appear in the // example, we only need to duplicate lifetimes that appear in the
// bounds, since those are the only ones that are captured by the opaque. // bounds, since those are the only ones that are captured by the opaque.
lifetime_collector::lifetimes_in_bounds(&self.resolver, bounds) lifetime_collector::lifetimes_in_bounds(self.resolver, bounds)
} }
} }
hir::OpaqueTyOrigin::AsyncFn(..) => { hir::OpaqueTyOrigin::AsyncFn(..) => {
@ -2067,10 +2067,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
(hir::ParamName::Plain(self.lower_ident(param.ident)), kind) (hir::ParamName::Plain(self.lower_ident(param.ident)), kind)
} }
GenericParamKind::Const { ty, kw_span: _, default } => { GenericParamKind::Const { ty, kw_span: _, default } => {
let ty = self.lower_ty( let ty = self
&ty, .lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::GenericDefault));
&ImplTraitContext::Disallowed(ImplTraitPosition::GenericDefault),
);
let default = default.as_ref().map(|def| self.lower_anon_const(def)); let default = default.as_ref().map(|def| self.lower_anon_const(def));
( (
hir::ParamName::Plain(self.lower_ident(param.ident)), hir::ParamName::Plain(self.lower_ident(param.ident)),

View File

@ -372,10 +372,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// ``` // ```
FnRetTy::Ty(ty) if matches!(itctx, ImplTraitContext::ReturnPositionOpaqueTy { .. }) => { FnRetTy::Ty(ty) if matches!(itctx, ImplTraitContext::ReturnPositionOpaqueTy { .. }) => {
if self.tcx.features().impl_trait_in_fn_trait_return { if self.tcx.features().impl_trait_in_fn_trait_return {
self.lower_ty(&ty, itctx) self.lower_ty(ty, itctx)
} else { } else {
self.lower_ty( self.lower_ty(
&ty, ty,
&ImplTraitContext::FeatureGated( &ImplTraitContext::FeatureGated(
ImplTraitPosition::FnTraitReturn, ImplTraitPosition::FnTraitReturn,
sym::impl_trait_in_fn_trait_return, sym::impl_trait_in_fn_trait_return,
@ -384,7 +384,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
} }
} }
FnRetTy::Ty(ty) => { FnRetTy::Ty(ty) => {
self.lower_ty(&ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitReturn)) self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitReturn))
} }
FnRetTy::Default(_) => self.arena.alloc(self.ty_tup(*span, &[])), FnRetTy::Default(_) => self.arena.alloc(self.ty_tup(*span, &[])),
}; };

View File

@ -221,7 +221,7 @@ impl<'a> AstValidator<'a> {
} }
fn err_handler(&self) -> &rustc_errors::Handler { fn err_handler(&self) -> &rustc_errors::Handler {
&self.session.diagnostic() self.session.diagnostic()
} }
fn check_lifetime(&self, ident: Ident) { fn check_lifetime(&self, ident: Ident) {
@ -622,7 +622,7 @@ impl<'a> AstValidator<'a> {
data: data.span, data: data.span,
constraint_spans: errors::EmptyLabelManySpans(constraint_spans), constraint_spans: errors::EmptyLabelManySpans(constraint_spans),
arg_spans2: errors::EmptyLabelManySpans(arg_spans), arg_spans2: errors::EmptyLabelManySpans(arg_spans),
suggestion: self.correct_generic_order_suggestion(&data), suggestion: self.correct_generic_order_suggestion(data),
constraint_len, constraint_len,
args_len, args_len,
}); });
@ -738,7 +738,7 @@ fn validate_generic_param_order(
if !bounds.is_empty() { if !bounds.is_empty() {
ordered_params += ": "; ordered_params += ": ";
ordered_params += &pprust::bounds_to_string(&bounds); ordered_params += &pprust::bounds_to_string(bounds);
} }
match kind { match kind {

View File

@ -88,7 +88,7 @@ impl<'a> PostExpansionVisitor<'a> {
} }
} }
match abi::is_enabled(&self.features, span, symbol_unescaped.as_str()) { match abi::is_enabled(self.features, span, symbol_unescaped.as_str()) {
Ok(()) => (), Ok(()) => (),
Err(abi::AbiDisabled::Unstable { feature, explain }) => { Err(abi::AbiDisabled::Unstable { feature, explain }) => {
feature_err_issue( feature_err_issue(
@ -182,7 +182,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
.. ..
}) = attr_info }) = attr_info
{ {
gate_alt!(self, has_feature(&self.features), *name, attr.span, *descr); gate_alt!(self, has_feature(self.features), *name, attr.span, *descr);
} }
// Check unstable flavors of the `#[doc]` attribute. // Check unstable flavors of the `#[doc]` attribute.
if attr.has_name(sym::doc) { if attr.has_name(sym::doc) {
@ -300,7 +300,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
} }
ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => { ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => {
self.check_impl_trait(&ty, false) self.check_impl_trait(ty, false)
} }
_ => {} _ => {}

View File

@ -1078,11 +1078,11 @@ impl<'a> State<'a> {
} }
ast::TyKind::AnonStruct(fields) => { ast::TyKind::AnonStruct(fields) => {
self.head("struct"); self.head("struct");
self.print_record_struct_body(&fields, ty.span); self.print_record_struct_body(fields, ty.span);
} }
ast::TyKind::AnonUnion(fields) => { ast::TyKind::AnonUnion(fields) => {
self.head("union"); self.head("union");
self.print_record_struct_body(&fields, ty.span); self.print_record_struct_body(fields, ty.span);
} }
ast::TyKind::Paren(typ) => { ast::TyKind::Paren(typ) => {
self.popen(); self.popen();

View File

@ -368,7 +368,7 @@ impl<'a> State<'a> {
self.nbsp(); self.nbsp();
if !bounds.is_empty() { if !bounds.is_empty() {
self.word_nbsp("="); self.word_nbsp("=");
self.print_type_bounds(&bounds); self.print_type_bounds(bounds);
} }
self.print_where_clause(&generics.where_clause); self.print_where_clause(&generics.where_clause);
self.word(";"); self.word(";");

View File

@ -552,7 +552,7 @@ pub fn cfg_matches(
fn try_gate_cfg(name: Symbol, span: Span, sess: &ParseSess, features: Option<&Features>) { fn try_gate_cfg(name: Symbol, span: Span, sess: &ParseSess, features: Option<&Features>) {
let gate = find_gated_cfg(|sym| sym == name); let gate = find_gated_cfg(|sym| sym == name);
if let (Some(feats), Some(gated_cfg)) = (features, gate) { if let (Some(feats), Some(gated_cfg)) = (features, gate) {
gate_cfg(&gated_cfg, span, sess, feats); gate_cfg(gated_cfg, span, sess, feats);
} }
} }

View File

@ -107,7 +107,7 @@ impl LocalsStateAtExit {
LocalsStateAtExit::AllAreInvalidated LocalsStateAtExit::AllAreInvalidated
} else { } else {
let mut has_storage_dead = HasStorageDead(BitSet::new_empty(body.local_decls.len())); let mut has_storage_dead = HasStorageDead(BitSet::new_empty(body.local_decls.len()));
has_storage_dead.visit_body(&body); has_storage_dead.visit_body(body);
let mut has_storage_dead_or_moved = has_storage_dead.0; let mut has_storage_dead_or_moved = has_storage_dead.0;
for move_out in &move_data.moves { for move_out in &move_data.moves {
if let Some(index) = move_data.base_local(move_out.path) { if let Some(index) = move_data.base_local(move_out.path) {
@ -128,7 +128,7 @@ impl<'tcx> BorrowSet<'tcx> {
) -> Self { ) -> Self {
let mut visitor = GatherBorrows { let mut visitor = GatherBorrows {
tcx, tcx,
body: &body, body: body,
location_map: Default::default(), location_map: Default::default(),
activation_map: Default::default(), activation_map: Default::default(),
local_map: Default::default(), local_map: Default::default(),
@ -140,7 +140,7 @@ impl<'tcx> BorrowSet<'tcx> {
), ),
}; };
for (block, block_data) in traversal::preorder(&body) { for (block, block_data) in traversal::preorder(body) {
visitor.visit_basic_block_data(block, block_data); visitor.visit_basic_block_data(block, block_data);
} }

View File

@ -490,7 +490,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let mut spans = vec![]; let mut spans = vec![];
for init_idx in inits { for init_idx in inits {
let init = &self.move_data.inits[*init_idx]; let init = &self.move_data.inits[*init_idx];
let span = init.span(&self.body); let span = init.span(self.body);
if !span.is_dummy() { if !span.is_dummy() {
spans.push(span); spans.push(span);
} }
@ -518,7 +518,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let body = map.body(body_id); let body = map.body(body_id);
let mut visitor = ConditionVisitor { spans: &spans, name: &name, errors: vec![] }; let mut visitor = ConditionVisitor { spans: &spans, name: &name, errors: vec![] };
visitor.visit_body(&body); visitor.visit_body(body);
let mut show_assign_sugg = false; let mut show_assign_sugg = false;
let isnt_initialized = if let InitializationRequiringAction::PartialAssignment let isnt_initialized = if let InitializationRequiringAction::PartialAssignment
@ -614,7 +614,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} }
let mut visitor = LetVisitor { decl_span, sugg_span: None }; let mut visitor = LetVisitor { decl_span, sugg_span: None };
visitor.visit_body(&body); visitor.visit_body(body);
if let Some(span) = visitor.sugg_span { if let Some(span) = visitor.sugg_span {
self.suggest_assign_value(&mut err, moved_place, span); self.suggest_assign_value(&mut err, moved_place, span);
} }
@ -779,7 +779,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
return; return;
}; };
// Try to find predicates on *generic params* that would allow copying `ty` // Try to find predicates on *generic params* that would allow copying `ty`
let ocx = ObligationCtxt::new(&self.infcx); let ocx = ObligationCtxt::new(self.infcx);
let copy_did = tcx.require_lang_item(LangItem::Copy, Some(span)); let copy_did = tcx.require_lang_item(LangItem::Copy, Some(span));
let cause = ObligationCause::misc(span, self.mir_def_id()); let cause = ObligationCause::misc(span, self.mir_def_id());
@ -856,7 +856,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
self.explain_why_borrow_contains_point(location, borrow, None) self.explain_why_borrow_contains_point(location, borrow, None)
.add_explanation_to_diagnostic( .add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -903,7 +903,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
self.explain_why_borrow_contains_point(location, borrow, None) self.explain_why_borrow_contains_point(location, borrow, None)
.add_explanation_to_diagnostic( .add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -1174,7 +1174,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
explanation.add_explanation_to_diagnostic( explanation.add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
first_borrow_desc, first_borrow_desc,
@ -1932,7 +1932,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let place_desc = self.describe_place(borrow.borrowed_place.as_ref()); let place_desc = self.describe_place(borrow.borrowed_place.as_ref());
let kind_place = kind.filter(|_| place_desc.is_some()).map(|k| (k, place_span.0)); let kind_place = kind.filter(|_| place_desc.is_some()).map(|k| (k, place_span.0));
let explanation = self.explain_why_borrow_contains_point(location, &borrow, kind_place); let explanation = self.explain_why_borrow_contains_point(location, borrow, kind_place);
debug!(?place_desc, ?explanation); debug!(?place_desc, ?explanation);
@ -2001,14 +2001,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
(Some(name), explanation) => self.report_local_value_does_not_live_long_enough( (Some(name), explanation) => self.report_local_value_does_not_live_long_enough(
location, location,
&name, &name,
&borrow, borrow,
drop_span, drop_span,
borrow_spans, borrow_spans,
explanation, explanation,
), ),
(None, explanation) => self.report_temporary_value_does_not_live_long_enough( (None, explanation) => self.report_temporary_value_does_not_live_long_enough(
location, location,
&borrow, borrow,
drop_span, drop_span,
borrow_spans, borrow_spans,
proper_span, proper_span,
@ -2098,7 +2098,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else { } else {
explanation.add_explanation_to_diagnostic( explanation.add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -2119,7 +2119,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
explanation.add_explanation_to_diagnostic( explanation.add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -2180,7 +2180,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
explanation.add_explanation_to_diagnostic( explanation.add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -2365,7 +2365,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} }
explanation.add_explanation_to_diagnostic( explanation.add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -2842,7 +2842,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
self.explain_why_borrow_contains_point(location, loan, None).add_explanation_to_diagnostic( self.explain_why_borrow_contains_point(location, loan, None).add_explanation_to_diagnostic(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&mut err, &mut err,
"", "",
@ -3020,7 +3020,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} }
} }
let mut visitor = FakeReadCauseFinder { place, cause: None }; let mut visitor = FakeReadCauseFinder { place, cause: None };
visitor.visit_body(&self.body); visitor.visit_body(self.body);
match visitor.cause { match visitor.cause {
Some(FakeReadCause::ForMatchGuard) => Some("match guard"), Some(FakeReadCause::ForMatchGuard) => Some("match guard"),
Some(FakeReadCause::ForIndex) => Some("indexing expression"), Some(FakeReadCause::ForIndex) => Some("indexing expression"),

View File

@ -422,7 +422,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
kind_place: Option<(WriteKind, Place<'tcx>)>, kind_place: Option<(WriteKind, Place<'tcx>)>,
) -> BorrowExplanation<'tcx> { ) -> BorrowExplanation<'tcx> {
let regioncx = &self.regioncx; let regioncx = &self.regioncx;
let body: &Body<'_> = &self.body; let body: &Body<'_> = self.body;
let tcx = self.infcx.tcx; let tcx = self.infcx.tcx;
let borrow_region_vid = borrow.region; let borrow_region_vid = borrow.region;

View File

@ -354,7 +354,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
ty::Adt(def, _) => { ty::Adt(def, _) => {
let variant = if let Some(idx) = variant_index { let variant = if let Some(idx) = variant_index {
assert!(def.is_enum()); assert!(def.is_enum());
&def.variant(idx) def.variant(idx)
} else { } else {
def.non_enum_variant() def.non_enum_variant()
}; };
@ -851,7 +851,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
{ {
let Some((method_did, method_args)) = rustc_middle::util::find_self_call( let Some((method_did, method_args)) = rustc_middle::util::find_self_call(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
target_temp, target_temp,
location.block, location.block,
) else { ) else {
@ -1048,7 +1048,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let ty = moved_place.ty(self.body, tcx).ty; let ty = moved_place.ty(self.body, tcx).ty;
let suggest = match tcx.get_diagnostic_item(sym::IntoIterator) { let suggest = match tcx.get_diagnostic_item(sym::IntoIterator) {
Some(def_id) => type_known_to_meet_bound_modulo_regions( Some(def_id) => type_known_to_meet_bound_modulo_regions(
&self.infcx, self.infcx,
self.param_env, self.param_env,
Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, ty), Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, ty),
def_id, def_id,

View File

@ -321,7 +321,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let deref_base = match deref_target_place.projection.as_ref() { let deref_base = match deref_target_place.projection.as_ref() {
[proj_base @ .., ProjectionElem::Deref] => { [proj_base @ .., ProjectionElem::Deref] => {
PlaceRef { local: deref_target_place.local, projection: &proj_base } PlaceRef { local: deref_target_place.local, projection: proj_base }
} }
_ => bug!("deref_target_place is not a deref projection"), _ => bug!("deref_target_place is not a deref projection"),
}; };
@ -583,7 +583,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
err.subdiagnostic(crate::session_diagnostics::TypeNoCopy::Label { err.subdiagnostic(crate::session_diagnostics::TypeNoCopy::Label {
is_partial_move: false, is_partial_move: false,
ty: bind_to.ty, ty: bind_to.ty,
place: &place_desc, place: place_desc,
span: binding_span, span: binding_span,
}); });
} }

View File

@ -650,14 +650,14 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let fr_name_and_span = self.regioncx.get_var_name_and_span_for_region( let fr_name_and_span = self.regioncx.get_var_name_and_span_for_region(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&self.upvars, &self.upvars,
errci.fr, errci.fr,
); );
let outlived_fr_name_and_span = self.regioncx.get_var_name_and_span_for_region( let outlived_fr_name_and_span = self.regioncx.get_var_name_and_span_for_region(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
&self.local_names, &self.local_names,
&self.upvars, &self.upvars,
errci.outlived_fr, errci.outlived_fr,
@ -971,7 +971,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
for found_did in found_dids { for found_did in found_dids {
let mut traits = vec![]; let mut traits = vec![];
let mut hir_v = HirTraitObjectVisitor(&mut traits, *found_did); let mut hir_v = HirTraitObjectVisitor(&mut traits, *found_did);
hir_v.visit_ty(&self_ty); hir_v.visit_ty(self_ty);
debug!("trait spans found: {:?}", traits); debug!("trait spans found: {:?}", traits);
for span in &traits { for span in &traits {
let mut multi_span: MultiSpan = vec![*span].into(); let mut multi_span: MultiSpan = vec![*span].into();

View File

@ -387,7 +387,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
let arg_ty = self.regioncx.universal_regions().unnormalized_input_tys let arg_ty = self.regioncx.universal_regions().unnormalized_input_tys
[implicit_inputs + argument_index]; [implicit_inputs + argument_index];
let (_, span) = self.regioncx.get_argument_name_and_span_for_region( let (_, span) = self.regioncx.get_argument_name_and_span_for_region(
&self.body, self.body,
&self.local_names, &self.local_names,
argument_index, argument_index,
); );

View File

@ -34,7 +34,7 @@ pub(super) fn generate_invalidates<'tcx>(
borrow_set, borrow_set,
tcx, tcx,
location_table, location_table,
body: &body, body: body,
dominators, dominators,
}; };
ig.visit_body(body); ig.visit_body(body);
@ -383,7 +383,7 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
(Read(_), BorrowKind::Mut { .. }) => { (Read(_), BorrowKind::Mut { .. }) => {
// Reading from mere reservations of mutable-borrows is OK. // Reading from mere reservations of mutable-borrows is OK.
if !is_active(&this.dominators, borrow, location) { if !is_active(this.dominators, borrow, location) {
// If the borrow isn't active yet, reads don't invalidate it // If the borrow isn't active yet, reads don't invalidate it
assert!(allow_two_phase_borrow(borrow.kind)); assert!(allow_two_phase_borrow(borrow.kind));
return Control::Continue; return Control::Continue;

View File

@ -219,18 +219,18 @@ fn do_mir_borrowck<'tcx>(
let location_table_owned = LocationTable::new(body); let location_table_owned = LocationTable::new(body);
let location_table = &location_table_owned; let location_table = &location_table_owned;
let move_data = MoveData::gather_moves(&body, tcx, param_env, |_| true); let move_data = MoveData::gather_moves(body, tcx, param_env, |_| true);
let promoted_move_data = promoted let promoted_move_data = promoted
.iter_enumerated() .iter_enumerated()
.map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env, |_| true))); .map(|(idx, body)| (idx, MoveData::gather_moves(body, tcx, param_env, |_| true)));
let mdpe = MoveDataParamEnv { move_data, param_env }; let mdpe = MoveDataParamEnv { move_data, param_env };
let mut flow_inits = MaybeInitializedPlaces::new(tcx, &body, &mdpe) let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &mdpe)
.into_engine(tcx, &body) .into_engine(tcx, body)
.pass_name("borrowck") .pass_name("borrowck")
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(&body); .into_results_cursor(body);
let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind(def).is_fn_or_closure(); let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind(def).is_fn_or_closure();
let borrow_set = let borrow_set =
@ -260,13 +260,13 @@ fn do_mir_borrowck<'tcx>(
// Dump MIR results into a file, if that is enabled. This let us // Dump MIR results into a file, if that is enabled. This let us
// write unit-tests, as well as helping with debugging. // write unit-tests, as well as helping with debugging.
nll::dump_mir_results(&infcx, &body, &regioncx, &opt_closure_req); nll::dump_mir_results(&infcx, body, &regioncx, &opt_closure_req);
// We also have a `#[rustc_regions]` annotation that causes us to dump // We also have a `#[rustc_regions]` annotation that causes us to dump
// information. // information.
nll::dump_annotation( nll::dump_annotation(
&infcx, &infcx,
&body, body,
&regioncx, &regioncx,
&opt_closure_req, &opt_closure_req,
&opaque_type_values, &opaque_type_values,
@ -1538,7 +1538,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if places_conflict::borrow_conflicts_with_place( if places_conflict::borrow_conflicts_with_place(
self.infcx.tcx, self.infcx.tcx,
&self.body, self.body,
place, place,
borrow.kind, borrow.kind,
root_place, root_place,
@ -2193,7 +2193,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// If this is a mutate access to an immutable local variable with no projections // If this is a mutate access to an immutable local variable with no projections
// report the error as an illegal reassignment // report the error as an illegal reassignment
let init = &self.move_data.inits[init_index]; let init = &self.move_data.inits[init_index];
let assigned_span = init.span(&self.body); let assigned_span = init.span(self.body);
self.report_illegal_reassignment(location, (place, span), assigned_span, place); self.report_illegal_reassignment(location, (place, span), assigned_span, place);
} else { } else {
self.report_mutability_error(place, span, the_place_err, error_access, location) self.report_mutability_error(place, span, the_place_err, error_access, location)

View File

@ -179,7 +179,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
let universal_regions = Rc::new(universal_regions); let universal_regions = Rc::new(universal_regions);
let elements = &Rc::new(RegionValueElements::new(&body)); let elements = &Rc::new(RegionValueElements::new(body));
// Run the MIR type-checker. // Run the MIR type-checker.
let MirTypeckResults { let MirTypeckResults {
@ -206,7 +206,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
if let Some(all_facts) = &mut all_facts { if let Some(all_facts) = &mut all_facts {
let _prof_timer = infcx.tcx.prof.generic_activity("polonius_fact_generation"); let _prof_timer = infcx.tcx.prof.generic_activity("polonius_fact_generation");
all_facts.universal_region.extend(universal_regions.universal_regions()); all_facts.universal_region.extend(universal_regions.universal_regions());
populate_polonius_move_facts(all_facts, move_data, location_table, &body); populate_polonius_move_facts(all_facts, move_data, location_table, body);
// Emit universal regions facts, and their relations, for Polonius. // Emit universal regions facts, and their relations, for Polonius.
// //
@ -263,7 +263,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
&mut liveness_constraints, &mut liveness_constraints,
&mut all_facts, &mut all_facts,
location_table, location_table,
&body, body,
borrow_set, borrow_set,
); );
@ -302,7 +302,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
let algorithm = Algorithm::from_str(&algorithm).unwrap(); let algorithm = Algorithm::from_str(&algorithm).unwrap();
debug!("compute_regions: using polonius algorithm {:?}", algorithm); debug!("compute_regions: using polonius algorithm {:?}", algorithm);
let _prof_timer = infcx.tcx.prof.generic_activity("polonius_analysis"); let _prof_timer = infcx.tcx.prof.generic_activity("polonius_analysis");
Some(Rc::new(Output::compute(&all_facts, algorithm, false))) Some(Rc::new(Output::compute(all_facts, algorithm, false)))
} else { } else {
None None
} }
@ -310,7 +310,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
// Solve the region constraints. // Solve the region constraints.
let (closure_region_requirements, nll_errors) = let (closure_region_requirements, nll_errors) =
regioncx.solve(infcx, param_env, &body, polonius_output.clone()); regioncx.solve(infcx, param_env, body, polonius_output.clone());
if !nll_errors.is_empty() { if !nll_errors.is_empty() {
// Suppress unhelpful extra errors in `infer_opaque_types`. // Suppress unhelpful extra errors in `infer_opaque_types`.
@ -320,7 +320,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
)); ));
} }
let remapped_opaque_tys = regioncx.infer_opaque_types(&infcx, opaque_type_values); let remapped_opaque_tys = regioncx.infer_opaque_types(infcx, opaque_type_values);
NllOutput { NllOutput {
regioncx, regioncx,

View File

@ -303,7 +303,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
Locations::All(span), Locations::All(span),
span, span,
ConstraintCategory::Internal, ConstraintCategory::Internal,
&mut self.constraints, self.constraints,
) )
.convert_all(data); .convert_all(data);
} }

View File

@ -80,7 +80,7 @@ impl LocalUseMap {
live_locals.iter().for_each(|&local| locals_with_use_data[local] = true); live_locals.iter().for_each(|&local| locals_with_use_data[local] = true);
LocalUseMapBuild { local_use_map: &mut local_use_map, elements, locals_with_use_data } LocalUseMapBuild { local_use_map: &mut local_use_map, elements, locals_with_use_data }
.visit_body(&body); .visit_body(body);
local_use_map local_use_map
} }

View File

@ -42,11 +42,11 @@ pub(super) fn generate<'mir, 'tcx>(
let free_regions = regions_that_outlive_free_regions( let free_regions = regions_that_outlive_free_regions(
typeck.infcx.num_region_vars(), typeck.infcx.num_region_vars(),
&typeck.borrowck_context.universal_regions, typeck.borrowck_context.universal_regions,
&typeck.borrowck_context.constraints.outlives_constraints, &typeck.borrowck_context.constraints.outlives_constraints,
); );
let (relevant_live_locals, boring_locals) = let (relevant_live_locals, boring_locals) =
compute_relevant_live_locals(typeck.tcx(), &free_regions, &body); compute_relevant_live_locals(typeck.tcx(), &free_regions, body);
let facts_enabled = use_polonius || AllFacts::enabled(typeck.tcx()); let facts_enabled = use_polonius || AllFacts::enabled(typeck.tcx());
let polonius_drop_used = facts_enabled.then(|| { let polonius_drop_used = facts_enabled.then(|| {

View File

@ -100,7 +100,7 @@ pub(super) fn populate_access_facts<'a, 'tcx>(
location_table, location_table,
move_data, move_data,
}; };
extractor.visit_body(&body); extractor.visit_body(body);
facts.var_dropped_at.extend( facts.var_dropped_at.extend(
dropped_at.iter().map(|&(local, location)| (local, location_table.mid_index(location))), dropped_at.iter().map(|&(local, location)| (local, location_table.mid_index(location))),

View File

@ -64,7 +64,7 @@ pub(super) fn trace<'mir, 'tcx>(
let num_region_vars = typeck.infcx.num_region_vars(); let num_region_vars = typeck.infcx.num_region_vars();
let graph = constraint_set.graph(num_region_vars); let graph = constraint_set.graph(num_region_vars);
let region_graph = let region_graph =
graph.region_graph(&constraint_set, borrowck_context.universal_regions.fr_static); graph.region_graph(constraint_set, borrowck_context.universal_regions.fr_static);
// Traverse each issuing region's constraints, and record the loan as flowing into the // Traverse each issuing region's constraints, and record the loan as flowing into the
// outlived region. // outlived region.
@ -489,7 +489,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
} }
let move_paths = &self.flow_inits.analysis().move_data().move_paths; let move_paths = &self.flow_inits.analysis().move_data().move_paths;
move_paths[mpi].find_descendant(&move_paths, |mpi| state.contains(mpi)).is_some() move_paths[mpi].find_descendant(move_paths, |mpi| state.contains(mpi)).is_some()
} }
/// Returns `true` if the local variable (or some part of it) is initialized in /// Returns `true` if the local variable (or some part of it) is initialized in
@ -522,7 +522,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
Self::make_all_regions_live( Self::make_all_regions_live(
self.elements, self.elements,
&mut self.typeck, self.typeck,
value, value,
live_at, live_at,
&self.inflowing_loans, &self.inflowing_loans,
@ -579,13 +579,13 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
for &kind in &drop_data.dropck_result.kinds { for &kind in &drop_data.dropck_result.kinds {
Self::make_all_regions_live( Self::make_all_regions_live(
self.elements, self.elements,
&mut self.typeck, self.typeck,
kind, kind,
live_at, live_at,
&self.inflowing_loans, &self.inflowing_loans,
); );
polonius::add_drop_of_var_derefs_origin(&mut self.typeck, dropped_local, &kind); polonius::add_drop_of_var_derefs_origin(self.typeck, dropped_local, &kind);
} }
} }

View File

@ -191,11 +191,11 @@ pub(crate) fn type_check<'mir, 'tcx>(
checker.check_user_type_annotations(); checker.check_user_type_annotations();
let mut verifier = TypeVerifier::new(&mut checker, promoted); let mut verifier = TypeVerifier::new(&mut checker, promoted);
verifier.visit_body(&body); verifier.visit_body(body);
checker.typeck_mir(body); checker.typeck_mir(body);
checker.equate_inputs_and_outputs(&body, universal_regions, &normalized_inputs_and_output); checker.equate_inputs_and_outputs(body, universal_regions, &normalized_inputs_and_output);
checker.check_signature_annotation(&body); checker.check_signature_annotation(body);
liveness::generate( liveness::generate(
&mut checker, &mut checker,
@ -389,7 +389,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
self.cx.ascribe_user_type( self.cx.ascribe_user_type(
constant.const_.ty(), constant.const_.ty(),
UserType::TypeOf(uv.def, UserArgs { args: uv.args, user_self_ty: None }), UserType::TypeOf(uv.def, UserArgs { args: uv.args, user_self_ty: None }),
locations.span(&self.cx.body), locations.span(self.cx.body),
); );
} }
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) { } else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
@ -553,7 +553,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
let all_facts = &mut None; let all_facts = &mut None;
let mut constraints = Default::default(); let mut constraints = Default::default();
let mut liveness_constraints = let mut liveness_constraints =
LivenessValues::new(Rc::new(RegionValueElements::new(&promoted_body))); LivenessValues::new(Rc::new(RegionValueElements::new(promoted_body)));
// Don't try to add borrow_region facts for the promoted MIR // Don't try to add borrow_region facts for the promoted MIR
let mut swap_constraints = |this: &mut Self| { let mut swap_constraints = |this: &mut Self| {
@ -570,7 +570,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
swap_constraints(self); swap_constraints(self);
self.visit_body(&promoted_body); self.visit_body(promoted_body);
self.cx.typeck_mir(promoted_body); self.cx.typeck_mir(promoted_body);
@ -1127,7 +1127,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
locations, locations,
locations.span(self.body), locations.span(self.body),
category, category,
&mut self.borrowck_context.constraints, self.borrowck_context.constraints,
) )
.convert_all(data); .convert_all(data);
} }
@ -1854,7 +1854,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
for op in ops { for op in ops {
self.check_operand(op, location); self.check_operand(op, location);
} }
self.check_aggregate_rvalue(&body, rvalue, ak, ops, location) self.check_aggregate_rvalue(body, rvalue, ak, ops, location)
} }
Rvalue::Repeat(operand, len) => { Rvalue::Repeat(operand, len) => {
@ -2300,7 +2300,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
} }
Rvalue::Ref(region, _borrow_kind, borrowed_place) => { Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
self.add_reborrow_constraint(&body, location, *region, borrowed_place); self.add_reborrow_constraint(body, location, *region, borrowed_place);
} }
Rvalue::BinaryOp( Rvalue::BinaryOp(
@ -2512,7 +2512,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
let tcx = self.infcx.tcx; let tcx = self.infcx.tcx;
let field = path_utils::is_upvar_field_projection( let field = path_utils::is_upvar_field_projection(
tcx, tcx,
&self.borrowck_context.upvars, self.borrowck_context.upvars,
borrowed_place.as_ref(), borrowed_place.as_ref(),
body, body,
); );
@ -2668,13 +2668,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
location.to_locations(), location.to_locations(),
DUMMY_SP, // irrelevant; will be overridden. DUMMY_SP, // irrelevant; will be overridden.
ConstraintCategory::Boring, // same as above. ConstraintCategory::Boring, // same as above.
&mut self.borrowck_context.constraints, self.borrowck_context.constraints,
) )
.apply_closure_requirements( .apply_closure_requirements(closure_requirements, def_id.to_def_id(), args);
&closure_requirements,
def_id.to_def_id(),
args,
);
} }
// Now equate closure args to regions inherited from `typeck_root_def_id`. Fixes #98589. // Now equate closure args to regions inherited from `typeck_root_def_id`. Fixes #98589.
@ -2714,7 +2710,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
debug!(?body.span); debug!(?body.span);
for (local, local_decl) in body.local_decls.iter_enumerated() { for (local, local_decl) in body.local_decls.iter_enumerated() {
self.check_local(&body, local, local_decl); self.check_local(body, local, local_decl);
} }
for (block, block_data) in body.basic_blocks.iter_enumerated() { for (block, block_data) in body.basic_blocks.iter_enumerated() {
@ -2727,8 +2723,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
location.statement_index += 1; location.statement_index += 1;
} }
self.check_terminator(&body, block_data.terminator(), location); self.check_terminator(body, block_data.terminator(), location);
self.check_iscleanup(&body, block_data); self.check_iscleanup(body, block_data);
} }
} }
} }

View File

@ -35,7 +35,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
never_initialized_mut_locals: &mut never_initialized_mut_locals, never_initialized_mut_locals: &mut never_initialized_mut_locals,
mbcx: self, mbcx: self,
}; };
visitor.visit_body(&visitor.mbcx.body); visitor.visit_body(visitor.mbcx.body);
} }
// Take the union of the existed `used_mut` set with those variables we've found were // Take the union of the existed `used_mut` set with those variables we've found were

View File

@ -47,7 +47,7 @@ impl MultiItemModifier for Expander {
let template = AttributeTemplate { list: Some("path"), ..Default::default() }; let template = AttributeTemplate { list: Some("path"), ..Default::default() };
validate_attr::check_builtin_meta_item( validate_attr::check_builtin_meta_item(
&ecx.sess.parse_sess, &ecx.sess.parse_sess,
&meta_item, meta_item,
ast::AttrStyle::Outer, ast::AttrStyle::Outer,
sym::cfg_accessible, sym::cfg_accessible,
template, template,

View File

@ -25,7 +25,7 @@ pub(crate) fn expand(
annotatable: Annotatable, annotatable: Annotatable,
) -> Vec<Annotatable> { ) -> Vec<Annotatable> {
check_builtin_macro_attribute(ecx, meta_item, sym::cfg_eval); check_builtin_macro_attribute(ecx, meta_item, sym::cfg_eval);
warn_on_duplicate_attribute(&ecx, &annotatable, sym::cfg_eval); warn_on_duplicate_attribute(ecx, &annotatable, sym::cfg_eval);
vec![cfg_eval(ecx.sess, ecx.ecfg.features, annotatable, ecx.current_expansion.lint_node_id)] vec![cfg_eval(ecx.sess, ecx.ecfg.features, annotatable, ecx.current_expansion.lint_node_id)]
} }
@ -95,19 +95,19 @@ impl CfgFinder {
fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool { fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
let mut finder = CfgFinder { has_cfg_or_cfg_attr: false }; let mut finder = CfgFinder { has_cfg_or_cfg_attr: false };
match annotatable { match annotatable {
Annotatable::Item(item) => finder.visit_item(&item), Annotatable::Item(item) => finder.visit_item(item),
Annotatable::TraitItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Trait), Annotatable::TraitItem(item) => finder.visit_assoc_item(item, visit::AssocCtxt::Trait),
Annotatable::ImplItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Impl), Annotatable::ImplItem(item) => finder.visit_assoc_item(item, visit::AssocCtxt::Impl),
Annotatable::ForeignItem(item) => finder.visit_foreign_item(&item), Annotatable::ForeignItem(item) => finder.visit_foreign_item(item),
Annotatable::Stmt(stmt) => finder.visit_stmt(&stmt), Annotatable::Stmt(stmt) => finder.visit_stmt(stmt),
Annotatable::Expr(expr) => finder.visit_expr(&expr), Annotatable::Expr(expr) => finder.visit_expr(expr),
Annotatable::Arm(arm) => finder.visit_arm(&arm), Annotatable::Arm(arm) => finder.visit_arm(arm),
Annotatable::ExprField(field) => finder.visit_expr_field(&field), Annotatable::ExprField(field) => finder.visit_expr_field(field),
Annotatable::PatField(field) => finder.visit_pat_field(&field), Annotatable::PatField(field) => finder.visit_pat_field(field),
Annotatable::GenericParam(param) => finder.visit_generic_param(&param), Annotatable::GenericParam(param) => finder.visit_generic_param(param),
Annotatable::Param(param) => finder.visit_param(&param), Annotatable::Param(param) => finder.visit_param(param),
Annotatable::FieldDef(field) => finder.visit_field_def(&field), Annotatable::FieldDef(field) => finder.visit_field_def(field),
Annotatable::Variant(variant) => finder.visit_variant(&variant), Annotatable::Variant(variant) => finder.visit_variant(variant),
Annotatable::Crate(krate) => finder.visit_crate(krate), Annotatable::Crate(krate) => finder.visit_crate(krate),
}; };
finder.has_cfg_or_cfg_attr finder.has_cfg_or_cfg_attr

View File

@ -11,7 +11,7 @@ pub fn inject(krate: &mut ast::Crate, parse_sess: &ParseSess, attrs: &[String])
for raw_attr in attrs { for raw_attr in attrs {
let mut parser = rustc_parse::new_parser_from_source_str( let mut parser = rustc_parse::new_parser_from_source_str(
parse_sess, parse_sess,
FileName::cli_crate_attr_source_code(&raw_attr), FileName::cli_crate_attr_source_code(raw_attr),
raw_attr.clone(), raw_attr.clone(),
); );

View File

@ -159,7 +159,7 @@ pub fn expand_concat_bytes(
accumulator.push(val); accumulator.push(val);
} }
Ok(ast::LitKind::ByteStr(ref bytes, _)) => { Ok(ast::LitKind::ByteStr(ref bytes, _)) => {
accumulator.extend_from_slice(&bytes); accumulator.extend_from_slice(bytes);
} }
_ => { _ => {
if !has_errors { if !has_errors {

View File

@ -35,7 +35,7 @@ impl MultiItemModifier for Expander {
AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() }; AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() };
validate_attr::check_builtin_meta_item( validate_attr::check_builtin_meta_item(
&sess.parse_sess, &sess.parse_sess,
&meta_item, meta_item,
ast::AttrStyle::Outer, ast::AttrStyle::Outer,
sym::derive, sym::derive,
template, template,
@ -48,14 +48,14 @@ impl MultiItemModifier for Expander {
NestedMetaItem::MetaItem(meta) => Some(meta), NestedMetaItem::MetaItem(meta) => Some(meta),
NestedMetaItem::Lit(lit) => { NestedMetaItem::Lit(lit) => {
// Reject `#[derive("Debug")]`. // Reject `#[derive("Debug")]`.
report_unexpected_meta_item_lit(sess, &lit); report_unexpected_meta_item_lit(sess, lit);
None None
} }
}) })
.map(|meta| { .map(|meta| {
// Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the // Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the
// paths. // paths.
report_path_args(sess, &meta); report_path_args(sess, meta);
meta.path.clone() meta.path.clone()
}) })
.map(|path| (path, dummy_annotatable(), None, self.0)) .map(|path| (path, dummy_annotatable(), None, self.0))

View File

@ -467,7 +467,7 @@ impl<'a> TraitDef<'a> {
match item { match item {
Annotatable::Item(item) => { Annotatable::Item(item) => {
let is_packed = item.attrs.iter().any(|attr| { let is_packed = item.attrs.iter().any(|attr| {
for r in attr::find_repr_attrs(&cx.sess, attr) { for r in attr::find_repr_attrs(cx.sess, attr) {
if let attr::ReprPacked(_) = r { if let attr::ReprPacked(_) = r {
return true; return true;
} }
@ -478,7 +478,7 @@ impl<'a> TraitDef<'a> {
let newitem = match &item.kind { let newitem = match &item.kind {
ast::ItemKind::Struct(struct_def, generics) => self.expand_struct_def( ast::ItemKind::Struct(struct_def, generics) => self.expand_struct_def(
cx, cx,
&struct_def, struct_def,
item.ident, item.ident,
generics, generics,
from_scratch, from_scratch,
@ -496,7 +496,7 @@ impl<'a> TraitDef<'a> {
if self.supports_unions { if self.supports_unions {
self.expand_struct_def( self.expand_struct_def(
cx, cx,
&struct_def, struct_def,
item.ident, item.ident,
generics, generics,
from_scratch, from_scratch,

View File

@ -182,7 +182,7 @@ impl Bounds {
let params = self let params = self
.bounds .bounds
.iter() .iter()
.map(|&(name, ref bounds)| mk_ty_param(cx, span, name, &bounds, self_ty, self_generics)) .map(|&(name, ref bounds)| mk_ty_param(cx, span, name, bounds, self_ty, self_generics))
.collect(); .collect();
Generics { Generics {

View File

@ -547,7 +547,7 @@ fn make_format_args(
span: arg_name.span.into(), span: arg_name.span.into(),
msg: format!("named argument `{}` is not used by name", arg_name.name).into(), msg: format!("named argument `{}` is not used by name", arg_name.name).into(),
node_id: rustc_ast::CRATE_NODE_ID, node_id: rustc_ast::CRATE_NODE_ID,
lint_id: LintId::of(&NAMED_ARGUMENTS_USED_POSITIONALLY), lint_id: LintId::of(NAMED_ARGUMENTS_USED_POSITIONALLY),
diagnostic: BuiltinLintDiagnostics::NamedArgumentUsedPositionally { diagnostic: BuiltinLintDiagnostics::NamedArgumentUsedPositionally {
position_sp_to_replace, position_sp_to_replace,
position_sp_for_msg, position_sp_for_msg,
@ -632,8 +632,7 @@ fn report_missing_placeholders(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if !placeholders.is_empty() { if !placeholders.is_empty() {
if let Some(mut new_diag) = if let Some(mut new_diag) = report_redundant_format_arguments(ecx, args, used, placeholders)
report_redundant_format_arguments(ecx, &args, used, placeholders)
{ {
diag.cancel(); diag.cancel();
new_diag.emit(); new_diag.emit();

View File

@ -133,7 +133,7 @@ pub fn expand_include<'cx>(
let r = base::parse_expr(&mut self.p)?; let r = base::parse_expr(&mut self.p)?;
if self.p.token != token::Eof { if self.p.token != token::Eof {
self.p.sess.buffer_lint( self.p.sess.buffer_lint(
&INCOMPLETE_INCLUDE, INCOMPLETE_INCLUDE,
self.p.token.span, self.p.token.span,
self.node_id, self.node_id,
"include macro expected single expression in source", "include macro expected single expression in source",
@ -189,7 +189,7 @@ pub fn expand_include_str(
match cx.source_map().load_binary_file(&file) { match cx.source_map().load_binary_file(&file) {
Ok(bytes) => match std::str::from_utf8(&bytes) { Ok(bytes) => match std::str::from_utf8(&bytes) {
Ok(src) => { Ok(src) => {
let interned_src = Symbol::intern(&src); let interned_src = Symbol::intern(src);
base::MacEager::expr(cx.expr_str(sp, interned_src)) base::MacEager::expr(cx.expr_str(sp, interned_src))
} }
Err(_) => { Err(_) => {

View File

@ -26,7 +26,7 @@ pub fn expand_test_case(
anno_item: Annotatable, anno_item: Annotatable,
) -> Vec<Annotatable> { ) -> Vec<Annotatable> {
check_builtin_macro_attribute(ecx, meta_item, sym::test_case); check_builtin_macro_attribute(ecx, meta_item, sym::test_case);
warn_on_duplicate_attribute(&ecx, &anno_item, sym::test_case); warn_on_duplicate_attribute(ecx, &anno_item, sym::test_case);
if !ecx.ecfg.should_test { if !ecx.ecfg.should_test {
return vec![]; return vec![];
@ -79,7 +79,7 @@ pub fn expand_test(
item: Annotatable, item: Annotatable,
) -> Vec<Annotatable> { ) -> Vec<Annotatable> {
check_builtin_macro_attribute(cx, meta_item, sym::test); check_builtin_macro_attribute(cx, meta_item, sym::test);
warn_on_duplicate_attribute(&cx, &item, sym::test); warn_on_duplicate_attribute(cx, &item, sym::test);
expand_test_or_bench(cx, attr_sp, item, false) expand_test_or_bench(cx, attr_sp, item, false)
} }
@ -90,7 +90,7 @@ pub fn expand_bench(
item: Annotatable, item: Annotatable,
) -> Vec<Annotatable> { ) -> Vec<Annotatable> {
check_builtin_macro_attribute(cx, meta_item, sym::bench); check_builtin_macro_attribute(cx, meta_item, sym::bench);
warn_on_duplicate_attribute(&cx, &item, sym::bench); warn_on_duplicate_attribute(cx, &item, sym::bench);
expand_test_or_bench(cx, attr_sp, item, true) expand_test_or_bench(cx, attr_sp, item, true)
} }
@ -134,9 +134,9 @@ pub fn expand_test_or_bench(
// will fail. We shouldn't try to expand in this case because the errors // will fail. We shouldn't try to expand in this case because the errors
// would be spurious. // would be spurious.
let check_result = if is_bench { let check_result = if is_bench {
check_bench_signature(cx, &item, &fn_) check_bench_signature(cx, &item, fn_)
} else { } else {
check_test_signature(cx, &item, &fn_) check_test_signature(cx, &item, fn_)
}; };
if check_result.is_err() { if check_result.is_err() {
return if is_stmt { return if is_stmt {

View File

@ -60,7 +60,7 @@ pub fn inject(
// Do this here so that the test_runner crate attribute gets marked as used // Do this here so that the test_runner crate attribute gets marked as used
// even in non-test builds // even in non-test builds
let test_runner = get_test_runner(span_diagnostic, &krate); let test_runner = get_test_runner(span_diagnostic, krate);
if sess.is_test_crate() { if sess.is_test_crate() {
let panic_strategy = match (panic_strategy, sess.opts.unstable_opts.panic_abort_tests) { let panic_strategy = match (panic_strategy, sess.opts.unstable_opts.panic_abort_tests) {
@ -372,7 +372,7 @@ fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
let ecx = &cx.ext_cx; let ecx = &cx.ext_cx;
let mut tests = cx.test_cases.clone(); let mut tests = cx.test_cases.clone();
tests.sort_by(|a, b| a.name.as_str().cmp(&b.name.as_str())); tests.sort_by(|a, b| a.name.as_str().cmp(b.name.as_str()));
ecx.expr_array_ref( ecx.expr_array_ref(
sp, sp,

View File

@ -10,7 +10,7 @@ pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, na
let template = AttributeTemplate { word: true, ..Default::default() }; let template = AttributeTemplate { word: true, ..Default::default() };
validate_attr::check_builtin_meta_item( validate_attr::check_builtin_meta_item(
&ecx.sess.parse_sess, &ecx.sess.parse_sess,
&meta_item, meta_item,
AttrStyle::Outer, AttrStyle::Outer,
name, name,
template, template,

View File

@ -493,7 +493,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
PassMode::Cast { cast, pad_i32: _ } => { PassMode::Cast { cast, pad_i32: _ } => {
cast.attrs.apply_attrs_to_callsite( cast.attrs.apply_attrs_to_callsite(
llvm::AttributePlace::ReturnValue, llvm::AttributePlace::ReturnValue,
&bx.cx, bx.cx,
callsite, callsite,
); );
} }

View File

@ -67,7 +67,7 @@ pub(crate) unsafe fn codegen(
llcx, llcx,
llmod, llmod,
"__rust_alloc_error_handler", "__rust_alloc_error_handler",
&alloc_error_handler_name(alloc_error_handler_kind), alloc_error_handler_name(alloc_error_handler_kind),
&[usize, usize], // size, align &[usize, usize], // size, align
None, None,
true, true,

View File

@ -136,7 +136,7 @@ fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attr
attrs.push(llvm::CreateAttrStringValue( attrs.push(llvm::CreateAttrStringValue(
cx.llcx, cx.llcx,
"instrument-function-entry-inlined", "instrument-function-entry-inlined",
&mcount_name, mcount_name,
)); ));
} }
if let Some(options) = &cx.sess().opts.unstable_opts.instrument_xray { if let Some(options) = &cx.sess().opts.unstable_opts.instrument_xray {
@ -459,7 +459,7 @@ pub fn from_fn_attrs<'ll, 'tcx>(
// If this function is an import from the environment but the wasm // If this function is an import from the environment but the wasm
// import has a specific module/name, apply them here. // import has a specific module/name, apply them here.
if let Some(module) = wasm_import_module(cx.tcx, instance.def_id()) { if let Some(module) = wasm_import_module(cx.tcx, instance.def_id()) {
to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-module", &module)); to_add.push(llvm::CreateAttrStringValue(cx.llcx, "wasm-import-module", module));
let name = let name =
codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id())); codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id()));

View File

@ -68,7 +68,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
) -> io::Result<()> { ) -> io::Result<()> {
let mut archive = archive.to_path_buf(); let mut archive = archive.to_path_buf();
if self.sess.target.llvm_target.contains("-apple-macosx") { if self.sess.target.llvm_target.contains("-apple-macosx") {
if let Some(new_archive) = try_extract_macho_fat_archive(&self.sess, &archive)? { if let Some(new_archive) = try_extract_macho_fat_archive(self.sess, &archive)? {
archive = new_archive archive = new_archive
} }
} }

View File

@ -124,7 +124,7 @@ pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMach
let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file }; let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };
target_machine_factory( target_machine_factory(
&tcx.sess, tcx.sess,
tcx.backend_optimization_level(()), tcx.backend_optimization_level(()),
tcx.global_backend_features(()), tcx.global_backend_features(()),
)(config) )(config)
@ -1106,7 +1106,7 @@ fn record_llvm_cgu_instructions_stats(prof: &SelfProfilerRef, llmod: &llvm::Modu
} }
let raw_stats = let raw_stats =
llvm::build_string(|s| unsafe { llvm::LLVMRustModuleInstructionStats(&llmod, s) }) llvm::build_string(|s| unsafe { llvm::LLVMRustModuleInstructionStats(llmod, s) })
.expect("cannot get module instruction stats"); .expect("cannot get module instruction stats");
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]

View File

@ -62,7 +62,7 @@ pub fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) ->
// exemption for MinGW for backwards compatibility. // exemption for MinGW for backwards compatibility.
let llfn = cx.declare_fn( let llfn = cx.declare_fn(
&common::i686_decorated_name( &common::i686_decorated_name(
&dllimport, dllimport,
common::is_mingw_gnu_toolchain(&tcx.sess.target), common::is_mingw_gnu_toolchain(&tcx.sess.target),
true, true,
), ),

View File

@ -187,7 +187,7 @@ fn check_and_apply_linkage<'ll, 'tcx>(
{ {
cx.declare_global( cx.declare_global(
&common::i686_decorated_name( &common::i686_decorated_name(
&dllimport, dllimport,
common::is_mingw_gnu_toolchain(&cx.tcx.sess.target), common::is_mingw_gnu_toolchain(&cx.tcx.sess.target),
true, true,
), ),

View File

@ -176,7 +176,7 @@ impl GlobalFileTable {
// compilation directory can be combined with the relative paths // compilation directory can be combined with the relative paths
// to get absolute paths, if needed. // to get absolute paths, if needed.
use rustc_session::RemapFileNameExt; use rustc_session::RemapFileNameExt;
let working_dir: &str = &tcx.sess.opts.working_dir.for_codegen(&tcx.sess).to_string_lossy(); let working_dir: &str = &tcx.sess.opts.working_dir.for_codegen(tcx.sess).to_string_lossy();
llvm::build_byte_buffer(|buffer| { llvm::build_byte_buffer(|buffer| {
coverageinfo::write_filenames_section_to_buffer( coverageinfo::write_filenames_section_to_buffer(

View File

@ -853,7 +853,7 @@ pub fn build_compile_unit_di_node<'ll, 'tcx>(
use rustc_session::RemapFileNameExt; use rustc_session::RemapFileNameExt;
let name_in_debuginfo = name_in_debuginfo.to_string_lossy(); let name_in_debuginfo = name_in_debuginfo.to_string_lossy();
let work_dir = tcx.sess.opts.working_dir.for_codegen(&tcx.sess).to_string_lossy(); let work_dir = tcx.sess.opts.working_dir.for_codegen(tcx.sess).to_string_lossy();
let flags = "\0"; let flags = "\0";
let output_filenames = tcx.output_filenames(()); let output_filenames = tcx.output_filenames(());
let split_name = if tcx.sess.target_can_use_split_dwarf() { let split_name = if tcx.sess.target_can_use_split_dwarf() {

View File

@ -715,7 +715,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>(
coroutine_type_and_layout, coroutine_type_and_layout,
coroutine_type_di_node, coroutine_type_di_node,
coroutine_layout, coroutine_layout,
&common_upvar_names, common_upvar_names,
); );
let span = coroutine_layout.variant_source_info[variant_index].span; let span = coroutine_layout.variant_source_info[variant_index].span;

View File

@ -197,7 +197,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
coroutine_type_and_layout, coroutine_type_and_layout,
coroutine_type_di_node, coroutine_type_di_node,
coroutine_layout, coroutine_layout,
&common_upvar_names, common_upvar_names,
), ),
source_info, source_info,
} }

View File

@ -537,8 +537,7 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec<Str
if feature_state.is_none() { if feature_state.is_none() {
let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| { let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| {
let llvm_features = to_llvm_features(sess, rust_feature); let llvm_features = to_llvm_features(sess, rust_feature);
if llvm_features.contains(&feature) if llvm_features.contains(feature) && !llvm_features.contains(rust_feature)
&& !llvm_features.contains(&rust_feature)
{ {
Some(rust_feature) Some(rust_feature)
} else { } else {

View File

@ -149,7 +149,7 @@ impl<'tcx> AssertModuleSource<'tcx> {
self.cgu_reuse_tracker.set_expectation( self.cgu_reuse_tracker.set_expectation(
cgu_name, cgu_name,
&user_path, user_path,
attr.span, attr.span,
expected_reuse, expected_reuse,
comp_kind, comp_kind,

View File

@ -175,8 +175,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
) -> io::Result<()> { ) -> io::Result<()> {
let mut archive_path = archive_path.to_path_buf(); let mut archive_path = archive_path.to_path_buf();
if self.sess.target.llvm_target.contains("-apple-macosx") { if self.sess.target.llvm_target.contains("-apple-macosx") {
if let Some(new_archive_path) = if let Some(new_archive_path) = try_extract_macho_fat_archive(self.sess, &archive_path)?
try_extract_macho_fat_archive(&self.sess, &archive_path)?
{ {
archive_path = new_archive_path archive_path = new_archive_path
} }

View File

@ -277,7 +277,7 @@ pub fn each_linked_rlib(
let crate_name = info.crate_name[&cnum]; let crate_name = info.crate_name[&cnum];
let used_crate_source = &info.used_crate_source[&cnum]; let used_crate_source = &info.used_crate_source[&cnum];
if let Some((path, _)) = &used_crate_source.rlib { if let Some((path, _)) = &used_crate_source.rlib {
f(cnum, &path); f(cnum, path);
} else { } else {
if used_crate_source.rmeta.is_some() { if used_crate_source.rmeta.is_some() {
return Err(errors::LinkRlibError::OnlyRmetaFound { crate_name }); return Err(errors::LinkRlibError::OnlyRmetaFound { crate_name });
@ -524,7 +524,7 @@ fn link_staticlib<'a>(
&& !ignored_for_lto(sess, &codegen_results.crate_info, cnum); && !ignored_for_lto(sess, &codegen_results.crate_info, cnum);
let native_libs = codegen_results.crate_info.native_libraries[&cnum].iter(); let native_libs = codegen_results.crate_info.native_libraries[&cnum].iter();
let relevant = native_libs.clone().filter(|lib| relevant_lib(sess, &lib)); let relevant = native_libs.clone().filter(|lib| relevant_lib(sess, lib));
let relevant_libs: FxHashSet<_> = relevant.filter_map(|lib| lib.filename).collect(); let relevant_libs: FxHashSet<_> = relevant.filter_map(|lib| lib.filename).collect();
let bundled_libs: FxHashSet<_> = native_libs.filter_map(|lib| lib.filename).collect(); let bundled_libs: FxHashSet<_> = native_libs.filter_map(|lib| lib.filename).collect();
@ -689,7 +689,7 @@ fn link_dwarf_object<'a>(
// Adding an executable is primarily done to make `thorin` check that all the referenced // Adding an executable is primarily done to make `thorin` check that all the referenced
// dwarf objects are found in the end. // dwarf objects are found in the end.
package.add_executable( package.add_executable(
&executable_out_filename, executable_out_filename,
thorin::MissingReferencedObjectBehaviour::Skip, thorin::MissingReferencedObjectBehaviour::Skip,
)?; )?;
@ -945,7 +945,7 @@ fn link_natively<'a>(
{ {
let is_vs_installed = windows_registry::find_vs_version().is_ok(); let is_vs_installed = windows_registry::find_vs_version().is_ok();
let has_linker = windows_registry::find_tool( let has_linker = windows_registry::find_tool(
&sess.opts.target_triple.triple(), sess.opts.target_triple.triple(),
"link.exe", "link.exe",
) )
.is_some(); .is_some();
@ -1038,14 +1038,14 @@ fn link_natively<'a>(
if sess.target.is_like_osx { if sess.target.is_like_osx {
match (strip, crate_type) { match (strip, crate_type) {
(Strip::Debuginfo, _) => { (Strip::Debuginfo, _) => {
strip_symbols_with_external_utility(sess, "strip", &out_filename, Some("-S")) strip_symbols_with_external_utility(sess, "strip", out_filename, Some("-S"))
} }
// Per the manpage, `-x` is the maximum safe strip level for dynamic libraries. (#93988) // Per the manpage, `-x` is the maximum safe strip level for dynamic libraries. (#93988)
(Strip::Symbols, CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro) => { (Strip::Symbols, CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro) => {
strip_symbols_with_external_utility(sess, "strip", &out_filename, Some("-x")) strip_symbols_with_external_utility(sess, "strip", out_filename, Some("-x"))
} }
(Strip::Symbols, _) => { (Strip::Symbols, _) => {
strip_symbols_with_external_utility(sess, "strip", &out_filename, None) strip_symbols_with_external_utility(sess, "strip", out_filename, None)
} }
(Strip::None, _) => {} (Strip::None, _) => {}
} }
@ -1059,7 +1059,7 @@ fn link_natively<'a>(
match strip { match strip {
// Always preserve the symbol table (-x). // Always preserve the symbol table (-x).
Strip::Debuginfo => { Strip::Debuginfo => {
strip_symbols_with_external_utility(sess, stripcmd, &out_filename, Some("-x")) strip_symbols_with_external_utility(sess, stripcmd, out_filename, Some("-x"))
} }
// Strip::Symbols is handled via the --strip-all linker option. // Strip::Symbols is handled via the --strip-all linker option.
Strip::Symbols => {} Strip::Symbols => {}
@ -1245,13 +1245,13 @@ fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
// rpath to the library as well (the rpath should be absolute, see // rpath to the library as well (the rpath should be absolute, see
// PR #41352 for details). // PR #41352 for details).
let filename = format!("rustc{channel}_rt.{name}"); let filename = format!("rustc{channel}_rt.{name}");
let path = find_sanitizer_runtime(&sess, &filename); let path = find_sanitizer_runtime(sess, &filename);
let rpath = path.to_str().expect("non-utf8 component in path"); let rpath = path.to_str().expect("non-utf8 component in path");
linker.args(&["-Wl,-rpath", "-Xlinker", rpath]); linker.args(&["-Wl,-rpath", "-Xlinker", rpath]);
linker.link_dylib(&filename, false, true); linker.link_dylib(&filename, false, true);
} else { } else {
let filename = format!("librustc{channel}_rt.{name}.a"); let filename = format!("librustc{channel}_rt.{name}.a");
let path = find_sanitizer_runtime(&sess, &filename).join(&filename); let path = find_sanitizer_runtime(sess, &filename).join(&filename);
linker.link_whole_rlib(&path); linker.link_whole_rlib(&path);
} }
} }
@ -1685,7 +1685,7 @@ fn link_output_kind(sess: &Session, crate_type: CrateType) -> LinkOutputKind {
// Returns true if linker is located within sysroot // Returns true if linker is located within sysroot
fn detect_self_contained_mingw(sess: &Session) -> bool { fn detect_self_contained_mingw(sess: &Session) -> bool {
let (linker, _) = linker_and_flavor(&sess); let (linker, _) = linker_and_flavor(sess);
// Assume `-C linker=rust-lld` as self-contained mode // Assume `-C linker=rust-lld` as self-contained mode
if linker == Path::new("rust-lld") { if linker == Path::new("rust-lld") {
return true; return true;
@ -1737,7 +1737,7 @@ fn self_contained_components(sess: &Session, crate_type: CrateType) -> LinkSelfC
LinkSelfContainedDefault::InferredForMingw => { LinkSelfContainedDefault::InferredForMingw => {
sess.host == sess.target sess.host == sess.target
&& sess.target.vendor != "uwp" && sess.target.vendor != "uwp"
&& detect_self_contained_mingw(&sess) && detect_self_contained_mingw(sess)
} }
} }
}; };
@ -2432,7 +2432,7 @@ fn add_native_libs_from_crate(
// If rlib contains native libs as archives, unpack them to tmpdir. // If rlib contains native libs as archives, unpack them to tmpdir.
let rlib = &codegen_results.crate_info.used_crate_source[&cnum].rlib.as_ref().unwrap().0; let rlib = &codegen_results.crate_info.used_crate_source[&cnum].rlib.as_ref().unwrap().0;
archive_builder_builder archive_builder_builder
.extract_bundled_libs(rlib, tmpdir, &bundled_libs) .extract_bundled_libs(rlib, tmpdir, bundled_libs)
.unwrap_or_else(|e| sess.emit_fatal(e)); .unwrap_or_else(|e| sess.emit_fatal(e));
} }
@ -2485,7 +2485,7 @@ fn add_native_libs_from_crate(
cmd.link_whole_staticlib( cmd.link_whole_staticlib(
name, name,
verbatim, verbatim,
&search_paths.get_or_init(|| archive_search_paths(sess)), search_paths.get_or_init(|| archive_search_paths(sess)),
); );
} else { } else {
cmd.link_staticlib(name, verbatim) cmd.link_staticlib(name, verbatim)
@ -2719,7 +2719,7 @@ fn rehome_sysroot_lib_dir<'a>(sess: &'a Session, lib_dir: &Path) -> PathBuf {
// already had `fix_windows_verbatim_for_gcc()` applied if needed. // already had `fix_windows_verbatim_for_gcc()` applied if needed.
sysroot_lib_path sysroot_lib_path
} else { } else {
fix_windows_verbatim_for_gcc(&lib_dir) fix_windows_verbatim_for_gcc(lib_dir)
} }
} }
@ -2756,7 +2756,7 @@ fn add_static_crate<'a>(
let mut link_upstream = |path: &Path| { let mut link_upstream = |path: &Path| {
let rlib_path = if let Some(dir) = path.parent() { let rlib_path = if let Some(dir) = path.parent() {
let file_name = path.file_name().expect("rlib path has no file name path component"); let file_name = path.file_name().expect("rlib path has no file name path component");
rehome_sysroot_lib_dir(sess, &dir).join(file_name) rehome_sysroot_lib_dir(sess, dir).join(file_name)
} else { } else {
fix_windows_verbatim_for_gcc(path) fix_windows_verbatim_for_gcc(path)
}; };
@ -2793,7 +2793,7 @@ fn add_static_crate<'a>(
let canonical = f.replace('-', "_"); let canonical = f.replace('-', "_");
let is_rust_object = let is_rust_object =
canonical.starts_with(&canonical_name) && looks_like_rust_object_file(&f); canonical.starts_with(&canonical_name) && looks_like_rust_object_file(f);
// If we're performing LTO and this is a rust-generated object // If we're performing LTO and this is a rust-generated object
// file, then we don't need the object file as it's part of the // file, then we don't need the object file as it's part of the

View File

@ -45,7 +45,7 @@ pub fn get_linker<'a>(
self_contained: bool, self_contained: bool,
target_cpu: &'a str, target_cpu: &'a str,
) -> Box<dyn Linker + 'a> { ) -> Box<dyn Linker + 'a> {
let msvc_tool = windows_registry::find_tool(&sess.opts.target_triple.triple(), "link.exe"); let msvc_tool = windows_registry::find_tool(sess.opts.target_triple.triple(), "link.exe");
// If our linker looks like a batch script on Windows then to execute this // If our linker looks like a batch script on Windows then to execute this
// we'll need to spawn `cmd` explicitly. This is primarily done to handle // we'll need to spawn `cmd` explicitly. This is primarily done to handle
@ -78,7 +78,7 @@ pub fn get_linker<'a>(
if matches!(flavor, LinkerFlavor::Msvc(..)) && t.vendor == "uwp" { if matches!(flavor, LinkerFlavor::Msvc(..)) && t.vendor == "uwp" {
if let Some(ref tool) = msvc_tool { if let Some(ref tool) = msvc_tool {
let original_path = tool.path(); let original_path = tool.path();
if let Some(ref root_lib_path) = original_path.ancestors().nth(4) { if let Some(root_lib_path) = original_path.ancestors().nth(4) {
let arch = match t.arch.as_ref() { let arch = match t.arch.as_ref() {
"x86_64" => Some("x64"), "x86_64" => Some("x64"),
"x86" => Some("x86"), "x86" => Some("x86"),
@ -519,7 +519,7 @@ impl<'a> Linker for GccLinker<'a> {
// -force_load is the macOS equivalent of --whole-archive, but it // -force_load is the macOS equivalent of --whole-archive, but it
// involves passing the full path to the library to link. // involves passing the full path to the library to link.
self.linker_arg("-force_load"); self.linker_arg("-force_load");
let lib = find_native_static_library(lib, verbatim, search_path, &self.sess); let lib = find_native_static_library(lib, verbatim, search_path, self.sess);
self.linker_arg(&lib); self.linker_arg(&lib);
} }
} }
@ -1590,7 +1590,7 @@ impl<'a> Linker for AixLinker<'a> {
fn link_whole_staticlib(&mut self, lib: &str, verbatim: bool, search_path: &[PathBuf]) { fn link_whole_staticlib(&mut self, lib: &str, verbatim: bool, search_path: &[PathBuf]) {
self.hint_static(); self.hint_static();
let lib = find_native_static_library(lib, verbatim, search_path, &self.sess); let lib = find_native_static_library(lib, verbatim, search_path, self.sess);
self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap())); self.cmd.arg(format!("-bkeepfile:{}", lib.to_str().unwrap()));
} }

View File

@ -621,7 +621,7 @@ fn wasm_import_module_map(tcx: TyCtxt<'_>, cnum: CrateNum) -> FxHashMap<DefId, S
let mut ret = FxHashMap::default(); let mut ret = FxHashMap::default();
for (def_id, lib) in tcx.foreign_modules(cnum).iter() { for (def_id, lib) in tcx.foreign_modules(cnum).iter() {
let module = def_id_to_native_lib.get(&def_id).and_then(|s| s.wasm_import_module()); let module = def_id_to_native_lib.get(def_id).and_then(|s| s.wasm_import_module());
let Some(module) = module else { continue }; let Some(module) = module else { continue };
ret.extend(lib.foreign_items.iter().map(|id| { ret.extend(lib.foreign_items.iter().map(|id| {
assert_eq!(id.krate, cnum); assert_eq!(id.krate, cnum);

View File

@ -892,7 +892,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
let incr_comp_session_dir = cgcx.incr_comp_session_dir.as_ref().unwrap(); let incr_comp_session_dir = cgcx.incr_comp_session_dir.as_ref().unwrap();
let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| { let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| {
let source_file = in_incr_comp_dir(&incr_comp_session_dir, saved_path); let source_file = in_incr_comp_dir(incr_comp_session_dir, saved_path);
debug!( debug!(
"copying preexisting module `{}` from {:?} to {}", "copying preexisting module `{}` from {:?} to {}",
module.name, module.name,
@ -914,7 +914,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
let object = load_from_incr_comp_dir( let object = load_from_incr_comp_dir(
cgcx.output_filenames.temp_path(OutputType::Object, Some(&module.name)), cgcx.output_filenames.temp_path(OutputType::Object, Some(&module.name)),
&module.source.saved_files.get("o").expect("no saved object file in work product"), module.source.saved_files.get("o").expect("no saved object file in work product"),
); );
let dwarf_object = let dwarf_object =
module.source.saved_files.get("dwo").as_ref().and_then(|saved_dwarf_object_file| { module.source.saved_files.get("dwo").as_ref().and_then(|saved_dwarf_object_file| {
@ -924,7 +924,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
.expect( .expect(
"saved dwarf object in work product but `split_dwarf_path` returned `None`", "saved dwarf object in work product but `split_dwarf_path` returned `None`",
); );
load_from_incr_comp_dir(dwarf_obj_out, &saved_dwarf_object_file) load_from_incr_comp_dir(dwarf_obj_out, saved_dwarf_object_file)
}); });
WorkItemResult::Finished(CompiledModule { WorkItemResult::Finished(CompiledModule {

View File

@ -148,10 +148,9 @@ pub fn unsized_info<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
(&ty::Array(_, len), &ty::Slice(_)) => { (&ty::Array(_, len), &ty::Slice(_)) => {
cx.const_usize(len.eval_target_usize(cx.tcx(), ty::ParamEnv::reveal_all())) cx.const_usize(len.eval_target_usize(cx.tcx(), ty::ParamEnv::reveal_all()))
} }
( (&ty::Dynamic(data_a, _, src_dyn_kind), &ty::Dynamic(data_b, _, target_dyn_kind))
&ty::Dynamic(ref data_a, _, src_dyn_kind), if src_dyn_kind == target_dyn_kind =>
&ty::Dynamic(ref data_b, _, target_dyn_kind), {
) if src_dyn_kind == target_dyn_kind => {
let old_info = let old_info =
old_info.expect("unsized_info: missing old info for trait upcasting coercion"); old_info.expect("unsized_info: missing old info for trait upcasting coercion");
if data_a.principal_def_id() == data_b.principal_def_id() { if data_a.principal_def_id() == data_b.principal_def_id() {
@ -458,8 +457,8 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
cx.set_frame_pointer_type(llfn); cx.set_frame_pointer_type(llfn);
cx.apply_target_cpu_attr(llfn); cx.apply_target_cpu_attr(llfn);
let llbb = Bx::append_block(&cx, llfn, "top"); let llbb = Bx::append_block(cx, llfn, "top");
let mut bx = Bx::build(&cx, llbb); let mut bx = Bx::build(cx, llbb);
bx.insert_reference_to_gdb_debug_scripts_section_global(); bx.insert_reference_to_gdb_debug_scripts_section_global();
@ -685,7 +684,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
// Calculate the CGU reuse // Calculate the CGU reuse
let cgu_reuse = tcx.sess.time("find_cgu_reuse", || { let cgu_reuse = tcx.sess.time("find_cgu_reuse", || {
codegen_units.iter().map(|cgu| determine_cgu_reuse(tcx, &cgu)).collect::<Vec<_>>() codegen_units.iter().map(|cgu| determine_cgu_reuse(tcx, cgu)).collect::<Vec<_>>()
}); });
crate::assert_module_sources::assert_module_sources(tcx, &|cgu_reuse_tracker| { crate::assert_module_sources::assert_module_sources(tcx, &|cgu_reuse_tracker| {

View File

@ -477,9 +477,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
) )
.emit(); .emit();
InlineAttr::None InlineAttr::None
} else if list_contains_name(&items, sym::always) { } else if list_contains_name(items, sym::always) {
InlineAttr::Always InlineAttr::Always
} else if list_contains_name(&items, sym::never) { } else if list_contains_name(items, sym::never) {
InlineAttr::Never InlineAttr::Never
} else { } else {
struct_span_err!( struct_span_err!(
@ -514,9 +514,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
if items.len() != 1 { if items.len() != 1 {
err(attr.span, "expected one argument"); err(attr.span, "expected one argument");
OptimizeAttr::None OptimizeAttr::None
} else if list_contains_name(&items, sym::size) { } else if list_contains_name(items, sym::size) {
OptimizeAttr::Size OptimizeAttr::Size
} else if list_contains_name(&items, sym::speed) { } else if list_contains_name(items, sym::speed) {
OptimizeAttr::Speed OptimizeAttr::Speed
} else { } else {
err(items[0].span(), "invalid argument"); err(items[0].span(), "invalid argument");

View File

@ -216,7 +216,7 @@ fn push_debuginfo_type_name<'tcx>(
output.push(']'); output.push(']');
} }
} }
ty::Dynamic(ref trait_data, ..) => { ty::Dynamic(trait_data, ..) => {
let auto_traits: SmallVec<[DefId; 4]> = trait_data.auto_traits().collect(); let auto_traits: SmallVec<[DefId; 4]> = trait_data.auto_traits().collect();
let has_enclosing_parens = if cpp_like_debuginfo { let has_enclosing_parens = if cpp_like_debuginfo {

View File

@ -42,7 +42,7 @@ pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// If there exists a local definition that dominates all uses of that local, // If there exists a local definition that dominates all uses of that local,
// the definition should be visited first. Traverse blocks in an order that // the definition should be visited first. Traverse blocks in an order that
// is a topological sort of dominance partial order. // is a topological sort of dominance partial order.
for (bb, data) in traversal::reverse_postorder(&mir) { for (bb, data) in traversal::reverse_postorder(mir) {
analyzer.visit_basic_block_data(bb, data); analyzer.visit_basic_block_data(bb, data);
} }
@ -202,7 +202,7 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
) => match &mut self.locals[local] { ) => match &mut self.locals[local] {
LocalKind::ZST => {} LocalKind::ZST => {}
LocalKind::Memory => {} LocalKind::Memory => {}
LocalKind::SSA(def) if def.dominates(location, &self.dominators) => {} LocalKind::SSA(def) if def.dominates(location, self.dominators) => {}
// Reads from uninitialized variables (e.g., in dead code, after // Reads from uninitialized variables (e.g., in dead code, after
// optimizations) require locals to be in (uninitialized) memory. // optimizations) require locals to be in (uninitialized) memory.
// N.B., there can be uninitialized reads of a local visited after // N.B., there can be uninitialized reads of a local visited after

View File

@ -47,7 +47,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
&self, &self,
fx: &'b mut FunctionCx<'a, 'tcx, Bx>, fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
) -> Option<&'b Bx::Funclet> { ) -> Option<&'b Bx::Funclet> {
let cleanup_kinds = (&fx.cleanup_kinds).as_ref()?; let cleanup_kinds = fx.cleanup_kinds.as_ref()?;
let funclet_bb = cleanup_kinds[self.bb].funclet_bb(self.bb)?; let funclet_bb = cleanup_kinds[self.bb].funclet_bb(self.bb)?;
// If `landing_pad_for` hasn't been called yet to create the `Funclet`, // If `landing_pad_for` hasn't been called yet to create the `Funclet`,
// it has to be now. This may not seem necessary, as RPO should lead // it has to be now. This may not seem necessary, as RPO should lead
@ -161,7 +161,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
) -> MergingSucc { ) -> MergingSucc {
// If there is a cleanup block and the function we're calling can unwind, then // If there is a cleanup block and the function we're calling can unwind, then
// do an invoke, otherwise do a call. // do an invoke, otherwise do a call.
let fn_ty = bx.fn_decl_backend_type(&fn_abi); let fn_ty = bx.fn_decl_backend_type(fn_abi);
let fn_attrs = if bx.tcx().def_kind(fx.instance.def_id()).has_codegen_attrs() { let fn_attrs = if bx.tcx().def_kind(fx.instance.def_id()).has_codegen_attrs() {
Some(bx.tcx().codegen_fn_attrs(fx.instance.def_id())) Some(bx.tcx().codegen_fn_attrs(fx.instance.def_id()))
@ -204,9 +204,9 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
let invokeret = bx.invoke( let invokeret = bx.invoke(
fn_ty, fn_ty,
fn_attrs, fn_attrs,
Some(&fn_abi), Some(fn_abi),
fn_ptr, fn_ptr,
&llargs, llargs,
ret_llbb, ret_llbb,
unwind_block, unwind_block,
self.funclet(fx), self.funclet(fx),
@ -225,7 +225,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
} }
MergingSucc::False MergingSucc::False
} else { } else {
let llret = bx.call(fn_ty, fn_attrs, Some(&fn_abi), fn_ptr, &llargs, self.funclet(fx)); let llret = bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx));
if fx.mir[self.bb].is_cleanup { if fx.mir[self.bb].is_cleanup {
bx.apply_attrs_to_cleanup_callsite(llret); bx.apply_attrs_to_cleanup_callsite(llret);
} }
@ -273,7 +273,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
bx.codegen_inline_asm( bx.codegen_inline_asm(
template, template,
&operands, operands,
options, options,
line_spans, line_spans,
instance, instance,
@ -281,7 +281,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
); );
MergingSucc::False MergingSucc::False
} else { } else {
bx.codegen_inline_asm(template, &operands, options, line_spans, instance, None); bx.codegen_inline_asm(template, operands, options, line_spans, instance, None);
if let Some(target) = destination { if let Some(target) = destination {
self.funclet_br(fx, bx, target, mergeable_succ) self.funclet_br(fx, bx, target, mergeable_succ)
@ -318,7 +318,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
discr: &mir::Operand<'tcx>, discr: &mir::Operand<'tcx>,
targets: &SwitchTargets, targets: &SwitchTargets,
) { ) {
let discr = self.codegen_operand(bx, &discr); let discr = self.codegen_operand(bx, discr);
let switch_ty = discr.layout.ty; let switch_ty = discr.layout.ty;
let mut target_iter = targets.iter(); let mut target_iter = targets.iter();
if target_iter.len() == 1 { if target_iter.len() == 1 {
@ -498,7 +498,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
args = &args[..1]; args = &args[..1];
( (
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE) meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
.get_fn(bx, vtable, ty, &fn_abi), .get_fn(bx, vtable, ty, fn_abi),
fn_abi, fn_abi,
) )
} }
@ -540,7 +540,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
debug!("args' = {:?}", args); debug!("args' = {:?}", args);
( (
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE) meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
.get_fn(bx, meta.immediate(), ty, &fn_abi), .get_fn(bx, meta.immediate(), ty, fn_abi),
fn_abi, fn_abi,
) )
} }
@ -864,7 +864,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// promotes any complex rvalues to constants. // promotes any complex rvalues to constants.
if i == 2 && intrinsic == sym::simd_shuffle { if i == 2 && intrinsic == sym::simd_shuffle {
if let mir::Operand::Constant(constant) = arg { if let mir::Operand::Constant(constant) = arg {
let (llval, ty) = self.simd_shuffle_indices(&bx, constant); let (llval, ty) = self.simd_shuffle_indices(bx, constant);
return OperandRef { return OperandRef {
val: Immediate(llval), val: Immediate(llval),
layout: bx.layout_of(ty), layout: bx.layout_of(ty),
@ -881,7 +881,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
Self::codegen_intrinsic_call( Self::codegen_intrinsic_call(
bx, bx,
*instance.as_ref().unwrap(), *instance.as_ref().unwrap(),
&fn_abi, fn_abi,
&args, &args,
dest, dest,
span, span,
@ -937,7 +937,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx, bx,
meta, meta,
op.layout.ty, op.layout.ty,
&fn_abi, fn_abi,
)); ));
llargs.push(data_ptr); llargs.push(data_ptr);
continue 'make_args; continue 'make_args;
@ -948,7 +948,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx, bx,
meta, meta,
op.layout.ty, op.layout.ty,
&fn_abi, fn_abi,
)); ));
llargs.push(data_ptr); llargs.push(data_ptr);
continue; continue;
@ -975,7 +975,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx, bx,
meta.immediate(), meta.immediate(),
op.layout.ty, op.layout.ty,
&fn_abi, fn_abi,
)); ));
llargs.push(data_ptr.llval); llargs.push(data_ptr.llval);
continue; continue;
@ -1587,9 +1587,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
self.set_debug_loc(&mut bx, mir::SourceInfo::outermost(self.mir.span)); self.set_debug_loc(&mut bx, mir::SourceInfo::outermost(self.mir.span));
let (fn_abi, fn_ptr) = common::build_langcall(&bx, None, reason.lang_item()); let (fn_abi, fn_ptr) = common::build_langcall(&bx, None, reason.lang_item());
let fn_ty = bx.fn_decl_backend_type(&fn_abi); let fn_ty = bx.fn_decl_backend_type(fn_abi);
let llret = bx.call(fn_ty, None, Some(&fn_abi), fn_ptr, &[], funclet.as_ref()); let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref());
bx.apply_attrs_to_cleanup_callsite(llret); bx.apply_attrs_to_cleanup_callsite(llret);
bx.unreachable(); bx.unreachable();
@ -1662,10 +1662,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
} }
} }
} else { } else {
self.codegen_place( self.codegen_place(bx, mir::PlaceRef { local: dest.local, projection: dest.projection })
bx,
mir::PlaceRef { local: dest.local, projection: &dest.projection },
)
}; };
if fn_ret.is_indirect() { if fn_ret.is_indirect() {
if dest.align < dest.layout.align.abi { if dest.align < dest.layout.align.abi {
@ -1696,7 +1693,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
match dest { match dest {
Nothing => (), Nothing => (),
Store(dst) => bx.store_arg(&ret_abi, llval, dst), Store(dst) => bx.store_arg(ret_abi, llval, dst),
IndirectOperand(tmp, index) => { IndirectOperand(tmp, index) => {
let op = bx.load_operand(tmp); let op = bx.load_operand(tmp);
tmp.storage_dead(bx); tmp.storage_dead(bx);
@ -1708,7 +1705,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let op = if let PassMode::Cast { .. } = ret_abi.mode { let op = if let PassMode::Cast { .. } = ret_abi.mode {
let tmp = PlaceRef::alloca(bx, ret_abi.layout); let tmp = PlaceRef::alloca(bx, ret_abi.layout);
tmp.storage_live(bx); tmp.storage_live(bx);
bx.store_arg(&ret_abi, llval, tmp); bx.store_arg(ret_abi, llval, tmp);
let op = bx.load_operand(tmp); let op = bx.load_operand(tmp);
tmp.storage_dead(bx); tmp.storage_dead(bx);
op op

View File

@ -398,7 +398,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let Some(dbg_loc) = self.dbg_loc(var.source_info) else { return }; let Some(dbg_loc) = self.dbg_loc(var.source_info) else { return };
let DebugInfoOffset { direct_offset, indirect_offsets, result: _ } = let DebugInfoOffset { direct_offset, indirect_offsets, result: _ } =
calculate_debuginfo_offset(bx, &var.projection, base.layout); calculate_debuginfo_offset(bx, var.projection, base.layout);
// When targeting MSVC, create extra allocas for arguments instead of pointing multiple // When targeting MSVC, create extra allocas for arguments instead of pointing multiple
// dbg_var_addr() calls into the same alloca with offsets. MSVC uses CodeView records // dbg_var_addr() calls into the same alloca with offsets. MSVC uses CodeView records
@ -416,7 +416,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if should_create_individual_allocas { if should_create_individual_allocas {
let DebugInfoOffset { direct_offset: _, indirect_offsets: _, result: place } = let DebugInfoOffset { direct_offset: _, indirect_offsets: _, result: place } =
calculate_debuginfo_offset(bx, &var.projection, base); calculate_debuginfo_offset(bx, var.projection, base);
// Create a variable which will be a pointer to the actual value // Create a variable which will be a pointer to the actual value
let ptr_ty = Ty::new_ptr( let ptr_ty = Ty::new_ptr(

View File

@ -168,7 +168,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty()); let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
debug!("fn_abi: {:?}", fn_abi); debug!("fn_abi: {:?}", fn_abi);
let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir); let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, mir);
let start_llbb = Bx::append_block(cx, llfn, "start"); let start_llbb = Bx::append_block(cx, llfn, "start");
let mut start_bx = Bx::build(cx, start_llbb); let mut start_bx = Bx::build(cx, start_llbb);
@ -180,7 +180,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
} }
let cleanup_kinds = let cleanup_kinds =
base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(&mir)); base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(mir));
let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> = let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
mir.basic_blocks mir.basic_blocks
@ -261,7 +261,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
drop(start_bx); drop(start_bx);
// Codegen the body of each block using reverse postorder // Codegen the body of each block using reverse postorder
for (bb, _) in traversal::reverse_postorder(&mir) { for (bb, _) in traversal::reverse_postorder(mir) {
fx.codegen_block(bb); fx.codegen_block(bb);
} }
} }

View File

@ -702,7 +702,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}; };
let fn_ptr = bx.get_fn_addr(instance); let fn_ptr = bx.get_fn_addr(instance);
let fn_abi = bx.fn_abi_of_instance(instance, ty::List::empty()); let fn_abi = bx.fn_abi_of_instance(instance, ty::List::empty());
let fn_ty = bx.fn_decl_backend_type(&fn_abi); let fn_ty = bx.fn_decl_backend_type(fn_abi);
let fn_attrs = if bx.tcx().def_kind(instance.def_id()).has_codegen_attrs() { let fn_attrs = if bx.tcx().def_kind(instance.def_id()).has_codegen_attrs() {
Some(bx.tcx().codegen_fn_attrs(instance.def_id())) Some(bx.tcx().codegen_fn_attrs(instance.def_id()))
} else { } else {

View File

@ -34,7 +34,7 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
} }
MonoItem::GlobalAsm(item_id) => { MonoItem::GlobalAsm(item_id) => {
let item = cx.tcx().hir().item(item_id); let item = cx.tcx().hir().item(item_id);
if let hir::ItemKind::GlobalAsm(ref asm) = item.kind { if let hir::ItemKind::GlobalAsm(asm) = item.kind {
let operands: Vec<_> = asm let operands: Vec<_> = asm
.operands .operands
.iter() .iter()
@ -88,7 +88,7 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
} }
} }
MonoItem::Fn(instance) => { MonoItem::Fn(instance) => {
base::codegen_instance::<Bx>(&cx, instance); base::codegen_instance::<Bx>(cx, instance);
} }
} }
@ -119,10 +119,10 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
match *self { match *self {
MonoItem::Static(def_id) => { MonoItem::Static(def_id) => {
cx.predefine_static(def_id, linkage, visibility, &symbol_name); cx.predefine_static(def_id, linkage, visibility, symbol_name);
} }
MonoItem::Fn(instance) => { MonoItem::Fn(instance) => {
cx.predefine_fn(instance, linkage, visibility, &symbol_name); cx.predefine_fn(instance, linkage, visibility, symbol_name);
} }
MonoItem::GlobalAsm(..) => {} MonoItem::GlobalAsm(..) => {}
} }

View File

@ -314,7 +314,7 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
is_static: bool, is_static: bool,
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> { ) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
let res = ecx.load_mir(cid.instance.def, cid.promoted); let res = ecx.load_mir(cid.instance.def, cid.promoted);
match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, &body)) { match res.and_then(|body| eval_body_using_ecx(&mut ecx, cid, body)) {
Err(error) => { Err(error) => {
let (error, backtrace) = error.into_parts(); let (error, backtrace) = error.into_parts();
backtrace.print_backtrace(); backtrace.print_backtrace();

View File

@ -200,7 +200,7 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
&caller &caller
.file .file
.name .name
.for_scope(&self.tcx.sess, RemapPathScopeComponents::DIAGNOSTICS) .for_scope(self.tcx.sess, RemapPathScopeComponents::DIAGNOSTICS)
.to_string_lossy(), .to_string_lossy(),
), ),
u32::try_from(caller.line).unwrap(), u32::try_from(caller.line).unwrap(),

View File

@ -387,7 +387,7 @@ fn valtree_into_mplace<'tcx>(
debug!(?place_inner); debug!(?place_inner);
valtree_into_mplace(ecx, &place_inner, *inner_valtree); valtree_into_mplace(ecx, &place_inner, *inner_valtree);
dump_place(&ecx, &place_inner); dump_place(ecx, &place_inner);
} }
debug!("dump of place_adjusted:"); debug!("dump of place_adjusted:");

View File

@ -256,7 +256,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let addr = addr.to_target_usize(self)?; let addr = addr.to_target_usize(self)?;
// Then turn address into pointer. // Then turn address into pointer.
let ptr = M::ptr_from_addr_cast(&self, addr)?; let ptr = M::ptr_from_addr_cast(self, addr)?;
Ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to)) Ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
} }

View File

@ -259,7 +259,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
// to avoid could be expensive: on the potentially larger types, arrays and slices, // to avoid could be expensive: on the potentially larger types, arrays and slices,
// rather than on all aggregates unconditionally. // rather than on all aggregates unconditionally.
if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) { if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
let Some((size, _align)) = self.ecx.size_and_align_of_mplace(&mplace)? else { let Some((size, _align)) = self.ecx.size_and_align_of_mplace(mplace)? else {
// We do the walk if we can't determine the size of the mplace: we may be // We do the walk if we can't determine the size of the mplace: we may be
// dealing with extern types here in the future. // dealing with extern types here in the future.
return Ok(true); return Ok(true);

View File

@ -505,7 +505,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Performs an exact division, resulting in undefined behavior where // Performs an exact division, resulting in undefined behavior where
// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`. // `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`.
// First, check x % y != 0 (or if that computation overflows). // First, check x % y != 0 (or if that computation overflows).
let (res, overflow) = self.overflowing_binary_op(BinOp::Rem, &a, &b)?; let (res, overflow) = self.overflowing_binary_op(BinOp::Rem, a, b)?;
assert!(!overflow); // All overflow is UB, so this should never return on overflow. assert!(!overflow); // All overflow is UB, so this should never return on overflow.
if res.to_scalar().assert_bits(a.layout.size) != 0 { if res.to_scalar().assert_bits(a.layout.size) != 0 {
throw_ub_custom!( throw_ub_custom!(
@ -515,7 +515,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) )
} }
// `Rem` says this is all right, so we can let `Div` do its job. // `Rem` says this is all right, so we can let `Div` do its job.
self.binop_ignore_overflow(BinOp::Div, &a, &b, dest) self.binop_ignore_overflow(BinOp::Div, a, b, dest)
} }
pub fn saturating_arith( pub fn saturating_arith(

View File

@ -20,7 +20,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
right: &ImmTy<'tcx, M::Provenance>, right: &ImmTy<'tcx, M::Provenance>,
dest: &PlaceTy<'tcx, M::Provenance>, dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
let (val, overflowed) = self.overflowing_binary_op(op, &left, &right)?; let (val, overflowed) = self.overflowing_binary_op(op, left, right)?;
debug_assert_eq!( debug_assert_eq!(
Ty::new_tup(self.tcx.tcx, &[val.layout.ty, self.tcx.types.bool]), Ty::new_tup(self.tcx.tcx, &[val.layout.ty, self.tcx.types.bool]),
dest.layout.ty, dest.layout.ty,

View File

@ -456,7 +456,7 @@ where
) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>> ) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
{ {
let (size, _align) = self let (size, _align) = self
.size_and_align_of_mplace(&mplace)? .size_and_align_of_mplace(mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi)); .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// We check alignment separately, and *after* checking everything else. // We check alignment separately, and *after* checking everything else.
// If an access is both OOB and misaligned, we want to see the bounds error. // If an access is both OOB and misaligned, we want to see the bounds error.
@ -472,7 +472,7 @@ where
) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>> ) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
{ {
let (size, _align) = self let (size, _align) = self
.size_and_align_of_mplace(&mplace)? .size_and_align_of_mplace(mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi)); .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
// We check alignment separately, and raise that error *after* checking everything else. // We check alignment separately, and raise that error *after* checking everything else.
// If an access is both OOB and misaligned, we want to see the bounds error. // If an access is both OOB and misaligned, we want to see the bounds error.

View File

@ -51,7 +51,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> { ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
match arg { match arg {
FnArg::Copy(op) => Ok(op.clone()), FnArg::Copy(op) => Ok(op.clone()),
FnArg::InPlace(place) => self.place_to_op(&place), FnArg::InPlace(place) => self.place_to_op(place),
} }
} }
@ -410,7 +410,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// so we implement a type-based check that reflects the guaranteed rules for ABI compatibility. // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
if self.layout_compat(caller_abi.layout, callee_abi.layout)? { if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
// Ensure that our checks imply actual ABI compatibility for this concrete call. // Ensure that our checks imply actual ABI compatibility for this concrete call.
assert!(caller_abi.eq_abi(&callee_abi)); assert!(caller_abi.eq_abi(callee_abi));
return Ok(true); return Ok(true);
} else { } else {
trace!( trace!(
@ -464,7 +464,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// We work with a copy of the argument for now; if this is in-place argument passing, we // We work with a copy of the argument for now; if this is in-place argument passing, we
// will later protect the source it comes from. This means the callee cannot observe if we // will later protect the source it comes from. This means the callee cannot observe if we
// did in-place of by-copy argument passing, except for pointer equality tests. // did in-place of by-copy argument passing, except for pointer equality tests.
let caller_arg_copy = self.copy_fn_arg(&caller_arg)?; let caller_arg_copy = self.copy_fn_arg(caller_arg)?;
if !already_live { if !already_live {
let local = callee_arg.as_local().unwrap(); let local = callee_arg.as_local().unwrap();
let meta = caller_arg_copy.meta(); let meta = caller_arg_copy.meta();

View File

@ -896,7 +896,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self }; let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };
// Run it. // Run it.
match visitor.visit_value(&op) { match visitor.visit_value(op) {
Ok(()) => Ok(()), Ok(()) => Ok(()),
// Pass through validation failures and "invalid program" issues. // Pass through validation failures and "invalid program" issues.
Err(err) Err(err)

View File

@ -97,14 +97,14 @@ pub trait ValueVisitor<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
let inner_mplace = self.ecx().unpack_dyn_trait(&dest)?.0; let inner_mplace = self.ecx().unpack_dyn_trait(&dest)?.0;
trace!("walk_value: dyn object layout: {:#?}", inner_mplace.layout); trace!("walk_value: dyn object layout: {:#?}", inner_mplace.layout);
// recurse with the inner type // recurse with the inner type
return self.visit_field(&v, 0, &inner_mplace.into()); return self.visit_field(v, 0, &inner_mplace.into());
} }
ty::Dynamic(_, _, ty::DynStar) => { ty::Dynamic(_, _, ty::DynStar) => {
// DynStar types. Very different from a dyn type (but strangely part of the // DynStar types. Very different from a dyn type (but strangely part of the
// same variant in `TyKind`): These are pairs where the 2nd component is the // same variant in `TyKind`): These are pairs where the 2nd component is the
// vtable, and the first component is the data (which must be ptr-sized). // vtable, and the first component is the data (which must be ptr-sized).
let data = self.ecx().unpack_dyn_star(v)?.0; let data = self.ecx().unpack_dyn_star(v)?.0;
return self.visit_field(&v, 0, &data); return self.visit_field(v, 0, &data);
} }
// Slices do not need special handling here: they have `Array` field // Slices do not need special handling here: they have `Array` field
// placement with length 0, so we enter the `Array` case below which // placement with length 0, so we enter the `Array` case below which

View File

@ -60,9 +60,9 @@ impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
let ConstCx { tcx, body, .. } = *ccx; let ConstCx { tcx, body, .. } = *ccx;
FlowSensitiveAnalysis::new(NeedsDrop, ccx) FlowSensitiveAnalysis::new(NeedsDrop, ccx)
.into_engine(tcx, &body) .into_engine(tcx, body)
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(&body) .into_results_cursor(body)
}); });
needs_drop.seek_before_primary_effect(location); needs_drop.seek_before_primary_effect(location);
@ -122,9 +122,9 @@ impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
let ConstCx { tcx, body, .. } = *ccx; let ConstCx { tcx, body, .. } = *ccx;
FlowSensitiveAnalysis::new(HasMutInterior, ccx) FlowSensitiveAnalysis::new(HasMutInterior, ccx)
.into_engine(tcx, &body) .into_engine(tcx, body)
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(&body) .into_results_cursor(body)
}); });
has_mut_interior.seek_before_primary_effect(location); has_mut_interior.seek_before_primary_effect(location);
@ -170,9 +170,9 @@ impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
hir::ConstContext::Const { .. } | hir::ConstContext::Static(_) => { hir::ConstContext::Const { .. } | hir::ConstContext::Static(_) => {
let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx) let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
.into_engine(ccx.tcx, &ccx.body) .into_engine(ccx.tcx, ccx.body)
.iterate_to_fixpoint() .iterate_to_fixpoint()
.into_results_cursor(&ccx.body); .into_results_cursor(ccx.body);
cursor.seek_after_primary_effect(return_loc); cursor.seek_after_primary_effect(return_loc);
cursor.get().contains(RETURN_PLACE) cursor.get().contains(RETURN_PLACE)
@ -225,7 +225,7 @@ impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
type Target = ConstCx<'mir, 'tcx>; type Target = ConstCx<'mir, 'tcx>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.ccx self.ccx
} }
} }
@ -272,7 +272,7 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> {
} }
if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) { if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
self.visit_body(&body); self.visit_body(body);
} }
// If we got through const-checking without emitting any "primary" errors, emit any // If we got through const-checking without emitting any "primary" errors, emit any
@ -503,7 +503,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake, place) Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake, place)
| Rvalue::AddressOf(Mutability::Not, place) => { | Rvalue::AddressOf(Mutability::Not, place) => {
let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>( let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
&self.ccx, self.ccx,
&mut |local| self.qualifs.has_mut_interior(self.ccx, local, location), &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
place.as_ref(), place.as_ref(),
); );

View File

@ -83,7 +83,7 @@ pub fn rustc_allow_const_fn_unstable(
feature_gate: Symbol, feature_gate: Symbol,
) -> bool { ) -> bool {
let attrs = tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(def_id)); let attrs = tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(def_id));
attr::rustc_allow_const_fn_unstable(&tcx.sess, attrs).any(|name| name == feature_gate) attr::rustc_allow_const_fn_unstable(tcx.sess, attrs).any(|name| name == feature_gate)
} }
/// Returns `true` if the given `const fn` is "const-stable". /// Returns `true` if the given `const fn` is "const-stable".

View File

@ -129,7 +129,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
tcx, tcx,
generics, generics,
err, err,
&param_ty.name.as_str(), param_ty.name.as_str(),
&constraint, &constraint,
None, None,
None, None,

View File

@ -54,7 +54,7 @@ impl<'mir, 'tcx> std::ops::Deref for CheckLiveDrops<'mir, 'tcx> {
type Target = ConstCx<'mir, 'tcx>; type Target = ConstCx<'mir, 'tcx>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.ccx self.ccx
} }
} }

View File

@ -188,7 +188,7 @@ impl<'a, 'tcx> std::ops::Deref for Validator<'a, 'tcx> {
type Target = ConstCx<'a, 'tcx>; type Target = ConstCx<'a, 'tcx>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.ccx self.ccx
} }
} }
@ -229,7 +229,7 @@ impl<'tcx> Validator<'_, 'tcx> {
let statement = &self.body[loc.block].statements[loc.statement_index]; let statement = &self.body[loc.block].statements[loc.statement_index];
match &statement.kind { match &statement.kind {
StatementKind::Assign(box (_, rhs)) => qualifs::in_rvalue::<Q, _>( StatementKind::Assign(box (_, rhs)) => qualifs::in_rvalue::<Q, _>(
&self.ccx, self.ccx,
&mut |l| self.qualif_local::<Q>(l), &mut |l| self.qualif_local::<Q>(l),
rhs, rhs,
), ),
@ -246,7 +246,7 @@ impl<'tcx> Validator<'_, 'tcx> {
match &terminator.kind { match &terminator.kind {
TerminatorKind::Call { .. } => { TerminatorKind::Call { .. } => {
let return_ty = self.body.local_decls[local].ty; let return_ty = self.body.local_decls[local].ty;
Q::in_any_value_of_ty(&self.ccx, return_ty) Q::in_any_value_of_ty(self.ccx, return_ty)
} }
kind => { kind => {
span_bug!(terminator.source_info.span, "{:?} not promotable", kind); span_bug!(terminator.source_info.span, "{:?} not promotable", kind);

View File

@ -50,7 +50,7 @@ impl<T> Sharded<T> {
#[inline] #[inline]
pub fn get_shard_by_value<K: Hash + ?Sized>(&self, _val: &K) -> &Lock<T> { pub fn get_shard_by_value<K: Hash + ?Sized>(&self, _val: &K) -> &Lock<T> {
match self { match self {
Self::Single(single) => &single, Self::Single(single) => single,
#[cfg(parallel_compiler)] #[cfg(parallel_compiler)]
Self::Shards(..) => self.get_shard_by_hash(make_hash(_val)), Self::Shards(..) => self.get_shard_by_hash(make_hash(_val)),
} }
@ -64,7 +64,7 @@ impl<T> Sharded<T> {
#[inline] #[inline]
pub fn get_shard_by_index(&self, _i: usize) -> &Lock<T> { pub fn get_shard_by_index(&self, _i: usize) -> &Lock<T> {
match self { match self {
Self::Single(single) => &single, Self::Single(single) => single,
#[cfg(parallel_compiler)] #[cfg(parallel_compiler)]
Self::Shards(shards) => { Self::Shards(shards) => {
// SAFETY: The index gets ANDed with the shard mask, ensuring it is always inbounds. // SAFETY: The index gets ANDed with the shard mask, ensuring it is always inbounds.

View File

@ -138,7 +138,7 @@ impl AnnotateSnippetEmitterWriter {
let message = self.translate_messages(messages, args); let message = self.translate_messages(messages, args);
if let Some(source_map) = &self.source_map { if let Some(source_map) = &self.source_map {
// Make sure our primary file comes first // Make sure our primary file comes first
let primary_lo = if let Some(ref primary_span) = msp.primary_span().as_ref() { let primary_lo = if let Some(primary_span) = msp.primary_span().as_ref() {
if primary_span.is_dummy() { if primary_span.is_dummy() {
// FIXME(#59346): Not sure when this is the case and what // FIXME(#59346): Not sure when this is the case and what
// should be done if it happens // should be done if it happens
@ -203,7 +203,7 @@ impl AnnotateSnippetEmitterWriter {
Slice { Slice {
source, source,
line_start: *line_index, line_start: *line_index,
origin: Some(&file_name), origin: Some(file_name),
// FIXME(#59346): Not really sure when `fold` should be true or false // FIXME(#59346): Not really sure when `fold` should be true or false
fold: false, fold: false,
annotations: annotations annotations: annotations

View File

@ -1297,7 +1297,7 @@ impl EmitterWriter {
buffer.append(line_number, line, style_or_override(*style, override_style)); buffer.append(line_number, line, style_or_override(*style, override_style));
} }
} else { } else {
buffer.append(line_number, &text, style_or_override(*style, override_style)); buffer.append(line_number, text, style_or_override(*style, override_style));
} }
} }
} }
@ -1931,7 +1931,7 @@ impl EmitterWriter {
self.draw_code_line( self.draw_code_line(
&mut buffer, &mut buffer,
&mut row_num, &mut row_num,
&highlight_parts, highlight_parts,
line_pos + line_start, line_pos + line_start,
line, line,
show_code_change, show_code_change,
@ -2338,7 +2338,7 @@ impl FileWithAnnotatedLines {
let mut output = vec![]; let mut output = vec![];
let mut multiline_annotations = vec![]; let mut multiline_annotations = vec![];
if let Some(ref sm) = emitter.source_map() { if let Some(sm) = emitter.source_map() {
for SpanLabel { span, is_primary, label } in msp.span_labels() { for SpanLabel { span, is_primary, label } in msp.span_labels() {
// If we don't have a useful span, pick the primary span if that exists. // If we don't have a useful span, pick the primary span if that exists.
// Worst case we'll just print an error at the top of the main file. // Worst case we'll just print an error at the top of the main file.
@ -2362,7 +2362,7 @@ impl FileWithAnnotatedLines {
let label = label.as_ref().map(|m| { let label = label.as_ref().map(|m| {
normalize_whitespace( normalize_whitespace(
&emitter.translate_message(m, &args).map_err(Report::new).unwrap(), &emitter.translate_message(m, args).map_err(Report::new).unwrap(),
) )
}); });

View File

@ -777,7 +777,7 @@ impl SyntaxExtension {
attrs: &[ast::Attribute], attrs: &[ast::Attribute],
) -> SyntaxExtension { ) -> SyntaxExtension {
let allow_internal_unstable = let allow_internal_unstable =
attr::allow_internal_unstable(sess, &attrs).collect::<Vec<Symbol>>(); attr::allow_internal_unstable(sess, attrs).collect::<Vec<Symbol>>();
let allow_internal_unsafe = attr::contains_name(attrs, sym::allow_internal_unsafe); let allow_internal_unsafe = attr::contains_name(attrs, sym::allow_internal_unsafe);
let local_inner_macros = attr::find_by_name(attrs, sym::macro_export) let local_inner_macros = attr::find_by_name(attrs, sym::macro_export)
@ -796,9 +796,9 @@ impl SyntaxExtension {
) )
}) })
.unwrap_or_else(|| (None, helper_attrs)); .unwrap_or_else(|| (None, helper_attrs));
let stability = attr::find_stability(&sess, attrs, span); let stability = attr::find_stability(sess, attrs, span);
let const_stability = attr::find_const_stability(&sess, attrs, span); let const_stability = attr::find_const_stability(sess, attrs, span);
let body_stability = attr::find_body_stability(&sess, attrs); let body_stability = attr::find_body_stability(sess, attrs);
if let Some((_, sp)) = const_stability { if let Some((_, sp)) = const_stability {
sess.emit_err(errors::MacroConstStability { sess.emit_err(errors::MacroConstStability {
span: sp, span: sp,
@ -818,7 +818,7 @@ impl SyntaxExtension {
allow_internal_unstable: (!allow_internal_unstable.is_empty()) allow_internal_unstable: (!allow_internal_unstable.is_empty())
.then(|| allow_internal_unstable.into()), .then(|| allow_internal_unstable.into()),
stability: stability.map(|(s, _)| s), stability: stability.map(|(s, _)| s),
deprecation: attr::find_deprecation(&sess, features, attrs).map(|(d, _)| d), deprecation: attr::find_deprecation(sess, features, attrs).map(|(d, _)| d),
helper_attrs, helper_attrs,
edition, edition,
builtin_name, builtin_name,
@ -1464,7 +1464,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
if crate_matches { if crate_matches {
sess.buffer_lint_with_diagnostic( sess.buffer_lint_with_diagnostic(
&PROC_MACRO_BACK_COMPAT, PROC_MACRO_BACK_COMPAT,
item.ident.span, item.ident.span,
ast::CRATE_NODE_ID, ast::CRATE_NODE_ID,
"using an old version of `rental`", "using an old version of `rental`",

View File

@ -434,9 +434,9 @@ impl<'a> StripUnconfigured<'a> {
} }
}; };
( (
parse_cfg(&meta_item, &self.sess).map_or(true, |meta_item| { parse_cfg(&meta_item, self.sess).map_or(true, |meta_item| {
attr::cfg_matches( attr::cfg_matches(
&meta_item, meta_item,
&self.sess.parse_sess, &self.sess.parse_sess,
self.lint_node_id, self.lint_node_id,
self.features, self.features,

View File

@ -1096,7 +1096,7 @@ impl InvocationCollectorNode for P<ast::Item> {
ModKind::Loaded(_, inline, _) => { ModKind::Loaded(_, inline, _) => {
// Inline `mod foo { ... }`, but we still need to push directories. // Inline `mod foo { ... }`, but we still need to push directories.
let (dir_path, dir_ownership) = mod_dir_path( let (dir_path, dir_ownership) = mod_dir_path(
&ecx.sess, ecx.sess,
ident, ident,
&attrs, &attrs,
&ecx.current_expansion.module, &ecx.current_expansion.module,
@ -1111,7 +1111,7 @@ impl InvocationCollectorNode for P<ast::Item> {
let old_attrs_len = attrs.len(); let old_attrs_len = attrs.len();
let ParsedExternalMod { items, spans, file_path, dir_path, dir_ownership } = let ParsedExternalMod { items, spans, file_path, dir_path, dir_ownership } =
parse_external_mod( parse_external_mod(
&ecx.sess, ecx.sess,
ident, ident,
span, span,
&ecx.current_expansion.module, &ecx.current_expansion.module,
@ -1168,14 +1168,14 @@ impl InvocationCollectorNode for P<ast::Item> {
ast::UseTreeKind::Simple(_) => idents.push(ut.ident()), ast::UseTreeKind::Simple(_) => idents.push(ut.ident()),
ast::UseTreeKind::Nested(nested) => { ast::UseTreeKind::Nested(nested) => {
for (ut, _) in nested { for (ut, _) in nested {
collect_use_tree_leaves(&ut, idents); collect_use_tree_leaves(ut, idents);
} }
} }
} }
} }
let mut idents = Vec::new(); let mut idents = Vec::new();
collect_use_tree_leaves(&ut, &mut idents); collect_use_tree_leaves(ut, &mut idents);
return idents; return idents;
} }
@ -1531,7 +1531,7 @@ impl InvocationCollectorNode for AstNodeWrapper<P<ast::Expr>, OptExprTag> {
} }
} }
fn pre_flat_map_node_collect_attr(cfg: &StripUnconfigured<'_>, attr: &ast::Attribute) { fn pre_flat_map_node_collect_attr(cfg: &StripUnconfigured<'_>, attr: &ast::Attribute) {
cfg.maybe_emit_expr_attr_err(&attr); cfg.maybe_emit_expr_attr_err(attr);
} }
} }
@ -1580,7 +1580,7 @@ struct InvocationCollector<'a, 'b> {
impl<'a, 'b> InvocationCollector<'a, 'b> { impl<'a, 'b> InvocationCollector<'a, 'b> {
fn cfg(&self) -> StripUnconfigured<'_> { fn cfg(&self) -> StripUnconfigured<'_> {
StripUnconfigured { StripUnconfigured {
sess: &self.cx.sess, sess: self.cx.sess,
features: Some(self.cx.ecfg.features), features: Some(self.cx.ecfg.features),
config_tokens: false, config_tokens: false,
lint_node_id: self.cx.current_expansion.lint_node_id, lint_node_id: self.cx.current_expansion.lint_node_id,
@ -1693,7 +1693,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
if attr.is_doc_comment() { if attr.is_doc_comment() {
self.cx.sess.parse_sess.buffer_lint_with_diagnostic( self.cx.sess.parse_sess.buffer_lint_with_diagnostic(
&UNUSED_DOC_COMMENTS, UNUSED_DOC_COMMENTS,
current_span, current_span,
self.cx.current_expansion.lint_node_id, self.cx.current_expansion.lint_node_id,
"unused doc comment", "unused doc comment",
@ -1705,7 +1705,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
// eagerly evaluated. // eagerly evaluated.
if attr_name != sym::cfg && attr_name != sym::cfg_attr { if attr_name != sym::cfg && attr_name != sym::cfg_attr {
self.cx.sess.parse_sess.buffer_lint_with_diagnostic( self.cx.sess.parse_sess.buffer_lint_with_diagnostic(
&UNUSED_ATTRIBUTES, UNUSED_ATTRIBUTES,
attr.span, attr.span,
self.cx.current_expansion.lint_node_id, self.cx.current_expansion.lint_node_id,
format!("unused attribute `{attr_name}`"), format!("unused attribute `{attr_name}`"),

View File

@ -650,6 +650,6 @@ fn buffer_lint(
) { ) {
// Macros loaded from other crates have dummy node ids. // Macros loaded from other crates have dummy node ids.
if node_id != DUMMY_NODE_ID { if node_id != DUMMY_NODE_ID {
sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message); sess.buffer_lint(META_VARIABLE_MISUSE, span, node_id, message);
} }
} }

View File

@ -483,7 +483,7 @@ impl TtParser {
if matches!(t, Token { kind: DocComment(..), .. }) { if matches!(t, Token { kind: DocComment(..), .. }) {
mp.idx += 1; mp.idx += 1;
self.cur_mps.push(mp); self.cur_mps.push(mp);
} else if token_name_eq(&t, token) { } else if token_name_eq(t, token) {
mp.idx += 1; mp.idx += 1;
self.next_mps.push(mp); self.next_mps.push(mp);
} }

View File

@ -213,7 +213,7 @@ fn expand_macro<'cx>(
let arm_span = rhses[i].span(); let arm_span = rhses[i].span();
// rhs has holes ( `$id` and `$(...)` that need filled) // rhs has holes ( `$id` and `$(...)` that need filled)
let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) { let mut tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) {
Ok(tts) => tts, Ok(tts) => tts,
Err(mut err) => { Err(mut err) => {
err.emit(); err.emit();
@ -511,7 +511,7 @@ pub fn compile_declarative_macro(
) )
.pop() .pop()
.unwrap(); .unwrap();
valid &= check_lhs_nt_follows(&sess.parse_sess, &def, &tt); valid &= check_lhs_nt_follows(&sess.parse_sess, def, &tt);
return tt; return tt;
} }
sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
@ -927,7 +927,7 @@ impl<'tt> TtHandle<'tt> {
fn get(&'tt self) -> &'tt mbe::TokenTree { fn get(&'tt self) -> &'tt mbe::TokenTree {
match self { match self {
TtHandle::TtRef(tt) => tt, TtHandle::TtRef(tt) => tt,
TtHandle::Token(token_tt) => &token_tt, TtHandle::Token(token_tt) => token_tt,
} }
} }
} }
@ -1170,7 +1170,7 @@ fn check_matcher_core<'tt>(
Some(NonterminalKind::PatParam { inferred: false }), Some(NonterminalKind::PatParam { inferred: false }),
)); ));
sess.buffer_lint_with_diagnostic( sess.buffer_lint_with_diagnostic(
&RUST_2021_INCOMPATIBLE_OR_PATTERNS, RUST_2021_INCOMPATIBLE_OR_PATTERNS,
span, span,
ast::CRATE_NODE_ID, ast::CRATE_NODE_ID,
"the meaning of the `pat` fragment specifier is changing in Rust 2021, which may affect this macro", "the meaning of the `pat` fragment specifier is changing in Rust 2021, which may affect this macro",
@ -1407,7 +1407,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String { fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match tt { match tt {
mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(), mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
mbe::TokenTree::MetaVar(_, name) => format!("${name}"), mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"), mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"), mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),

View File

@ -116,7 +116,7 @@ pub(super) fn parse(
fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess, span: Span) { fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess, span: Span) {
if !features.macro_metavar_expr { if !features.macro_metavar_expr {
let msg = "meta-variable expressions are unstable"; let msg = "meta-variable expressions are unstable";
feature_err(&sess, sym::macro_metavar_expr, span, msg).emit(); feature_err(sess, sym::macro_metavar_expr, span, msg).emit();
} }
} }
@ -174,7 +174,7 @@ fn parse_tree<'a>(
// The delimiter is `{`. This indicates the beginning // The delimiter is `{`. This indicates the beginning
// of a meta-variable expression (e.g. `${count(ident)}`). // of a meta-variable expression (e.g. `${count(ident)}`).
// Try to parse the meta-variable expression. // Try to parse the meta-variable expression.
match MetaVarExpr::parse(&tts, delim_span.entire(), sess) { match MetaVarExpr::parse(tts, delim_span.entire(), sess) {
Err(mut err) => { Err(mut err) => {
err.emit(); err.emit();
// Returns early the same read `$` to avoid spanning // Returns early the same read `$` to avoid spanning
@ -242,10 +242,8 @@ fn parse_tree<'a>(
// `tree` is followed by some other token. This is an error. // `tree` is followed by some other token. This is an error.
Some(tokenstream::TokenTree::Token(token, _)) => { Some(tokenstream::TokenTree::Token(token, _)) => {
let msg = format!( let msg =
"expected identifier, found `{}`", format!("expected identifier, found `{}`", pprust::token_to_string(token),);
pprust::token_to_string(&token),
);
sess.span_diagnostic.span_err(token.span, msg); sess.span_diagnostic.span_err(token.span, msg);
TokenTree::MetaVar(token.span, Ident::empty()) TokenTree::MetaVar(token.span, Ident::empty())
} }
@ -291,7 +289,7 @@ fn parse_kleene_op<'a>(
span: Span, span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> { ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
match input.next() { match input.next() {
Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) { Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
Some(op) => Ok(Ok((op, token.span))), Some(op) => Ok(Ok((op, token.span))),
None => Ok(Err(token.clone())), None => Ok(Err(token.clone())),
}, },

View File

@ -90,7 +90,7 @@ pub(super) fn transcribe<'a>(
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS. // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(&src, src_span)]; let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(src, src_span)];
// As we descend in the RHS, we will need to be able to match nested sequences of matchers. // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being // `repeats` keeps track of where we are in matching at each level, with the last element being
@ -166,7 +166,7 @@ pub(super) fn transcribe<'a>(
// and the matches in `interp` have the same shape. Otherwise, either the caller or the // and the matches in `interp` have the same shape. Otherwise, either the caller or the
// macro writer has made a mistake. // macro writer has made a mistake.
seq @ mbe::TokenTree::Sequence(_, delimited) => { seq @ mbe::TokenTree::Sequence(_, delimited) => {
match lockstep_iter_size(&seq, interp, &repeats) { match lockstep_iter_size(seq, interp, &repeats) {
LockstepIterSize::Unconstrained => { LockstepIterSize::Unconstrained => {
return Err(cx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() })); return Err(cx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
} }
@ -250,7 +250,7 @@ pub(super) fn transcribe<'a>(
// Replace meta-variable expressions with the result of their expansion. // Replace meta-variable expressions with the result of their expansion.
mbe::TokenTree::MetaVarExpr(sp, expr) => { mbe::TokenTree::MetaVarExpr(sp, expr) => {
transcribe_metavar_expr(cx, expr, interp, &mut marker, &repeats, &mut result, &sp)?; transcribe_metavar_expr(cx, expr, interp, &mut marker, &repeats, &mut result, sp)?;
} }
// If we are entering a new delimiter, we push its contents to the `stack` to be // If we are entering a new delimiter, we push its contents to the `stack` to be
@ -529,7 +529,7 @@ fn transcribe_metavar_expr<'a>(
match *expr { match *expr {
MetaVarExpr::Count(original_ident, depth_opt) => { MetaVarExpr::Count(original_ident, depth_opt) => {
let matched = matched_from_ident(cx, original_ident, interp)?; let matched = matched_from_ident(cx, original_ident, interp)?;
let count = count_repetitions(cx, depth_opt, matched, &repeats, sp)?; let count = count_repetitions(cx, depth_opt, matched, repeats, sp)?;
let tt = TokenTree::token_alone( let tt = TokenTree::token_alone(
TokenKind::lit(token::Integer, sym::integer(count), None), TokenKind::lit(token::Integer, sym::integer(count), None),
visited_span(), visited_span(),

View File

@ -57,7 +57,7 @@ pub(crate) fn parse_external_mod(
// We bail on the first error, but that error does not cause a fatal error... (1) // We bail on the first error, but that error does not cause a fatal error... (1)
let result: Result<_, ModError<'_>> = try { let result: Result<_, ModError<'_>> = try {
// Extract the file path and the new ownership. // Extract the file path and the new ownership.
let mp = mod_file_path(sess, ident, &attrs, &module.dir_path, dir_ownership)?; let mp = mod_file_path(sess, ident, attrs, &module.dir_path, dir_ownership)?;
dir_ownership = mp.dir_ownership; dir_ownership = mp.dir_ownership;
// Ensure file paths are acyclic. // Ensure file paths are acyclic.
@ -119,7 +119,7 @@ pub(crate) fn mod_dir_path(
Inline::No => { Inline::No => {
// FIXME: This is a subset of `parse_external_mod` without actual parsing, // FIXME: This is a subset of `parse_external_mod` without actual parsing,
// check whether the logic for unloaded, loaded and inline modules can be unified. // check whether the logic for unloaded, loaded and inline modules can be unified.
let file_path = mod_file_path(sess, ident, &attrs, &module.dir_path, dir_ownership) let file_path = mod_file_path(sess, ident, attrs, &module.dir_path, dir_ownership)
.map(|mp| { .map(|mp| {
dir_ownership = mp.dir_ownership; dir_ownership = mp.dir_ownership;
mp.file_path mp.file_path

View File

@ -784,6 +784,6 @@ impl server::Server for Rustc<'_, '_> {
} }
fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
f(&symbol.as_str()) f(symbol.as_str())
} }
} }

View File

@ -227,7 +227,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self.tcx(), self.tcx(),
generics, generics,
&mut err, &mut err,
&ty_param_name, ty_param_name,
&trait_name, &trait_name,
None, None,
None, None,

View File

@ -106,7 +106,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
); );
} }
// check if the impl trait that we are considering is a impl of a local trait // check if the impl trait that we are considering is a impl of a local trait
self.maybe_lint_blanket_trait_impl(&self_ty, &mut diag); self.maybe_lint_blanket_trait_impl(self_ty, &mut diag);
diag.stash(self_ty.span, StashKey::TraitMissingMethod); diag.stash(self_ty.span, StashKey::TraitMissingMethod);
} else { } else {
let msg = "trait objects without an explicit `dyn` are deprecated"; let msg = "trait objects without an explicit `dyn` are deprecated";
@ -121,7 +121,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
sugg, sugg,
Applicability::MachineApplicable, Applicability::MachineApplicable,
); );
self.maybe_lint_blanket_trait_impl(&self_ty, lint); self.maybe_lint_blanket_trait_impl(self_ty, lint);
lint lint
}, },
); );

Some files were not shown because too many files have changed in this diff Show More