Rollup merge of #131375 - klensy:clone_on_ref_ptr, r=cjgillot

compiler: apply clippy::clone_on_ref_ptr for CI

Apply lint https://rust-lang.github.io/rust-clippy/master/index.html#/clone_on_ref_ptr for compiler, also see https://github.com/rust-lang/rust/pull/131225#discussion_r1790109443.

Some `Arc`s may have been mixed up with `Lrc`s in this change, sorry.

https://rust-lang.zulipchat.com/#narrow/channel/131828-t-compiler/topic/enable.20more.20clippy.20lints.20for.20compiler.20.28and.5Cor.20std.29
This commit is contained in:
Jubilee 2024-10-29 03:11:39 -07:00 committed by GitHub
commit 5d0f52efa4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
34 changed files with 100 additions and 92 deletions

View File

@ -3301,6 +3301,7 @@ version = "0.0.0"
dependencies = [ dependencies = [
"itertools", "itertools",
"rustc_ast", "rustc_ast",
"rustc_data_structures",
"rustc_lexer", "rustc_lexer",
"rustc_span", "rustc_span",
"thin-vec", "thin-vec",

View File

@ -368,7 +368,7 @@ impl Clone for TokenKind {
// a copy. This is faster than the `derive(Clone)` version which has a // a copy. This is faster than the `derive(Clone)` version which has a
// separate path for every variant. // separate path for every variant.
match self { match self {
Interpolated(nt) => Interpolated(nt.clone()), Interpolated(nt) => Interpolated(Lrc::clone(nt)),
_ => unsafe { std::ptr::read(self) }, _ => unsafe { std::ptr::read(self) },
} }
} }

View File

@ -3,6 +3,7 @@ use std::assert_matches::assert_matches;
use rustc_ast::ptr::P as AstP; use rustc_ast::ptr::P as AstP;
use rustc_ast::*; use rustc_ast::*;
use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
use rustc_hir as hir; use rustc_hir as hir;
use rustc_hir::HirId; use rustc_hir::HirId;
use rustc_hir::def::{DefKind, Res}; use rustc_hir::def::{DefKind, Res};
@ -143,7 +144,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
ExprKind::IncludedBytes(bytes) => { ExprKind::IncludedBytes(bytes) => {
let lit = self.arena.alloc(respan( let lit = self.arena.alloc(respan(
self.lower_span(e.span), self.lower_span(e.span),
LitKind::ByteStr(bytes.clone(), StrStyle::Cooked), LitKind::ByteStr(Lrc::clone(bytes), StrStyle::Cooked),
)); ));
hir::ExprKind::Lit(lit) hir::ExprKind::Lit(lit)
} }
@ -536,7 +537,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
this.mark_span_with_reason( this.mark_span_with_reason(
DesugaringKind::TryBlock, DesugaringKind::TryBlock,
expr.span, expr.span,
Some(this.allow_try_trait.clone()), Some(Lrc::clone(&this.allow_try_trait)),
), ),
expr, expr,
) )
@ -544,7 +545,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let try_span = this.mark_span_with_reason( let try_span = this.mark_span_with_reason(
DesugaringKind::TryBlock, DesugaringKind::TryBlock,
this.tcx.sess.source_map().end_point(body.span), this.tcx.sess.source_map().end_point(body.span),
Some(this.allow_try_trait.clone()), Some(Lrc::clone(&this.allow_try_trait)),
); );
(try_span, this.expr_unit(try_span)) (try_span, this.expr_unit(try_span))
@ -653,7 +654,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason( let unstable_span = self.mark_span_with_reason(
DesugaringKind::Async, DesugaringKind::Async,
self.lower_span(span), self.lower_span(span),
Some(self.allow_gen_future.clone()), Some(Lrc::clone(&self.allow_gen_future)),
); );
let resume_ty = let resume_ty =
self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span, None); self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span, None);
@ -739,7 +740,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason( let unstable_span = self.mark_span_with_reason(
DesugaringKind::Async, DesugaringKind::Async,
span, span,
Some(self.allow_gen_future.clone()), Some(Lrc::clone(&self.allow_gen_future)),
); );
self.lower_attrs(inner_hir_id, &[Attribute { self.lower_attrs(inner_hir_id, &[Attribute {
kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new( kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new(
@ -815,13 +816,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
let features = match await_kind { let features = match await_kind {
FutureKind::Future => None, FutureKind::Future => None,
FutureKind::AsyncIterator => Some(self.allow_for_await.clone()), FutureKind::AsyncIterator => Some(Lrc::clone(&self.allow_for_await)),
}; };
let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, features); let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, features);
let gen_future_span = self.mark_span_with_reason( let gen_future_span = self.mark_span_with_reason(
DesugaringKind::Await, DesugaringKind::Await,
full_span, full_span,
Some(self.allow_gen_future.clone()), Some(Lrc::clone(&self.allow_gen_future)),
); );
let expr_hir_id = expr.hir_id; let expr_hir_id = expr.hir_id;
@ -1841,13 +1842,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason( let unstable_span = self.mark_span_with_reason(
DesugaringKind::QuestionMark, DesugaringKind::QuestionMark,
span, span,
Some(self.allow_try_trait.clone()), Some(Lrc::clone(&self.allow_try_trait)),
); );
let try_span = self.tcx.sess.source_map().end_point(span); let try_span = self.tcx.sess.source_map().end_point(span);
let try_span = self.mark_span_with_reason( let try_span = self.mark_span_with_reason(
DesugaringKind::QuestionMark, DesugaringKind::QuestionMark,
try_span, try_span,
Some(self.allow_try_trait.clone()), Some(Lrc::clone(&self.allow_try_trait)),
); );
// `Try::branch(<expr>)` // `Try::branch(<expr>)`
@ -1941,7 +1942,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason( let unstable_span = self.mark_span_with_reason(
DesugaringKind::YeetExpr, DesugaringKind::YeetExpr,
span, span,
Some(self.allow_try_trait.clone()), Some(Lrc::clone(&self.allow_try_trait)),
); );
let from_yeet_expr = self.wrap_in_try_constructor( let from_yeet_expr = self.wrap_in_try_constructor(

View File

@ -1878,7 +1878,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
CoroutineKind::Async { return_impl_trait_id, .. } => (return_impl_trait_id, None), CoroutineKind::Async { return_impl_trait_id, .. } => (return_impl_trait_id, None),
CoroutineKind::Gen { return_impl_trait_id, .. } => (return_impl_trait_id, None), CoroutineKind::Gen { return_impl_trait_id, .. } => (return_impl_trait_id, None),
CoroutineKind::AsyncGen { return_impl_trait_id, .. } => { CoroutineKind::AsyncGen { return_impl_trait_id, .. } => {
(return_impl_trait_id, Some(self.allow_async_iterator.clone())) (return_impl_trait_id, Some(Lrc::clone(&self.allow_async_iterator)))
} }
}; };

View File

@ -73,7 +73,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let bound_modifier_allowed_features = if let Res::Def(DefKind::Trait, async_def_id) = res let bound_modifier_allowed_features = if let Res::Def(DefKind::Trait, async_def_id) = res
&& self.tcx.async_fn_trait_kind_from_def_id(async_def_id).is_some() && self.tcx.async_fn_trait_kind_from_def_id(async_def_id).is_some()
{ {
Some(self.allow_async_fn_traits.clone()) Some(Lrc::clone(&self.allow_async_fn_traits))
} else { } else {
None None
}; };

View File

@ -7,6 +7,7 @@ edition = "2021"
# tidy-alphabetical-start # tidy-alphabetical-start
itertools = "0.12" itertools = "0.12"
rustc_ast = { path = "../rustc_ast" } rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_lexer = { path = "../rustc_lexer" } rustc_lexer = { path = "../rustc_lexer" }
rustc_span = { path = "../rustc_span" } rustc_span = { path = "../rustc_span" }
thin-vec = "0.2.12" thin-vec = "0.2.12"

View File

@ -21,6 +21,7 @@ use rustc_ast::{
GenericBound, InlineAsmOperand, InlineAsmOptions, InlineAsmRegOrRegClass, GenericBound, InlineAsmOperand, InlineAsmOptions, InlineAsmRegOrRegClass,
InlineAsmTemplatePiece, PatKind, RangeEnd, RangeSyntax, Safety, SelfKind, Term, attr, InlineAsmTemplatePiece, PatKind, RangeEnd, RangeSyntax, Safety, SelfKind, Term, attr,
}; };
use rustc_data_structures::sync::Lrc;
use rustc_span::edition::Edition; use rustc_span::edition::Edition;
use rustc_span::source_map::{SourceMap, Spanned}; use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{Ident, IdentPrinter, Symbol, kw, sym}; use rustc_span::symbol::{Ident, IdentPrinter, Symbol, kw, sym};
@ -105,7 +106,7 @@ fn split_block_comment_into_lines(text: &str, col: CharPos) -> Vec<String> {
fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment> { fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment> {
let sm = SourceMap::new(sm.path_mapping().clone()); let sm = SourceMap::new(sm.path_mapping().clone());
let source_file = sm.new_source_file(path, src); let source_file = sm.new_source_file(path, src);
let text = (*source_file.src.as_ref().unwrap()).clone(); let text = Lrc::clone(&(*source_file.src.as_ref().unwrap()));
let text: &str = text.as_str(); let text: &str = text.as_str();
let start_bpos = source_file.start_pos; let start_bpos = source_file.start_pos;

View File

@ -107,13 +107,13 @@ pub(crate) fn compute_regions<'a, 'tcx>(
param_env, param_env,
body, body,
promoted, promoted,
universal_regions.clone(), Rc::clone(&universal_regions),
location_table, location_table,
borrow_set, borrow_set,
&mut all_facts, &mut all_facts,
flow_inits, flow_inits,
move_data, move_data,
elements.clone(), Rc::clone(&elements),
upvars, upvars,
); );

View File

@ -733,7 +733,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
} }
// Now take member constraints into account. // Now take member constraints into account.
let member_constraints = self.member_constraints.clone(); let member_constraints = Rc::clone(&self.member_constraints);
for m_c_i in member_constraints.indices(scc_a) { for m_c_i in member_constraints.indices(scc_a) {
self.apply_member_constraint(scc_a, m_c_i, member_constraints.choice_regions(m_c_i)); self.apply_member_constraint(scc_a, m_c_i, member_constraints.choice_regions(m_c_i));
} }
@ -1679,7 +1679,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
infcx: &InferCtxt<'tcx>, infcx: &InferCtxt<'tcx>,
errors_buffer: &mut RegionErrors<'tcx>, errors_buffer: &mut RegionErrors<'tcx>,
) { ) {
let member_constraints = self.member_constraints.clone(); let member_constraints = Rc::clone(&self.member_constraints);
for m_c_i in member_constraints.all_indices() { for m_c_i in member_constraints.all_indices() {
debug!(?m_c_i); debug!(?m_c_i);
let m_c = &member_constraints[m_c_i]; let m_c = &member_constraints[m_c_i];

View File

@ -134,7 +134,7 @@ pub(crate) fn type_check<'a, 'tcx>(
let mut constraints = MirTypeckRegionConstraints { let mut constraints = MirTypeckRegionConstraints {
placeholder_indices: PlaceholderIndices::default(), placeholder_indices: PlaceholderIndices::default(),
placeholder_index_to_region: IndexVec::default(), placeholder_index_to_region: IndexVec::default(),
liveness_constraints: LivenessValues::with_specific_points(elements.clone()), liveness_constraints: LivenessValues::with_specific_points(Rc::clone(&elements)),
outlives_constraints: OutlivesConstraintSet::default(), outlives_constraints: OutlivesConstraintSet::default(),
member_constraints: MemberConstraintSet::default(), member_constraints: MemberConstraintSet::default(),
type_tests: Vec::default(), type_tests: Vec::default(),
@ -150,7 +150,7 @@ pub(crate) fn type_check<'a, 'tcx>(
infcx, infcx,
param_env, param_env,
implicit_region_bound, implicit_region_bound,
universal_regions.clone(), Rc::clone(&universal_regions),
&mut constraints, &mut constraints,
); );

View File

@ -570,7 +570,7 @@ fn thin_lto(
info!(" - {}: re-compiled", module_name); info!(" - {}: re-compiled", module_name);
opt_jobs.push(LtoModuleCodegen::Thin(ThinModule { opt_jobs.push(LtoModuleCodegen::Thin(ThinModule {
shared: shared.clone(), shared: Arc::clone(&shared),
idx: module_index, idx: module_index,
})); }));
} }

View File

@ -514,7 +514,7 @@ pub(crate) fn start_async_codegen<B: ExtraBackendMethods>(
future: Some(coordinator_thread), future: Some(coordinator_thread),
phantom: PhantomData, phantom: PhantomData,
}, },
output_filenames: tcx.output_filenames(()).clone(), output_filenames: Arc::clone(tcx.output_filenames(())),
} }
} }
@ -1203,7 +1203,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
coordinator_send, coordinator_send,
expanded_args: tcx.sess.expanded_args.clone(), expanded_args: tcx.sess.expanded_args.clone(),
diag_emitter: shared_emitter.clone(), diag_emitter: shared_emitter.clone(),
output_filenames: tcx.output_filenames(()).clone(), output_filenames: Arc::clone(tcx.output_filenames(())),
regular_module_config: regular_config, regular_module_config: regular_config,
metadata_module_config: metadata_config, metadata_module_config: metadata_config,
allocator_module_config: allocator_config, allocator_module_config: allocator_config,

View File

@ -7,7 +7,7 @@ use rustc_ast::expand::allocator::{ALLOCATOR_METHODS, AllocatorKind, global_fn_n
use rustc_attr as attr; use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry}; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::par_map; use rustc_data_structures::sync::{Lrc, par_map};
use rustc_data_structures::unord::UnordMap; use rustc_data_structures::unord::UnordMap;
use rustc_hir::def_id::{DefId, LOCAL_CRATE}; use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem; use rustc_hir::lang_items::LangItem;
@ -923,7 +923,7 @@ impl CrateInfo {
crate_name: UnordMap::with_capacity(n_crates), crate_name: UnordMap::with_capacity(n_crates),
used_crates, used_crates,
used_crate_source: UnordMap::with_capacity(n_crates), used_crate_source: UnordMap::with_capacity(n_crates),
dependency_formats: tcx.dependency_formats(()).clone(), dependency_formats: Lrc::clone(tcx.dependency_formats(())),
windows_subsystem, windows_subsystem,
natvis_debugger_visualizers: Default::default(), natvis_debugger_visualizers: Default::default(),
}; };
@ -936,7 +936,7 @@ impl CrateInfo {
info.crate_name.insert(cnum, tcx.crate_name(cnum)); info.crate_name.insert(cnum, tcx.crate_name(cnum));
let used_crate_source = tcx.used_crate_source(cnum); let used_crate_source = tcx.used_crate_source(cnum);
info.used_crate_source.insert(cnum, used_crate_source.clone()); info.used_crate_source.insert(cnum, Lrc::clone(used_crate_source));
if tcx.is_profiler_runtime(cnum) { if tcx.is_profiler_runtime(cnum) {
info.profiler_runtime = Some(cnum); info.profiler_runtime = Some(cnum);
} }

View File

@ -1395,7 +1395,7 @@ pub fn install_ice_hook(
} }
let using_internal_features = Arc::new(std::sync::atomic::AtomicBool::default()); let using_internal_features = Arc::new(std::sync::atomic::AtomicBool::default());
let using_internal_features_hook = using_internal_features.clone(); let using_internal_features_hook = Arc::clone(&using_internal_features);
panic::update_hook(Box::new( panic::update_hook(Box::new(
move |default_hook: &(dyn Fn(&PanicHookInfo<'_>) + Send + Sync + 'static), move |default_hook: &(dyn Fn(&PanicHookInfo<'_>) + Send + Sync + 'static),
info: &PanicHookInfo<'_>| { info: &PanicHookInfo<'_>| {

View File

@ -173,7 +173,7 @@ impl AnnotateSnippetEmitter {
source_map.ensure_source_file_source_present(&file); source_map.ensure_source_file_source_present(&file);
( (
format!("{}", source_map.filename_for_diagnostics(&file.name)), format!("{}", source_map.filename_for_diagnostics(&file.name)),
source_string(file.clone(), &line), source_string(Lrc::clone(&file), &line),
line.line_index, line.line_index,
line.annotations, line.annotations,
) )

View File

@ -1555,7 +1555,7 @@ impl HumanEmitter {
// Get the left-side margin to remove it // Get the left-side margin to remove it
let mut whitespace_margin = usize::MAX; let mut whitespace_margin = usize::MAX;
for line_idx in 0..annotated_file.lines.len() { for line_idx in 0..annotated_file.lines.len() {
let file = annotated_file.file.clone(); let file = Lrc::clone(&annotated_file.file);
let line = &annotated_file.lines[line_idx]; let line = &annotated_file.lines[line_idx];
if let Some(source_string) = if let Some(source_string) =
line.line_index.checked_sub(1).and_then(|l| file.get_line(l)) line.line_index.checked_sub(1).and_then(|l| file.get_line(l))
@ -1646,7 +1646,7 @@ impl HumanEmitter {
let depths = self.render_source_line( let depths = self.render_source_line(
&mut buffer, &mut buffer,
annotated_file.file.clone(), Lrc::clone(&annotated_file.file),
&annotated_file.lines[line_idx], &annotated_file.lines[line_idx],
width_offset, width_offset,
code_offset, code_offset,
@ -2529,7 +2529,12 @@ impl FileWithAnnotatedLines {
// | | | // | | |
// | |______foo // | |______foo
// | baz // | baz
add_annotation_to_file(&mut output, file.clone(), ann.line_start, ann.as_start()); add_annotation_to_file(
&mut output,
Lrc::clone(&file),
ann.line_start,
ann.as_start(),
);
// 4 is the minimum vertical length of a multiline span when presented: two lines // 4 is the minimum vertical length of a multiline span when presented: two lines
// of code and two lines of underline. This is not true for the special case where // of code and two lines of underline. This is not true for the special case where
// the beginning doesn't have an underline, but the current logic seems to be // the beginning doesn't have an underline, but the current logic seems to be
@ -2545,11 +2550,11 @@ impl FileWithAnnotatedLines {
.unwrap_or(ann.line_start); .unwrap_or(ann.line_start);
for line in ann.line_start + 1..until { for line in ann.line_start + 1..until {
// Every `|` that joins the beginning of the span (`___^`) to the end (`|__^`). // Every `|` that joins the beginning of the span (`___^`) to the end (`|__^`).
add_annotation_to_file(&mut output, file.clone(), line, ann.as_line()); add_annotation_to_file(&mut output, Lrc::clone(&file), line, ann.as_line());
} }
let line_end = ann.line_end - 1; let line_end = ann.line_end - 1;
if middle < line_end { if middle < line_end {
add_annotation_to_file(&mut output, file.clone(), line_end, ann.as_line()); add_annotation_to_file(&mut output, Lrc::clone(&file), line_end, ann.as_line());
} }
} else { } else {
end_ann.annotation_type = AnnotationType::Singleline; end_ann.annotation_type = AnnotationType::Singleline;

View File

@ -367,9 +367,9 @@ impl Diagnostic {
ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)), ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)),
ColorConfig::Never => {} ColorConfig::Never => {}
} }
HumanEmitter::new(dst, je.fallback_bundle.clone()) HumanEmitter::new(dst, Lrc::clone(&je.fallback_bundle))
.short_message(short) .short_message(short)
.sm(Some(je.sm.clone())) .sm(Some(Lrc::clone(&je.sm)))
.fluent_bundle(je.fluent_bundle.clone()) .fluent_bundle(je.fluent_bundle.clone())
.diagnostic_width(je.diagnostic_width) .diagnostic_width(je.diagnostic_width)
.macro_backtrace(je.macro_backtrace) .macro_backtrace(je.macro_backtrace)

View File

@ -622,7 +622,7 @@ impl TtParser {
// possible next positions into `next_mps`. After some post-processing, the contents of // possible next positions into `next_mps`. After some post-processing, the contents of
// `next_mps` replenish `cur_mps` and we start over again. // `next_mps` replenish `cur_mps` and we start over again.
self.cur_mps.clear(); self.cur_mps.clear();
self.cur_mps.push(MatcherPos { idx: 0, matches: self.empty_matches.clone() }); self.cur_mps.push(MatcherPos { idx: 0, matches: Rc::clone(&self.empty_matches) });
loop { loop {
self.next_mps.clear(); self.next_mps.clear();

View File

@ -5,6 +5,7 @@ use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, LitKind, Nonterminal, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, LitKind, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize}; use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
use rustc_parse::lexer::nfc_normalize; use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult; use rustc_parse::parser::ParseNtResult;
@ -293,7 +294,7 @@ pub(super) fn transcribe<'a>(
// `Delimiter::Invisible` to maintain parsing priorities. // `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser. // `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp); marker.visit_span(&mut sp);
TokenTree::token_alone(token::Interpolated(nt.clone()), sp) TokenTree::token_alone(token::Interpolated(Lrc::clone(nt)), sp)
} }
MatchedSeq(..) => { MatchedSeq(..) => {
// We were unable to descend far enough. This is an error. // We were unable to descend far enough. This is an error.

View File

@ -364,7 +364,7 @@ impl<'tcx> InferCtxt<'tcx> {
span, span,
concrete_ty, concrete_ty,
r, r,
choice_regions.clone(), Lrc::clone(&choice_regions),
) )
}, },
}); });

View File

@ -142,7 +142,7 @@ impl Linker {
Ok(Linker { Ok(Linker {
dep_graph: tcx.dep_graph.clone(), dep_graph: tcx.dep_graph.clone(),
output_filenames: tcx.output_filenames(()).clone(), output_filenames: Arc::clone(tcx.output_filenames(())),
crate_hash: if tcx.needs_crate_hash() { crate_hash: if tcx.needs_crate_hash() {
Some(tcx.crate_hash(LOCAL_CRATE)) Some(tcx.crate_hash(LOCAL_CRATE))
} else { } else {

View File

@ -278,7 +278,7 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for SpanData {
let source_map = s.tcx.sess.source_map(); let source_map = s.tcx.sess.source_map();
let source_file_index = source_map.lookup_source_file_idx(self.lo); let source_file_index = source_map.lookup_source_file_idx(self.lo);
s.source_file_cache = s.source_file_cache =
(source_map.files()[source_file_index].clone(), source_file_index); (Lrc::clone(&source_map.files()[source_file_index]), source_file_index);
} }
let (ref source_file, source_file_index) = s.source_file_cache; let (ref source_file, source_file_index) = s.source_file_cache;
debug_assert!(source_file.contains(self.lo)); debug_assert!(source_file.contains(self.lo));
@ -2275,7 +2275,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) {
encoder.emit_raw_bytes(&0u64.to_le_bytes()); encoder.emit_raw_bytes(&0u64.to_le_bytes());
let source_map_files = tcx.sess.source_map().files(); let source_map_files = tcx.sess.source_map().files();
let source_file_cache = (source_map_files[0].clone(), 0); let source_file_cache = (Lrc::clone(&source_map_files[0]), 0);
let required_source_files = Some(FxIndexSet::default()); let required_source_files = Some(FxIndexSet::default());
drop(source_map_files); drop(source_map_files);

View File

@ -32,7 +32,7 @@ impl DebuggerVisualizerFile {
pub fn path_erased(&self) -> Self { pub fn path_erased(&self) -> Self {
DebuggerVisualizerFile { DebuggerVisualizerFile {
src: self.src.clone(), src: Lrc::clone(&self.src),
visualizer_type: self.visualizer_type, visualizer_type: self.visualizer_type,
path: None, path: None,
} }

View File

@ -472,13 +472,9 @@ impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
let CacheDecoder { tcx, file_index_to_file, file_index_to_stable_id, source_map, .. } = let CacheDecoder { tcx, file_index_to_file, file_index_to_stable_id, source_map, .. } =
*self; *self;
file_index_to_file Lrc::clone(file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
.borrow_mut()
.entry(index)
.or_insert_with(|| {
let source_file_id = &file_index_to_stable_id[&index]; let source_file_id = &file_index_to_stable_id[&index];
let source_file_cnum = let source_file_cnum = tcx.stable_crate_id_to_crate_num(source_file_id.stable_crate_id);
tcx.stable_crate_id_to_crate_num(source_file_id.stable_crate_id);
// If this `SourceFile` is from a foreign crate, then make sure // If this `SourceFile` is from a foreign crate, then make sure
// that we've imported all of the source files from that crate. // that we've imported all of the source files from that crate.
@ -496,8 +492,7 @@ impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
source_map source_map
.source_file_by_stable_id(source_file_id.stable_source_file_id) .source_file_by_stable_id(source_file_id.stable_source_file_id)
.expect("failed to lookup `SourceFile` in new context") .expect("failed to lookup `SourceFile` in new context")
}) }))
.clone()
} }
} }

View File

@ -14,6 +14,7 @@ use rustc_ast::{
}; };
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{ use rustc_errors::{
Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, PErr, PResult, Subdiagnostic, Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, PErr, PResult, Subdiagnostic,
Suggestions, pluralize, Suggestions, pluralize,
@ -2437,7 +2438,7 @@ impl<'a> Parser<'a> {
let mut labels = vec![]; let mut labels = vec![];
while let TokenKind::Interpolated(nt) = &tok.kind { while let TokenKind::Interpolated(nt) = &tok.kind {
let tokens = nt.tokens(); let tokens = nt.tokens();
labels.push(nt.clone()); labels.push(Lrc::clone(nt));
if let Some(tokens) = tokens if let Some(tokens) = tokens
&& let tokens = tokens.to_attr_token_stream() && let tokens = tokens.to_attr_token_stream()
&& let tokens = tokens.0.deref() && let tokens = tokens.0.deref()

View File

@ -134,7 +134,7 @@ impl<D: Deps> DepGraph<D> {
encoder, encoder,
record_graph, record_graph,
record_stats, record_stats,
prev_graph.clone(), Arc::clone(&prev_graph),
); );
let colors = DepNodeColorMap::new(prev_graph_node_count); let colors = DepNodeColorMap::new(prev_graph_node_count);

View File

@ -237,7 +237,7 @@ impl QueryLatch {
// the `wait` call below, by 1) the `set` method or 2) by deadlock detection. // the `wait` call below, by 1) the `set` method or 2) by deadlock detection.
// Both of these will remove it from the `waiters` list before resuming // Both of these will remove it from the `waiters` list before resuming
// this thread. // this thread.
info.waiters.push(waiter.clone()); info.waiters.push(Arc::clone(waiter));
// If this detects a deadlock and the deadlock handler wants to resume this thread // If this detects a deadlock and the deadlock handler wants to resume this thread
// we have to be in the `wait` call. This is ensured by the deadlock handler // we have to be in the `wait` call. This is ensured by the deadlock handler

View File

@ -1694,9 +1694,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> { fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> {
match macro_kind { match macro_kind {
MacroKind::Bang => self.dummy_ext_bang.clone(), MacroKind::Bang => Lrc::clone(&self.dummy_ext_bang),
MacroKind::Derive => self.dummy_ext_derive.clone(), MacroKind::Derive => Lrc::clone(&self.dummy_ext_derive),
MacroKind::Attr => self.non_macro_attr.ext.clone(), MacroKind::Attr => Lrc::clone(&self.non_macro_attr.ext),
} }
} }

View File

@ -826,7 +826,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
} }
_ => None, _ => None,
}, },
None => self.get_macro(res).map(|macro_data| macro_data.ext.clone()), None => self.get_macro(res).map(|macro_data| Lrc::clone(&macro_data.ext)),
}; };
Ok((ext, res)) Ok((ext, res))
} }

View File

@ -241,7 +241,7 @@ impl ParseSess {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new( let emitter = Box::new(
HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle) HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle)
.sm(Some(sm.clone())), .sm(Some(Lrc::clone(&sm))),
); );
let dcx = DiagCtxt::new(emitter); let dcx = DiagCtxt::new(emitter);
ParseSess::with_dcx(dcx, sm) ParseSess::with_dcx(dcx, sm)
@ -278,7 +278,7 @@ impl ParseSess {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new(HumanEmitter::new( let emitter = Box::new(HumanEmitter::new(
stderr_destination(ColorConfig::Auto), stderr_destination(ColorConfig::Auto),
fallback_bundle.clone(), Lrc::clone(&fallback_bundle),
)); ));
let fatal_dcx = DiagCtxt::new(emitter); let fatal_dcx = DiagCtxt::new(emitter);
let dcx = DiagCtxt::new(Box::new(SilentEmitter { let dcx = DiagCtxt::new(Box::new(SilentEmitter {
@ -297,7 +297,7 @@ impl ParseSess {
} }
pub fn clone_source_map(&self) -> Lrc<SourceMap> { pub fn clone_source_map(&self) -> Lrc<SourceMap> {
self.source_map.clone() Lrc::clone(&self.source_map)
} }
pub fn buffer_lint( pub fn buffer_lint(

View File

@ -1036,7 +1036,8 @@ pub fn build_session(
sopts.unstable_opts.translate_directionality_markers, sopts.unstable_opts.translate_directionality_markers,
); );
let source_map = rustc_span::source_map::get_source_map().unwrap(); let source_map = rustc_span::source_map::get_source_map().unwrap();
let emitter = default_emitter(&sopts, registry, source_map.clone(), bundle, fallback_bundle); let emitter =
default_emitter(&sopts, registry, Lrc::clone(&source_map), bundle, fallback_bundle);
let mut dcx = let mut dcx =
DiagCtxt::new(emitter).with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings)); DiagCtxt::new(emitter).with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings));
@ -1079,7 +1080,7 @@ pub fn build_session(
let target_tlib_path = if host_triple == target_triple { let target_tlib_path = if host_triple == target_triple {
// Use the same `SearchPath` if host and target triple are identical to avoid unnecessary // Use the same `SearchPath` if host and target triple are identical to avoid unnecessary
// rescanning of the target lib path and an unnecessary allocation. // rescanning of the target lib path and an unnecessary allocation.
host_tlib_path.clone() Lrc::clone(&host_tlib_path)
} else { } else {
Lrc::new(SearchPath::from_sysroot_and_triple(&sysroot, target_triple)) Lrc::new(SearchPath::from_sysroot_and_triple(&sysroot, target_triple))
}; };

View File

@ -63,7 +63,7 @@ pub struct CachingSourceMapView<'sm> {
impl<'sm> CachingSourceMapView<'sm> { impl<'sm> CachingSourceMapView<'sm> {
pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> {
let files = source_map.files(); let files = source_map.files();
let first_file = files[0].clone(); let first_file = Lrc::clone(&files[0]);
let entry = CacheEntry { let entry = CacheEntry {
time_stamp: 0, time_stamp: 0,
line_number: 0, line_number: 0,
@ -92,7 +92,7 @@ impl<'sm> CachingSourceMapView<'sm> {
cache_entry.touch(self.time_stamp); cache_entry.touch(self.time_stamp);
let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32()); let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
return Some((cache_entry.file.clone(), cache_entry.line_number, col)); return Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col));
} }
// No cache hit ... // No cache hit ...
@ -109,7 +109,7 @@ impl<'sm> CachingSourceMapView<'sm> {
cache_entry.update(new_file_and_idx, pos, self.time_stamp); cache_entry.update(new_file_and_idx, pos, self.time_stamp);
let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32()); let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
Some((cache_entry.file.clone(), cache_entry.line_number, col)) Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col))
} }
pub fn span_data_to_lines_and_cols( pub fn span_data_to_lines_and_cols(
@ -133,7 +133,7 @@ impl<'sm> CachingSourceMapView<'sm> {
} }
( (
lo.file.clone(), Lrc::clone(&lo.file),
lo.line_number, lo.line_number,
span_data.lo - lo.line.start, span_data.lo - lo.line.start,
hi.line_number, hi.line_number,
@ -181,7 +181,7 @@ impl<'sm> CachingSourceMapView<'sm> {
lo.update(new_file_and_idx, span_data.lo, self.time_stamp); lo.update(new_file_and_idx, span_data.lo, self.time_stamp);
if !lo.line.contains(&span_data.hi) { if !lo.line.contains(&span_data.hi) {
let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); let new_file_and_idx = Some((Lrc::clone(&lo.file), lo.file_index));
let next_oldest = self.oldest_cache_entry_index_avoid(oldest); let next_oldest = self.oldest_cache_entry_index_avoid(oldest);
let hi = &mut self.line_cache[next_oldest]; let hi = &mut self.line_cache[next_oldest];
hi.update(new_file_and_idx, span_data.hi, self.time_stamp); hi.update(new_file_and_idx, span_data.hi, self.time_stamp);
@ -227,7 +227,7 @@ impl<'sm> CachingSourceMapView<'sm> {
assert_eq!(lo.file_index, hi.file_index); assert_eq!(lo.file_index, hi.file_index);
Some(( Some((
lo.file.clone(), Lrc::clone(&lo.file),
lo.line_number, lo.line_number,
span_data.lo - lo.line.start, span_data.lo - lo.line.start,
hi.line_number, hi.line_number,
@ -277,7 +277,7 @@ impl<'sm> CachingSourceMapView<'sm> {
let file = &self.source_map.files()[file_idx]; let file = &self.source_map.files()[file_idx];
if file_contains(file, pos) { if file_contains(file, pos) {
return Some((file.clone(), file_idx)); return Some((Lrc::clone(file), file_idx));
} }
} }

View File

@ -286,8 +286,8 @@ impl SourceMap {
}); });
let file = Lrc::new(file); let file = Lrc::new(file);
files.source_files.push(file.clone()); files.source_files.push(Lrc::clone(&file));
files.stable_id_to_source_file.insert(file_id, file.clone()); files.stable_id_to_source_file.insert(file_id, Lrc::clone(&file));
Ok(file) Ok(file)
} }
@ -386,7 +386,7 @@ impl SourceMap {
/// Return the SourceFile that contains the given `BytePos` /// Return the SourceFile that contains the given `BytePos`
pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> { pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
let idx = self.lookup_source_file_idx(pos); let idx = self.lookup_source_file_idx(pos);
(*self.files.borrow().source_files)[idx].clone() Lrc::clone(&(*self.files.borrow().source_files)[idx])
} }
/// Looks up source information about a `BytePos`. /// Looks up source information about a `BytePos`.
@ -468,7 +468,7 @@ impl SourceMap {
if lo != hi { if lo != hi {
return true; return true;
} }
let f = (*self.files.borrow().source_files)[lo].clone(); let f = Lrc::clone(&(*self.files.borrow().source_files)[lo]);
let lo = f.relative_position(sp.lo()); let lo = f.relative_position(sp.lo());
let hi = f.relative_position(sp.hi()); let hi = f.relative_position(sp.hi());
f.lookup_line(lo) != f.lookup_line(hi) f.lookup_line(lo) != f.lookup_line(hi)
@ -994,7 +994,7 @@ impl SourceMap {
let filename = self.path_mapping().map_filename_prefix(filename).0; let filename = self.path_mapping().map_filename_prefix(filename).0;
for sf in self.files.borrow().source_files.iter() { for sf in self.files.borrow().source_files.iter() {
if filename == sf.name { if filename == sf.name {
return Some(sf.clone()); return Some(Lrc::clone(&sf));
} }
} }
None None
@ -1003,7 +1003,7 @@ impl SourceMap {
/// For a global `BytePos`, computes the local offset within the containing `SourceFile`. /// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
let idx = self.lookup_source_file_idx(bpos); let idx = self.lookup_source_file_idx(bpos);
let sf = (*self.files.borrow().source_files)[idx].clone(); let sf = Lrc::clone(&(*self.files.borrow().source_files)[idx]);
let offset = bpos - sf.start_pos; let offset = bpos - sf.start_pos;
SourceFileAndBytePos { sf, pos: offset } SourceFileAndBytePos { sf, pos: offset }
} }

View File

@ -51,7 +51,8 @@ ENV SCRIPT \
/scripts/check-default-config-profiles.sh && \ /scripts/check-default-config-profiles.sh && \
python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
python3 ../x.py clippy bootstrap -Dwarnings && \ python3 ../x.py clippy bootstrap -Dwarnings && \
python3 ../x.py clippy compiler library -Aclippy::all -Dclippy::correctness && \ python3 ../x.py clippy library -Aclippy::all -Dclippy::correctness && \
python3 ../x.py clippy compiler -Aclippy::all -Dclippy::correctness -Dclippy::clone_on_ref_ptr && \
python3 ../x.py build --stage 0 src/tools/build-manifest && \ python3 ../x.py build --stage 0 src/tools/build-manifest && \
python3 ../x.py test --stage 0 src/tools/compiletest && \ python3 ../x.py test --stage 0 src/tools/compiletest && \
python3 ../x.py test --stage 0 core alloc std test proc_macro && \ python3 ../x.py test --stage 0 core alloc std test proc_macro && \