Auto merge of #95573 - cjgillot:lower-query, r=michaelwoerister
Make lowering a query

Split from https://github.com/rust-lang/rust/pull/88186.

This PR refactors the relationship between lowering and the resolver outputs in order to make lowering itself a query. In a first part, lowering is changed to avoid modifying the resolver outputs, by maintaining its own data structures for creating new `NodeId`s and the like. Then, the `TyCtxt` is modified to allow creating new `LocalDefId`s from inside it. This is done by:
- enclosing `Definitions` in a lock, so as to allow modification;
- creating a query `register_def` whose purpose is to declare a `LocalDefId` to the query system.

See `TyCtxt::create_def` and `TyCtxt::iter_local_def_id` for more detailed explanations of the design.
commit 0f573a0c54
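For orientation before the diff, here is a small self-contained model of the design described above. It is illustrative only: `TyCtxt`, `Definitions`, `LocalDefId`, `create_def`, and `iter_local_def_id` below are simplified stand-ins for the rustc items of the same name, not their real signatures, and the real `create_def` additionally declares the new definition to the query system via the `register_def` query mentioned in the description.

```rust
// Illustrative sketch, not rustc's actual API: the definition table sits
// behind a lock inside the shared context, so lowering can allocate new
// `LocalDefId`s through `&TyCtxt` instead of needing `&mut Definitions`.
use std::sync::Mutex;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct LocalDefId(usize);

#[derive(Default)]
struct Definitions {
    // Def-path data, heavily simplified.
    defs: Vec<String>,
}

struct TyCtxt {
    // Locked so that creation works through `&self`, as the PR does for rustc.
    definitions: Mutex<Definitions>,
}

impl TyCtxt {
    /// Stand-in for `TyCtxt::create_def`: mint a new definition from a shared
    /// reference. In rustc this also registers the def with the query system.
    fn create_def(&self, data: &str) -> LocalDefId {
        let mut defs = self.definitions.lock().unwrap();
        defs.defs.push(data.to_owned());
        LocalDefId(defs.defs.len() - 1)
    }

    /// Stand-in for `TyCtxt::iter_local_def_id`.
    fn iter_local_def_id(&self) -> impl Iterator<Item = LocalDefId> {
        let len = self.definitions.lock().unwrap().defs.len();
        (0..len).map(LocalDefId)
    }
}

fn main() {
    let tcx = TyCtxt { definitions: Mutex::new(Definitions::default()) };
    // "Lowering" only needs a shared `&tcx` to mint new ids.
    let a = tcx.create_def("desugared closure");
    let b = tcx.create_def("async body");
    assert_ne!(a, b);
    println!("{:?}", tcx.iter_local_def_id().collect::<Vec<_>>());
}
```

In the actual change, lowering itself is wired up as the `hir_crate` query (`providers.hir_crate = rustc_ast_lowering::lower_to_hir;`, visible in the diff below).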
@@ -3869,6 +3869,7 @@ name = "rustc_hir"
 version = "0.0.0"
 dependencies = [
  "odht",
+ "rustc_arena",
  "rustc_ast",
  "rustc_data_structures",
  "rustc_error_messages",
@@ -24,9 +24,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     ) -> &'hir hir::InlineAsm<'hir> {
         // Rustdoc needs to support asm! from foreign architectures: don't try
         // lowering the register constraints in this case.
-        let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
-        if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
-            struct_span_err!(self.sess, sp, E0472, "inline assembly is unsupported on this target")
+        let asm_arch =
+            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
+        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
+            struct_span_err!(
+                self.tcx.sess,
+                sp,
+                E0472,
+                "inline assembly is unsupported on this target"
+            )
                 .emit();
         }
         if let Some(asm_arch) = asm_arch {
@@ -40,9 +46,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     | asm::InlineAsmArch::RiscV32
                     | asm::InlineAsmArch::RiscV64
             );
-            if !is_stable && !self.sess.features_untracked().asm_experimental_arch {
+            if !is_stable && !self.tcx.features().asm_experimental_arch {
                 feature_err(
-                    &self.sess.parse_sess,
+                    &self.tcx.sess.parse_sess,
                     sym::asm_experimental_arch,
                     sp,
                     "inline assembly is not stable yet on this architecture",
@@ -52,17 +58,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         }
         if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
             && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
-            && !self.sess.opts.actually_rustdoc
+            && !self.tcx.sess.opts.actually_rustdoc
         {
-            self.sess
+            self.tcx
+                .sess
                 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
                 .emit();
         }
-        if asm.options.contains(InlineAsmOptions::MAY_UNWIND)
-            && !self.sess.features_untracked().asm_unwind
-        {
+        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
             feature_err(
-                &self.sess.parse_sess,
+                &self.tcx.sess.parse_sess,
                 sym::asm_unwind,
                 sp,
                 "the `may_unwind` option is unstable",
@@ -73,12 +78,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         let mut clobber_abis = FxHashMap::default();
         if let Some(asm_arch) = asm_arch {
             for (abi_name, abi_span) in &asm.clobber_abis {
-                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.sess.target, *abi_name) {
+                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
                     Ok(abi) => {
                         // If the abi was already in the list, emit an error
                         match clobber_abis.get(&abi) {
                             Some((prev_name, prev_sp)) => {
-                                let mut err = self.sess.struct_span_err(
+                                let mut err = self.tcx.sess.struct_span_err(
                                     *abi_span,
                                     &format!("`{}` ABI specified multiple times", prev_name),
                                 );
@@ -86,7 +91,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

                                 // Multiple different abi names may actually be the same ABI
                                 // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
-                                let source_map = self.sess.source_map();
+                                let source_map = self.tcx.sess.source_map();
                                 if source_map.span_to_snippet(*prev_sp)
                                     != source_map.span_to_snippet(*abi_span)
                                 {
@@ -101,7 +106,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                                 }
                             }
                             Err(&[]) => {
-                                self.sess
+                                self.tcx
+                                    .sess
                                     .struct_span_err(
                                         *abi_span,
                                         "`clobber_abi` is not supported on this target",
@@ -109,8 +115,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                                     .emit();
                             }
                             Err(supported_abis) => {
-                                let mut err =
-                                    self.sess.struct_span_err(*abi_span, "invalid ABI for `clobber_abi`");
+                                let mut err = self
+                                    .tcx
+                                    .sess
+                                    .struct_span_err(*abi_span, "invalid ABI for `clobber_abi`");
                                 let mut abis = format!("`{}`", supported_abis[0]);
                                 for m in &supported_abis[1..] {
                                     let _ = write!(abis, ", `{}`", m);
@@ -128,7 +136,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         // Lower operands to HIR. We use dummy register classes if an error
         // occurs during lowering because we still need to be able to produce a
         // valid HIR.
-        let sess = self.sess;
+        let sess = self.tcx.sess;
         let mut operands: Vec<_> = asm
             .operands
             .iter()
@@ -184,9 +192,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     }
                 }
                 InlineAsmOperand::Const { ref anon_const } => {
-                    if !self.sess.features_untracked().asm_const {
+                    if !self.tcx.features().asm_const {
                         feature_err(
-                            &self.sess.parse_sess,
+                            &sess.parse_sess,
                             sym::asm_const,
                             *op_sp,
                             "const operands for inline assembly are unstable",
@@ -198,9 +206,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     }
                 }
                 InlineAsmOperand::Sym { ref sym } => {
-                    if !self.sess.features_untracked().asm_sym {
+                    if !self.tcx.features().asm_sym {
                         feature_err(
-                            &self.sess.parse_sess,
+                            &sess.parse_sess,
                             sym::asm_sym,
                             *op_sp,
                             "sym operands for inline assembly are unstable",
@@ -159,9 +159,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             span,
             kind: hir::ExprKind::If(let_expr, then_expr, Some(else_expr)),
         });
-        if !self.sess.features_untracked().let_else {
+        if !self.tcx.features().let_else {
             feature_err(
-                &self.sess.parse_sess,
+                &self.tcx.sess.parse_sess,
                 sym::let_else,
                 local.span,
                 "`let...else` statements are unstable",
@@ -46,7 +46,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     let hir_id = self.lower_node_id(e.id);
                     return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
                 } else {
-                    self.sess
+                    self.tcx.sess
                         .struct_span_err(
                             e.span,
                             "#[rustc_box] requires precisely one argument \
@@ -207,8 +207,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
             }
             ExprKind::Underscore => {
-                self.sess
-                    .struct_span_err(
+                self.tcx
+                    .sess.struct_span_err(
                         e.span,
                         "in expressions, `_` can only be used on the left-hand side of an assignment",
                     )
@@ -245,7 +245,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
         let rest = match &se.rest {
             StructRest::Base(e) => Some(self.lower_expr(e)),
             StructRest::Rest(sp) => {
-                self.sess
+                self.tcx
+                    .sess
                     .struct_span_err(*sp, "base expression required after `..`")
                     .span_label(*sp, "add a base expression here")
                     .emit();
@@ -474,7 +475,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         } else {
             let try_span = this.mark_span_with_reason(
                 DesugaringKind::TryBlock,
-                this.sess.source_map().end_point(body.span),
+                this.tcx.sess.source_map().end_point(body.span),
                 this.allow_try_trait.clone(),
             );

@@ -653,7 +654,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             Some(hir::GeneratorKind::Async(_)) => {}
             Some(hir::GeneratorKind::Gen) | None => {
                 let mut err = struct_span_err!(
-                    self.sess,
+                    self.tcx.sess,
                     dot_await_span,
                     E0728,
                     "`await` is only allowed inside `async` functions and blocks"
@@ -878,7 +879,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             Some(hir::GeneratorKind::Gen) => {
                 if decl.inputs.len() > 1 {
                     struct_span_err!(
-                        self.sess,
+                        self.tcx.sess,
                         fn_decl_span,
                         E0628,
                         "too many parameters for a generator (expected 0 or 1 parameters)"
@@ -892,7 +893,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
             }
             None => {
                 if movability == Movability::Static {
-                    struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
+                    struct_span_err!(
+                        self.tcx.sess,
+                        fn_decl_span,
+                        E0697,
+                        "closures cannot be static"
+                    )
                         .emit();
                 }
                 None
@@ -916,7 +922,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             // FIXME(cramertj): allow `async` non-`move` closures with arguments.
             if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
                 struct_span_err!(
-                    this.sess,
+                    this.tcx.sess,
                     fn_decl_span,
                     E0708,
                     "`async` non-`move` closures with parameters are not currently supported",
@@ -1163,7 +1169,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
         );
         let fields_omitted = match &se.rest {
             StructRest::Base(e) => {
-                self.sess
+                self.tcx
+                    .sess
                     .struct_span_err(
                         e.span,
                         "functional record updates are not allowed in destructuring \
@@ -1371,7 +1378,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             Some(hir::GeneratorKind::Gen) => {}
             Some(hir::GeneratorKind::Async(_)) => {
                 struct_span_err!(
-                    self.sess,
+                    self.tcx.sess,
                     span,
                     E0727,
                     "`async` generators are not yet supported"
@@ -1516,7 +1523,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             span,
             self.allow_try_trait.clone(),
         );
-        let try_span = self.sess.source_map().end_point(span);
+        let try_span = self.tcx.sess.source_map().end_point(span);
         let try_span = self.mark_span_with_reason(
             DesugaringKind::QuestionMark,
             try_span,
@@ -1,7 +1,6 @@
 use super::ResolverAstLoweringExt;
 use super::{AstOwner, ImplTraitContext, ImplTraitPosition};
-use super::{LoweringContext, ParamMode};
-use crate::{Arena, FnDeclKind};
+use super::{FnDeclKind, LoweringContext, ParamMode};

 use rustc_ast::ptr::P;
 use rustc_ast::visit::AssocCtxt;
@@ -12,12 +11,9 @@ use rustc_errors::struct_span_err;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
 use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
-use rustc_hir::definitions::Definitions;
 use rustc_hir::PredicateOrigin;
 use rustc_index::vec::{Idx, IndexVec};
-use rustc_middle::ty::{ResolverAstLowering, ResolverOutputs};
-use rustc_session::cstore::CrateStoreDyn;
-use rustc_session::Session;
+use rustc_middle::ty::{DefIdTree, ResolverAstLowering, TyCtxt};
 use rustc_span::source_map::DesugaringKind;
 use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::Span;
@@ -27,12 +23,8 @@ use smallvec::{smallvec, SmallVec};
 use std::iter;

 pub(super) struct ItemLowerer<'a, 'hir> {
-    pub(super) sess: &'a Session,
-    pub(super) definitions: &'a mut Definitions,
-    pub(super) cstore: &'a CrateStoreDyn,
-    pub(super) resolutions: &'a ResolverOutputs,
+    pub(super) tcx: TyCtxt<'hir>,
     pub(super) resolver: &'a mut ResolverAstLowering,
-    pub(super) arena: &'hir Arena<'hir>,
     pub(super) ast_index: &'a IndexVec<LocalDefId, AstOwner<'a>>,
     pub(super) owners: &'a mut IndexVec<LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>>,
 }
@@ -65,12 +57,9 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
     ) {
         let mut lctx = LoweringContext {
             // Pseudo-globals.
-            sess: &self.sess,
-            definitions: self.definitions,
-            cstore: self.cstore,
-            resolutions: self.resolutions,
+            tcx: self.tcx,
             resolver: self.resolver,
-            arena: self.arena,
+            arena: self.tcx.hir_arena,

             // HirId handling.
             bodies: Vec::new(),
@@ -144,12 +133,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
     fn lower_assoc_item(&mut self, item: &AssocItem, ctxt: AssocCtxt) {
         let def_id = self.resolver.node_id_to_def_id[&item.id];

-        let parent_id = {
-            let parent = self.definitions.def_key(def_id).parent;
-            let local_def_index = parent.unwrap();
-            LocalDefId { local_def_index }
-        };
-
+        let parent_id = self.tcx.local_parent(def_id);
         let parent_hir = self.lower_node(parent_id).unwrap();
         self.with_lctx(item.id, |lctx| {
             // Evaluate with the lifetimes in `params` in-scope.
@@ -1278,7 +1262,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
     }

     fn error_on_invalid_abi(&self, abi: StrLit) {
-        struct_span_err!(self.sess, abi.span, E0703, "invalid ABI: found `{}`", abi.symbol)
+        struct_span_err!(self.tcx.sess, abi.span, E0703, "invalid ABI: found `{}`", abi.symbol)
             .span_label(abi.span, "invalid ABI")
             .help(&format!("valid ABIs: {}", abi::all_names().join(", ")))
             .emit();
@@ -49,18 +49,15 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{struct_span_err, Applicability};
+use rustc_errors::{struct_span_err, Applicability, Handler};
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
 use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
-use rustc_hir::definitions::{DefPathData, Definitions};
+use rustc_hir::definitions::DefPathData;
 use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate};
 use rustc_index::vec::{Idx, IndexVec};
-use rustc_middle::ty::{ResolverAstLowering, ResolverOutputs};
-use rustc_query_system::ich::StableHashingContext;
-use rustc_session::cstore::CrateStoreDyn;
+use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
 use rustc_session::parse::feature_err;
-use rustc_session::Session;
 use rustc_span::hygiene::MacroKind;
 use rustc_span::source_map::DesugaringKind;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -83,19 +80,12 @@ mod item;
 mod pat;
 mod path;

-rustc_hir::arena_types!(rustc_arena::declare_arena);
-
-struct LoweringContext<'a, 'hir: 'a> {
-    /// Used to assign IDs to HIR nodes that do not directly correspond to AST nodes.
-    sess: &'a Session,
-
-    definitions: &'a mut Definitions,
-    cstore: &'a CrateStoreDyn,
-    resolutions: &'a ResolverOutputs,
+struct LoweringContext<'a, 'hir> {
+    tcx: TyCtxt<'hir>,
     resolver: &'a mut ResolverAstLowering,

     /// Used to allocate HIR nodes.
-    arena: &'hir Arena<'hir>,
+    arena: &'hir hir::Arena<'hir>,

     /// Bodies inside the owner being lowered.
     bodies: Vec<(hir::ItemLocalId, &'hir hir::Body<'hir>)>,
@@ -391,61 +381,58 @@ fn index_crate<'a>(
 /// Compute the hash for the HIR of the full crate.
 /// This hash will then be part of the crate_hash which is stored in the metadata.
 fn compute_hir_hash(
-    sess: &Session,
-    definitions: &Definitions,
-    cstore: &CrateStoreDyn,
-    resolver: &ResolverOutputs,
+    tcx: TyCtxt<'_>,
     owners: &IndexVec<LocalDefId, hir::MaybeOwner<&hir::OwnerInfo<'_>>>,
 ) -> Fingerprint {
     let mut hir_body_nodes: Vec<_> = owners
         .iter_enumerated()
         .filter_map(|(def_id, info)| {
             let info = info.as_owner()?;
-            let def_path_hash = definitions.def_path_hash(def_id);
+            let def_path_hash = tcx.hir().def_path_hash(def_id);
             Some((def_path_hash, info))
         })
         .collect();
     hir_body_nodes.sort_unstable_by_key(|bn| bn.0);

+    tcx.with_stable_hashing_context(|mut hcx| {
         let mut stable_hasher = StableHasher::new();
-        let mut hcx = StableHashingContext::new(sess, definitions, cstore, &resolver.source_span);
         hir_body_nodes.hash_stable(&mut hcx, &mut stable_hasher);
         stable_hasher.finish()
+    })
 }

-pub fn lower_crate<'hir>(
-    sess: &Session,
-    krate: &Crate,
-    definitions: &mut Definitions,
-    cstore: &CrateStoreDyn,
-    resolutions: &ResolverOutputs,
-    mut resolver: ResolverAstLowering,
-    arena: &'hir Arena<'hir>,
-) -> &'hir hir::Crate<'hir> {
-    let _prof_timer = sess.prof.verbose_generic_activity("hir_lowering");
+pub fn lower_to_hir<'hir>(tcx: TyCtxt<'hir>, (): ()) -> hir::Crate<'hir> {
+    let sess = tcx.sess;
+    let krate = tcx.untracked_crate.steal();
+    let mut resolver = tcx.resolver_for_lowering(()).steal();

-    let ast_index = index_crate(&resolver.node_id_to_def_id, krate);
-
-    let mut owners =
-        IndexVec::from_fn_n(|_| hir::MaybeOwner::Phantom, definitions.def_index_count());
+    let ast_index = index_crate(&resolver.node_id_to_def_id, &krate);
+    let mut owners = IndexVec::from_fn_n(
+        |_| hir::MaybeOwner::Phantom,
+        tcx.definitions_untracked().def_index_count(),
+    );

     for def_id in ast_index.indices() {
         item::ItemLowerer {
-            sess,
-            definitions,
-            cstore,
-            resolutions,
+            tcx,
             resolver: &mut resolver,
-            arena,
             ast_index: &ast_index,
             owners: &mut owners,
         }
         .lower_node(def_id);
     }

-    let hir_hash = compute_hir_hash(sess, definitions, cstore, resolutions, &owners);
-    let krate = hir::Crate { owners, hir_hash };
-    arena.alloc(krate)
+    // Drop AST to free memory
+    std::mem::drop(ast_index);
+    sess.time("drop_ast", || std::mem::drop(krate));
+
+    // Discard hygiene data, which isn't required after lowering to HIR.
+    if !sess.opts.debugging_opts.keep_hygiene_data {
+        rustc_span::hygiene::clear_syntax_context_map();
+    }
+
+    let hir_hash = compute_hir_hash(tcx, &owners);
+    hir::Crate { owners, hir_hash }
 }

 #[derive(Copy, Clone, PartialEq, Debug)]
@@ -464,38 +451,25 @@ enum ParenthesizedGenericArgs {
 }

 impl<'a, 'hir> LoweringContext<'a, 'hir> {
-    fn create_stable_hashing_context(&self) -> StableHashingContext<'_> {
-        StableHashingContext::new(
-            self.sess,
-            self.definitions,
-            self.cstore,
-            &self.resolutions.source_span,
-        )
-    }
-
     fn create_def(
         &mut self,
         parent: LocalDefId,
         node_id: ast::NodeId,
         data: DefPathData,
     ) -> LocalDefId {
+        debug_assert_ne!(node_id, ast::DUMMY_NODE_ID);
         assert!(
             self.opt_local_def_id(node_id).is_none(),
             "adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
             node_id,
             data,
-            self.definitions.def_key(self.local_def_id(node_id)),
+            self.tcx.hir().def_key(self.local_def_id(node_id)),
         );

-        let def_id = self.definitions.create_def(parent, data);
+        let def_id = self.tcx.create_def(parent, data);

-        // Some things for which we allocate `LocalDefId`s don't correspond to
-        // anything in the AST, so they don't have a `NodeId`. For these cases
-        // we don't need a mapping from `NodeId` to `LocalDefId`.
-        if node_id != ast::DUMMY_NODE_ID {
         debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
         self.resolver.node_id_to_def_id.insert(node_id, def_id);
-        }

         def_id
     }
@@ -515,6 +489,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
     }

+    /// Freshen the `LoweringContext` and ready it to lower a nested item.
+    /// The lowered item is registered into `self.children`.
+    ///
+    /// This function sets up `HirId` lowering infrastructure,
+    /// and stashes the shared mutable state to avoid pollution by the closure.
     #[instrument(level = "debug", skip(self, f))]
     fn with_hir_id_owner(
         &mut self,
@@ -533,8 +512,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             std::mem::replace(&mut self.item_local_id_counter, hir::ItemLocalId::new(1));
         let current_impl_trait_defs = std::mem::take(&mut self.impl_trait_defs);
         let current_impl_trait_bounds = std::mem::take(&mut self.impl_trait_bounds);
-        // Do not reset `next_node_id` and `node_id_to_def_id` as we want to refer to the
-        // subdefinitions' nodes.
+        // Do not reset `next_node_id` and `node_id_to_def_id`:
+        // we want `f` to be able to refer to the `LocalDefId`s that the caller created.
+        // and the caller to refer to some of the subdefinitions' nodes' `LocalDefId`s.

         // Always allocate the first `HirId` for the owner itself.
         let _old = self.node_id_to_local_id.insert(owner, hir::ItemLocalId::new(0));
@@ -578,7 +559,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         bodies.sort_by_key(|(k, _)| *k);
         let bodies = SortedMap::from_presorted_elements(bodies);
         let (hash_including_bodies, hash_without_bodies) = self.hash_owner(node, &bodies);
-        let (nodes, parenting) = index::index_hir(self.sess, self.definitions, node, &bodies);
+        let (nodes, parenting) =
+            index::index_hir(self.tcx.sess, &*self.tcx.definitions_untracked(), node, &bodies);
         let nodes = hir::OwnerNodes {
             hash_including_bodies,
             hash_without_bodies,
@@ -587,10 +569,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             local_id_to_def_id,
         };
         let attrs = {
-            let mut hcx = self.create_stable_hashing_context();
+            let hash = self.tcx.with_stable_hashing_context(|mut hcx| {
                 let mut stable_hasher = StableHasher::new();
                 attrs.hash_stable(&mut hcx, &mut stable_hasher);
-            let hash = stable_hasher.finish();
+                stable_hasher.finish()
+            });
             hir::AttributeMap { map: attrs, hash }
         };

@@ -604,7 +587,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         node: hir::OwnerNode<'hir>,
         bodies: &SortedMap<hir::ItemLocalId, &'hir hir::Body<'hir>>,
     ) -> (Fingerprint, Fingerprint) {
-        let mut hcx = self.create_stable_hashing_context();
+        self.tcx.with_stable_hashing_context(|mut hcx| {
         let mut stable_hasher = StableHasher::new();
         hcx.with_hir_bodies(true, node.def_id(), bodies, |hcx| {
             node.hash_stable(hcx, &mut stable_hasher)
@@ -616,6 +599,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         });
         let hash_without_bodies = stable_hasher.finish();
         (hash_including_bodies, hash_without_bodies)
+        })
     }

     /// This method allocates a new `HirId` for the given `NodeId` and stores it in
@@ -656,9 +640,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         }
     }

+    /// Generate a new `HirId` without a backing `NodeId`.
     fn next_id(&mut self) -> hir::HirId {
-        let node_id = self.next_node_id();
-        self.lower_node_id(node_id)
+        let owner = self.current_hir_id_owner;
+        let local_id = self.item_local_id_counter;
+        assert_ne!(local_id, hir::ItemLocalId::new(0));
+        self.item_local_id_counter.increment_by(1);
+        hir::HirId { owner, local_id }
     }

     #[instrument(level = "trace", skip(self))]
@@ -691,8 +679,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         self.resolver.get_import_res(id).present_items()
     }

-    fn diagnostic(&self) -> &rustc_errors::Handler {
-        self.sess.diagnostic()
+    fn diagnostic(&self) -> &Handler {
+        self.tcx.sess.diagnostic()
     }

     /// Reuses the span but adds information like the kind of the desugaring and features that are
@@ -703,18 +691,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         span: Span,
         allow_internal_unstable: Option<Lrc<[Symbol]>>,
     ) -> Span {
-        span.mark_with_reason(
-            allow_internal_unstable,
-            reason,
-            self.sess.edition(),
-            self.create_stable_hashing_context(),
-        )
+        self.tcx.with_stable_hashing_context(|hcx| {
+            span.mark_with_reason(allow_internal_unstable, reason, self.tcx.sess.edition(), hcx)
+        })
     }

     /// Intercept all spans entering HIR.
     /// Mark a span as relative to the current owning item.
     fn lower_span(&self, span: Span) -> Span {
-        if self.sess.opts.debugging_opts.incremental_relative_spans {
+        if self.tcx.sess.opts.debugging_opts.incremental_relative_spans {
             span.with_parent(Some(self.current_hir_id_owner))
         } else {
             // Do not make spans relative when not using incremental compilation.
@@ -1061,7 +1046,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }

     fn emit_bad_parenthesized_trait_in_assoc_ty(&self, data: &ParenthesizedArgs) {
-        let mut err = self.sess.struct_span_err(
+        let mut err = self.tcx.sess.struct_span_err(
             data.span,
             "parenthesized generic arguments cannot be used in associated type constraints",
         );
@@ -1106,7 +1091,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
             ast::GenericArg::Type(ty) => {
                 match ty.kind {
-                    TyKind::Infer if self.sess.features_untracked().generic_arg_infer => {
+                    TyKind::Infer if self.tcx.features().generic_arg_infer => {
                         return GenericArg::Infer(hir::InferArg {
                             hir_id: self.lower_node_id(ty.id),
                             span: self.lower_span(ty.span),
@@ -1203,7 +1188,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             } else {
                 self.next_node_id()
             };
-            let span = self.sess.source_map().next_point(t.span.shrink_to_lo());
+            let span = self.tcx.sess.source_map().next_point(t.span.shrink_to_lo());
             Lifetime { ident: Ident::new(kw::UnderscoreLifetime, span), id }
         });
         let lifetime = self.lower_lifetime(&region);
@@ -1307,7 +1292,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             }
             ImplTraitContext::Disallowed(position) => {
                 let mut err = struct_span_err!(
-                    self.sess,
+                    self.tcx.sess,
                     t.span,
                     E0562,
                     "`impl Trait` only allowed in function and inherent method return types, not in {}",
@@ -1320,7 +1305,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             }
             TyKind::MacCall(_) => panic!("`TyKind::MacCall` should have been expanded by now"),
             TyKind::CVarArgs => {
-                self.sess.delay_span_bug(
+                self.tcx.sess.delay_span_bug(
                     t.span,
                     "`TyKind::CVarArgs` should have been handled elsewhere",
                 );
@@ -1925,7 +1910,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             hir_id,
             name,
             span: self.lower_span(param.span()),
-            pure_wrt_drop: self.sess.contains_name(&param.attrs, sym::may_dangle),
+            pure_wrt_drop: self.tcx.sess.contains_name(&param.attrs, sym::may_dangle),
             kind,
             colon_span: param.colon_span.map(|s| self.lower_span(s)),
         }
@@ -2067,11 +2052,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     fn lower_array_length(&mut self, c: &AnonConst) -> hir::ArrayLen {
         match c.value.kind {
             ExprKind::Underscore => {
-                if self.sess.features_untracked().generic_arg_infer {
+                if self.tcx.features().generic_arg_infer {
                     hir::ArrayLen::Infer(self.lower_node_id(c.id), c.value.span)
                 } else {
                     feature_err(
-                        &self.sess.parse_sess,
+                        &self.tcx.sess.parse_sess,
                         sym::generic_arg_infer,
                         c.value.span,
                         "using `_` for array lengths is unstable",
@@ -133,7 +133,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

         // We should've returned in the for loop above.

-        self.sess.diagnostic().span_bug(
+        self.diagnostic().span_bug(
             p.span,
             &format!(
                 "lower_qpath: no final extension segment in {}..{}",
@@ -193,7 +193,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             GenericArgs::Parenthesized(ref data) => match parenthesized_generic_args {
                 ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
                 ParenthesizedGenericArgs::Err => {
-                    let mut err = struct_span_err!(self.sess, data.span, E0214, "{}", msg);
+                    let mut err = struct_span_err!(self.tcx.sess, data.span, E0214, "{}", msg);
                     err.span_label(data.span, "only `Fn` traits may use parentheses");
                     // Suggest replacing parentheses with angle brackets `Trait(params...)` to `Trait<params...>`
                     if !data.inputs.is_empty() {
@@ -93,8 +93,9 @@ impl<'tcx> UniqueTypeId<'tcx> {
     /// Right now this takes the form of a hex-encoded opaque hash value.
     pub fn generate_unique_id_string(self, tcx: TyCtxt<'tcx>) -> String {
         let mut hasher = StableHasher::new();
-        let mut hcx = tcx.create_stable_hashing_context();
-        hcx.while_hashing_spans(false, |hcx| self.hash_stable(hcx, &mut hasher));
+        tcx.with_stable_hashing_context(|mut hcx| {
+            hcx.while_hashing_spans(false, |hcx| self.hash_stable(hcx, &mut hasher))
+        });
         hasher.finish::<Fingerprint>().to_hex()
     }

@@ -701,16 +701,20 @@ fn push_const_param<'tcx>(tcx: TyCtxt<'tcx>, ct: ty::Const<'tcx>, output: &mut S
             // If we cannot evaluate the constant to a known type, we fall back
             // to emitting a stable hash value of the constant. This isn't very pretty
             // but we get a deterministic, virtually unique value for the constant.
-            let hcx = &mut tcx.create_stable_hashing_context();
-            let mut hasher = StableHasher::new();
-            let ct = ct.eval(tcx, ty::ParamEnv::reveal_all());
-            hcx.while_hashing_spans(false, |hcx| ct.to_valtree().hash_stable(hcx, &mut hasher));
+            //
             // Let's only emit 64 bits of the hash value. That should be plenty for
             // avoiding collisions and will make the emitted type names shorter.
+            let hash_short = tcx.with_stable_hashing_context(|mut hcx| {
+                let mut hasher = StableHasher::new();
+                let ct = ct.eval(tcx, ty::ParamEnv::reveal_all());
+                hcx.while_hashing_spans(false, |hcx| {
+                    ct.to_valtree().hash_stable(hcx, &mut hasher)
+                });
             // Note: Don't use `StableHashResult` impl of `u64` here directly, since that
             // would lead to endianness problems.
             let hash: u128 = hasher.finish();
-            let hash_short = (hash.to_le() as u64).to_le();
+                (hash.to_le() as u64).to_le()
+            });

             if cpp_like_debuginfo(tcx) {
                 write!(output, "CONST${:x}", hash_short)
@@ -10,6 +10,7 @@
 #![feature(array_windows)]
 #![feature(associated_type_bounds)]
 #![feature(auto_traits)]
+#![feature(cell_leak)]
 #![feature(control_flow_enum)]
 #![feature(extend_one)]
 #![feature(let_else)]
@@ -539,6 +539,33 @@ impl<T> RwLock<T> {
     pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
         self.write()
     }
+
+    #[cfg(not(parallel_compiler))]
+    #[inline(always)]
+    pub fn clone_guard<'a>(rg: &ReadGuard<'a, T>) -> ReadGuard<'a, T> {
+        ReadGuard::clone(rg)
+    }
+
+    #[cfg(parallel_compiler)]
+    #[inline(always)]
+    pub fn clone_guard<'a>(rg: &ReadGuard<'a, T>) -> ReadGuard<'a, T> {
+        ReadGuard::rwlock(&rg).read()
+    }
+
+    #[cfg(not(parallel_compiler))]
+    #[inline(always)]
+    pub fn leak(&self) -> &T {
+        ReadGuard::leak(self.read())
+    }
+
+    #[cfg(parallel_compiler)]
+    #[inline(always)]
+    pub fn leak(&self) -> &T {
+        let guard = self.read();
+        let ret = unsafe { &*(&*guard as *const T) };
+        std::mem::forget(guard);
+        ret
+    }
 }

 // FIXME: Probably a bad idea
@@ -7,6 +7,7 @@ edition = "2021"
 doctest = false

 [dependencies]
+rustc_arena = { path = "../rustc_arena" }
 rustc_target = { path = "../rustc_target" }
 rustc_macros = { path = "../rustc_macros" }
 rustc_data_structures = { path = "../rustc_data_structures" }
@@ -9,7 +9,7 @@ macro_rules! arena_types {
             // HIR types
             [] hir_krate: rustc_hir::Crate<'tcx>,
             [] arm: rustc_hir::Arm<'tcx>,
-            [] asm_operand: (rustc_hir::InlineAsmOperand<'tcx>, Span),
+            [] asm_operand: (rustc_hir::InlineAsmOperand<'tcx>, rustc_span::Span),
             [] asm_template: rustc_ast::InlineAsmTemplatePiece,
             [] attribute: rustc_ast::Attribute,
             [] block: rustc_hir::Block<'tcx>,
@@ -18,6 +18,8 @@ extern crate rustc_macros;
 #[macro_use]
 extern crate rustc_data_structures;

+extern crate self as rustc_hir;
+
 mod arena;
 pub mod def;
 pub mod def_path_hash_map;
@@ -41,3 +43,5 @@ pub use hir_id::*;
 pub use lang_items::{LangItem, LanguageItems};
 pub use stable_hash_impls::HashStableContext;
 pub use target::{MethodKind, Target};
+
+arena_types!(rustc_arena::declare_arena);
@@ -14,7 +14,6 @@ use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan, PResult};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
 use rustc_hir::def_id::{StableCrateId, LOCAL_CRATE};
 use rustc_hir::definitions::Definitions;
-use rustc_hir::Crate;
 use rustc_lint::{EarlyCheckNode, LintStore};
 use rustc_metadata::creader::CStore;
 use rustc_metadata::{encode_metadata, EncodedMetadata};
@@ -482,37 +481,6 @@ pub fn configure_and_expand(
     Ok(krate)
 }

-fn lower_to_hir<'tcx>(
-    sess: &Session,
-    definitions: &mut Definitions,
-    cstore: &CrateStoreDyn,
-    resolutions: &ty::ResolverOutputs,
-    resolver: ty::ResolverAstLowering,
-    krate: Rc<ast::Crate>,
-    arena: &'tcx rustc_ast_lowering::Arena<'tcx>,
-) -> &'tcx Crate<'tcx> {
-    // Lower AST to HIR.
-    let hir_crate = rustc_ast_lowering::lower_crate(
-        sess,
-        &krate,
-        definitions,
-        cstore,
-        resolutions,
-        resolver,
-        arena,
-    );
-
-    // Drop AST to free memory
-    sess.time("drop_ast", || std::mem::drop(krate));
-
-    // Discard hygiene data, which isn't required after lowering to HIR.
-    if !sess.opts.debugging_opts.keep_hygiene_data {
-        rustc_span::hygiene::clear_syntax_context_map();
-    }
-
-    hir_crate
-}
-
 // Returns all the paths that correspond to generated files.
 fn generated_output_paths(
     sess: &Session,
@@ -777,6 +745,7 @@ pub fn prepare_outputs(
 pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
     let providers = &mut Providers::default();
     providers.analysis = analysis;
+    providers.hir_crate = rustc_ast_lowering::lower_to_hir;
     proc_macro_decls::provide(providers);
     rustc_const_eval::provide(providers);
     rustc_middle::hir::provide(providers);
@@ -823,7 +792,7 @@ impl<'tcx> QueryContext<'tcx> {
 pub fn create_global_ctxt<'tcx>(
     compiler: &'tcx Compiler,
     lint_store: Lrc<LintStore>,
-    krate: Rc<ast::Crate>,
+    krate: Lrc<ast::Crate>,
     dep_graph: DepGraph,
     resolver: Rc<RefCell<BoxedResolver>>,
     outputs: OutputFilenames,
@@ -831,29 +800,17 @@ pub fn create_global_ctxt<'tcx>(
     queries: &'tcx OnceCell<TcxQueries<'tcx>>,
     global_ctxt: &'tcx OnceCell<GlobalCtxt<'tcx>>,
     arena: &'tcx WorkerLocal<Arena<'tcx>>,
-    hir_arena: &'tcx WorkerLocal<rustc_ast_lowering::Arena<'tcx>>,
+    hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
 ) -> QueryContext<'tcx> {
     // We're constructing the HIR here; we don't care what we will
     // read, since we haven't even constructed the *input* to
     // incr. comp. yet.
     dep_graph.assert_ignored();

-    let (mut definitions, cstore, resolver_outputs, resolver_for_lowering) =
+    let (definitions, cstore, resolver_outputs, resolver_for_lowering) =
         BoxedResolver::to_resolver_outputs(resolver);

     let sess = &compiler.session();

-    // Lower AST to HIR.
-    let krate = lower_to_hir(
-        sess,
-        &mut definitions,
-        &*cstore,
-        &resolver_outputs,
-        resolver_for_lowering,
-        krate,
-        hir_arena,
-    );
-
     let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);

     let codegen_backend = compiler.codegen_backend();
@@ -877,9 +834,11 @@ pub fn create_global_ctxt<'tcx>(
         sess,
         lint_store,
         arena,
+        hir_arena,
         definitions,
         cstore,
         resolver_outputs,
+        resolver_for_lowering,
         krate,
         dep_graph,
         queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
@ -72,13 +72,13 @@ pub struct Queries<'tcx> {
|
|||||||
queries: OnceCell<TcxQueries<'tcx>>,
|
queries: OnceCell<TcxQueries<'tcx>>,
|
||||||
|
|
||||||
arena: WorkerLocal<Arena<'tcx>>,
|
arena: WorkerLocal<Arena<'tcx>>,
|
||||||
hir_arena: WorkerLocal<rustc_ast_lowering::Arena<'tcx>>,
|
hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,
|
||||||
|
|
||||||
dep_graph_future: Query<Option<DepGraphFuture>>,
|
dep_graph_future: Query<Option<DepGraphFuture>>,
|
||||||
parse: Query<ast::Crate>,
|
parse: Query<ast::Crate>,
|
||||||
crate_name: Query<String>,
|
crate_name: Query<String>,
|
||||||
register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
|
register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
|
||||||
expansion: Query<(Rc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>,
|
expansion: Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>,
|
||||||
dep_graph: Query<DepGraph>,
|
dep_graph: Query<DepGraph>,
|
||||||
prepare_outputs: Query<OutputFilenames>,
|
prepare_outputs: Query<OutputFilenames>,
|
||||||
global_ctxt: Query<QueryContext<'tcx>>,
|
global_ctxt: Query<QueryContext<'tcx>>,
|
||||||
@ -92,7 +92,7 @@ impl<'tcx> Queries<'tcx> {
|
|||||||
gcx: OnceCell::new(),
|
gcx: OnceCell::new(),
|
||||||
queries: OnceCell::new(),
|
queries: OnceCell::new(),
|
||||||
arena: WorkerLocal::new(|_| Arena::default()),
|
arena: WorkerLocal::new(|_| Arena::default()),
|
||||||
hir_arena: WorkerLocal::new(|_| rustc_ast_lowering::Arena::default()),
|
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
|
||||||
dep_graph_future: Default::default(),
|
dep_graph_future: Default::default(),
|
||||||
parse: Default::default(),
|
parse: Default::default(),
|
||||||
crate_name: Default::default(),
|
crate_name: Default::default(),
|
||||||
@ -164,7 +164,7 @@ impl<'tcx> Queries<'tcx> {
|
|||||||
|
|
||||||
pub fn expansion(
|
pub fn expansion(
|
||||||
&self,
|
&self,
|
||||||
) -> Result<&Query<(Rc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>> {
|
) -> Result<&Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>> {
|
||||||
tracing::trace!("expansion");
|
tracing::trace!("expansion");
|
||||||
self.expansion.compute(|| {
|
self.expansion.compute(|| {
|
||||||
let crate_name = self.crate_name()?.peek().clone();
|
let crate_name = self.crate_name()?.peek().clone();
|
||||||
@ -180,7 +180,7 @@ impl<'tcx> Queries<'tcx> {
|
|||||||
let krate = resolver.access(|resolver| {
|
let krate = resolver.access(|resolver| {
|
||||||
passes::configure_and_expand(sess, &lint_store, krate, &crate_name, resolver)
|
passes::configure_and_expand(sess, &lint_store, krate, &crate_name, resolver)
|
||||||
})?;
|
})?;
|
||||||
Ok((Rc::new(krate), Rc::new(RefCell::new(resolver)), lint_store))
|
Ok((Lrc::new(krate), Rc::new(RefCell::new(resolver)), lint_store))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -34,7 +34,7 @@ use tracing::debug;

 /// Extract the `LintStore` from the query context.
 /// This function exists because we've erased `LintStore` as `dyn Any` in the context.
-pub(crate) fn unerased_lint_store(tcx: TyCtxt<'_>) -> &LintStore {
+pub fn unerased_lint_store(tcx: TyCtxt<'_>) -> &LintStore {
     let store: &dyn Any = &*tcx.lint_store;
     store.downcast_ref().unwrap()
 }

@@ -99,7 +99,7 @@ pub use builtin::SoftLints;
 pub use context::{CheckLintNameResult, FindLintError, LintStore};
 pub use context::{EarlyContext, LateContext, LintContext};
 pub use early::{check_ast_node, EarlyCheckNode};
-pub use late::check_crate;
+pub use late::{check_crate, unerased_lint_store};
 pub use passes::{EarlyLintPass, LateLintPass};
 pub use rustc_session::lint::Level::{self, *};
 pub use rustc_session::lint::{BufferedEarlyLint, FutureIncompatibleInfo, Lint, LintId};

@@ -423,7 +423,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     }

     fn encode_def_path_table(&mut self) {
-        let table = self.tcx.definitions_untracked().def_path_table();
+        let table = self.tcx.def_path_table();
         if self.is_proc_macro {
             for def_index in std::iter::once(CRATE_DEF_INDEX)
                 .chain(self.tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index))

@@ -443,9 +443,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     }

     fn encode_def_path_hash_map(&mut self) -> LazyValue<DefPathHashMapRef<'static>> {
-        self.lazy(DefPathHashMapRef::BorrowedFromTcx(
-            self.tcx.definitions_untracked().def_path_hash_to_def_index_map(),
-        ))
+        self.lazy(DefPathHashMapRef::BorrowedFromTcx(self.tcx.def_path_hash_to_def_index_map()))
     }

     fn encode_source_map(&mut self) -> LazyArray<rustc_span::SourceFile> {

@@ -614,7 +612,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         let interpret_alloc_index_bytes = self.position() - i;

         // Encode the proc macro data. This affects 'tables',
-        // so we need to do this before we encode the tables
+        // so we need to do this before we encode the tables.
+        // This overwrites def_keys, so it must happen after encode_def_path_table.
         i = self.position();
         let proc_macro_data = self.encode_proc_macros();
         let proc_macro_data_bytes = self.position() - i;

@@ -992,8 +991,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
             return;
         }
         let tcx = self.tcx;
-        let hir = tcx.hir();
-        for local_id in hir.iter_local_def_id() {
+        for local_id in tcx.iter_local_def_id() {
             let def_id = local_id.to_def_id();
             let def_kind = tcx.opt_def_kind(local_id);
             let Some(def_kind) = def_kind else { continue };

@@ -1854,12 +1852,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         debug!("EncodeContext::encode_traits_and_impls()");
         empty_proc_macro!(self);
         let tcx = self.tcx;
-        let mut ctx = tcx.create_stable_hashing_context();
         let mut all_impls: Vec<_> = tcx.crate_inherent_impls(()).incoherent_impls.iter().collect();
+        tcx.with_stable_hashing_context(|mut ctx| {
             all_impls.sort_by_cached_key(|&(&simp, _)| {
                 let mut hasher = StableHasher::new();
                 simp.hash_stable(&mut ctx, &mut hasher);
-                hasher.finish::<Fingerprint>();
+                hasher.finish::<Fingerprint>()
+            })
         });
         let all_impls: Vec<_> = all_impls
             .into_iter()

@@ -183,6 +183,9 @@ rustc_dep_node_append!([define_dep_nodes!][ <'tcx>
     // We use this for most things when incr. comp. is turned off.
     [] Null,

+    // We use this to create a forever-red node.
+    [] Red,
+
     [anon] TraitSelect,

     // WARNING: if `Symbol` is changed, make sure you update `make_compile_codegen_unit` below.

@@ -23,6 +23,7 @@ pub type EdgeFilter = rustc_query_system::dep_graph::debug::EdgeFilter<DepKind>;

 impl rustc_query_system::dep_graph::DepKind for DepKind {
     const NULL: Self = DepKind::Null;
+    const RED: Self = DepKind::Red;

     fn debug_node(node: &DepNode, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "{:?}(", node.kind)?;
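The new `Red` dep node kind backs the "forever-red" node: a node that is red in every incremental session, so any query that records a read of it is unconditionally re-executed. This is what lets untracked side effects such as `TyCtxt::create_def` (later in this diff) remain sound. A deliberately tiny, self-contained model of the idea, with toy types rather than the real dep graph:

use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Node {
    Red,                       // the forever-red node
    Query(&'static str),       // stand-in for an ordinary query node
}

struct DepGraph {
    reads: Vec<(Node, Node)>,  // (reader, read) edges recorded during execution
    red: HashSet<Node>,        // nodes known to be red this session
}

impl DepGraph {
    fn new() -> Self {
        let mut red = HashSet::new();
        red.insert(Node::Red); // red in every session, by construction
        DepGraph { reads: Vec::new(), red }
    }
    fn read(&mut self, reader: Node, read: Node) {
        self.reads.push((reader, read));
    }
    fn must_rerun(&self, q: Node) -> bool {
        self.reads.iter().any(|&(r, d)| r == q && self.red.contains(&d))
    }
}

fn main() {
    let mut g = DepGraph::new();
    // A query that performs an untracked side effect records a read of the
    // forever-red node, so the next incremental session re-runs it.
    g.read(Node::Query("lowering"), Node::Red);
    assert!(g.must_rerun(Node::Query("lowering")));
    println!("lowering will be re-executed");
}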
@@ -71,8 +72,8 @@ impl<'tcx> DepContext for TyCtxt<'tcx> {
     type DepKind = DepKind;

     #[inline]
-    fn create_stable_hashing_context(&self) -> StableHashingContext<'_> {
-        TyCtxt::create_stable_hashing_context(*self)
+    fn with_stable_hashing_context<R>(&self, f: impl FnOnce(StableHashingContext<'_>) -> R) -> R {
+        TyCtxt::with_stable_hashing_context(*self, f)
     }

     #[inline]

@@ -218,13 +218,6 @@ impl<'hir> Map<'hir> {
         self.tcx.local_def_id_to_hir_id(def_id)
     }

-    pub fn iter_local_def_id(self) -> impl Iterator<Item = LocalDefId> + 'hir {
-        // Create a dependency to the crate to be sure we re-execute this when the amount of
-        // definitions change.
-        self.tcx.ensure().hir_crate(());
-        self.tcx.definitions_untracked().iter_local_def_id()
-    }
-
     /// Do not call this function directly. The query should be called.
     pub(super) fn opt_def_kind(self, local_def_id: LocalDefId) -> Option<DefKind> {
         let hir_id = self.local_def_id_to_hir_id(local_def_id);

@@ -1142,13 +1135,13 @@ pub(super) fn crate_hash(tcx: TyCtxt<'_>, crate_num: CrateNum) -> Svh {

     source_file_names.sort_unstable();

-    let mut hcx = tcx.create_stable_hashing_context();
+    let crate_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| {
         let mut stable_hasher = StableHasher::new();
         hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher);
         upstream_crates.hash_stable(&mut hcx, &mut stable_hasher);
         source_file_names.hash_stable(&mut hcx, &mut stable_hasher);
         if tcx.sess.opts.debugging_opts.incremental_relative_spans {
-            let definitions = &tcx.definitions_untracked();
+            let definitions = tcx.definitions_untracked();
             let mut owner_spans: Vec<_> = krate
                 .owners
                 .iter_enumerated()

@@ -1168,8 +1161,9 @@ pub(super) fn crate_hash(tcx: TyCtxt<'_>, crate_num: CrateNum) -> Svh {
         // Hash visibility information since it does not appear in HIR.
         resolutions.visibilities.hash_stable(&mut hcx, &mut stable_hasher);
         resolutions.has_pub_restricted.hash_stable(&mut hcx, &mut stable_hasher);
+        stable_hasher.finish()
+    });

-    let crate_hash: Fingerprint = stable_hasher.finish();
     Svh::new(crate_hash.to_smaller_hash())
 }

@@ -111,7 +111,6 @@ pub fn provide(providers: &mut Providers) {
         let hir = tcx.hir();
         hir.get_module_parent_node(hir.local_def_id_to_hir_id(id))
     };
-    providers.hir_crate = |tcx, ()| tcx.untracked_crate;
     providers.hir_crate_items = map::hir_crate_items;
     providers.crate_hash = map::crate_hash;
     providers.hir_module_items = map::hir_module_items;

@@ -26,6 +26,12 @@ rustc_queries! {
         desc { "get the resolver outputs" }
     }

+    query resolver_for_lowering(_: ()) -> &'tcx Steal<ty::ResolverAstLowering> {
+        eval_always
+        no_hash
+        desc { "get the resolver for lowering" }
+    }
+
     /// Return the span for a definition.
     /// Contrary to `def_span` below, this query returns the full absolute span of the definition.
     /// This span is meant for dep-tracking rather than diagnostics. It should not be used outside

@@ -40,7 +46,8 @@ rustc_queries! {
     /// This is because the `hir_crate` query gives you access to all other items.
     /// To avoid this fate, do not call `tcx.hir().krate()`; instead,
     /// prefer wrappers like `tcx.visit_all_items_in_krate()`.
-    query hir_crate(key: ()) -> &'tcx Crate<'tcx> {
+    query hir_crate(key: ()) -> Crate<'tcx> {
+        storage(ArenaCacheSelector<'tcx>)
         eval_always
         desc { "get the crate HIR" }
     }
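The new `resolver_for_lowering` query is `eval_always`, `no_hash`, and returns a `Steal`: lowering is expected to consume the resolver state exactly once, and any read after that point is a bug. As a rough illustration of that contract, here is a simplified stand-in for `Steal` built on `std::sync::RwLock` (the real type lives in `rustc_data_structures::steal`; this is a sketch, not its actual code):

use std::sync::RwLock;

struct Steal<T> {
    value: RwLock<Option<T>>,
}

impl<T> Steal<T> {
    fn new(value: T) -> Self {
        Steal { value: RwLock::new(Some(value)) }
    }

    /// Read access; panics if the value was already stolen.
    fn borrow(&self) -> std::sync::RwLockReadGuard<'_, Option<T>> {
        let guard = self.value.read().unwrap();
        assert!(guard.is_some(), "attempted to read a stolen value");
        guard
    }

    /// Consume the value; panics on a second call.
    fn steal(&self) -> T {
        self.value.write().unwrap().take().expect("value already stolen")
    }
}

fn main() {
    let resolver = Steal::new(vec!["resolver", "state"]);
    // Queries can hand out borrows...
    println!("peek: {:?}", resolver.borrow().as_ref().unwrap());
    // ...until lowering steals the value and owns it outright.
    let owned = resolver.steal();
    println!("lowering owns: {:?}", owned);
}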
@ -32,12 +32,13 @@ use rustc_data_structures::profiling::SelfProfilerRef;
|
|||||||
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
|
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
|
||||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||||
use rustc_data_structures::steal::Steal;
|
use rustc_data_structures::steal::Steal;
|
||||||
use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal};
|
use rustc_data_structures::sync::{self, Lock, Lrc, ReadGuard, RwLock, WorkerLocal};
|
||||||
use rustc_data_structures::vec_map::VecMap;
|
use rustc_data_structures::vec_map::VecMap;
|
||||||
use rustc_errors::{DecorateLint, ErrorGuaranteed, LintDiagnosticBuilder, MultiSpan};
|
use rustc_errors::{DecorateLint, ErrorGuaranteed, LintDiagnosticBuilder, MultiSpan};
|
||||||
use rustc_hir as hir;
|
use rustc_hir as hir;
|
||||||
use rustc_hir::def::{DefKind, Res};
|
use rustc_hir::def::{DefKind, Res};
|
||||||
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
|
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
|
||||||
|
use rustc_hir::definitions::Definitions;
|
||||||
use rustc_hir::intravisit::Visitor;
|
use rustc_hir::intravisit::Visitor;
|
||||||
use rustc_hir::lang_items::LangItem;
|
use rustc_hir::lang_items::LangItem;
|
||||||
use rustc_hir::{
|
use rustc_hir::{
|
||||||
@ -1045,6 +1046,7 @@ impl<'tcx> Deref for TyCtxt<'tcx> {
|
|||||||
|
|
||||||
pub struct GlobalCtxt<'tcx> {
|
pub struct GlobalCtxt<'tcx> {
|
||||||
pub arena: &'tcx WorkerLocal<Arena<'tcx>>,
|
pub arena: &'tcx WorkerLocal<Arena<'tcx>>,
|
||||||
|
pub hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
|
||||||
|
|
||||||
interners: CtxtInterners<'tcx>,
|
interners: CtxtInterners<'tcx>,
|
||||||
|
|
||||||
@ -1069,13 +1071,15 @@ pub struct GlobalCtxt<'tcx> {
|
|||||||
/// Common consts, pre-interned for your convenience.
|
/// Common consts, pre-interned for your convenience.
|
||||||
pub consts: CommonConsts<'tcx>,
|
pub consts: CommonConsts<'tcx>,
|
||||||
|
|
||||||
definitions: rustc_hir::definitions::Definitions,
|
definitions: RwLock<Definitions>,
|
||||||
cstore: Box<CrateStoreDyn>,
|
cstore: Box<CrateStoreDyn>,
|
||||||
|
|
||||||
/// Output of the resolver.
|
/// Output of the resolver.
|
||||||
pub(crate) untracked_resolutions: ty::ResolverOutputs,
|
pub(crate) untracked_resolutions: ty::ResolverOutputs,
|
||||||
|
untracked_resolver_for_lowering: Steal<ty::ResolverAstLowering>,
|
||||||
pub(crate) untracked_crate: &'tcx hir::Crate<'tcx>,
|
/// The entire crate as AST. This field serves as the input for the hir_crate query,
|
||||||
|
/// which lowers it from AST to HIR. It must not be read or used by anything else.
|
||||||
|
pub untracked_crate: Steal<Lrc<ast::Crate>>,
|
||||||
|
|
||||||
/// This provides access to the incremental compilation on-disk cache for query results.
|
/// This provides access to the incremental compilation on-disk cache for query results.
|
||||||
/// Do not access this directly. It is only meant to be used by
|
/// Do not access this directly. It is only meant to be used by
|
||||||
@ -1233,10 +1237,12 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
s: &'tcx Session,
|
s: &'tcx Session,
|
||||||
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
|
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
|
||||||
arena: &'tcx WorkerLocal<Arena<'tcx>>,
|
arena: &'tcx WorkerLocal<Arena<'tcx>>,
|
||||||
definitions: rustc_hir::definitions::Definitions,
|
hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
|
||||||
|
definitions: Definitions,
|
||||||
cstore: Box<CrateStoreDyn>,
|
cstore: Box<CrateStoreDyn>,
|
||||||
untracked_resolutions: ty::ResolverOutputs,
|
untracked_resolutions: ty::ResolverOutputs,
|
||||||
krate: &'tcx hir::Crate<'tcx>,
|
untracked_resolver_for_lowering: ty::ResolverAstLowering,
|
||||||
|
krate: Lrc<ast::Crate>,
|
||||||
dep_graph: DepGraph,
|
dep_graph: DepGraph,
|
||||||
on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
|
on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
|
||||||
queries: &'tcx dyn query::QueryEngine<'tcx>,
|
queries: &'tcx dyn query::QueryEngine<'tcx>,
|
||||||
@ -1263,16 +1269,18 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
sess: s,
|
sess: s,
|
||||||
lint_store,
|
lint_store,
|
||||||
arena,
|
arena,
|
||||||
|
hir_arena,
|
||||||
interners,
|
interners,
|
||||||
dep_graph,
|
dep_graph,
|
||||||
definitions,
|
definitions: RwLock::new(definitions),
|
||||||
cstore,
|
cstore,
|
||||||
untracked_resolutions,
|
|
||||||
prof: s.prof.clone(),
|
prof: s.prof.clone(),
|
||||||
types: common_types,
|
types: common_types,
|
||||||
lifetimes: common_lifetimes,
|
lifetimes: common_lifetimes,
|
||||||
consts: common_consts,
|
consts: common_consts,
|
||||||
untracked_crate: krate,
|
untracked_resolutions,
|
||||||
|
untracked_resolver_for_lowering: Steal::new(untracked_resolver_for_lowering),
|
||||||
|
untracked_crate: Steal::new(krate),
|
||||||
on_disk_cache,
|
on_disk_cache,
|
||||||
queries,
|
queries,
|
||||||
query_caches: query::QueryCaches::default(),
|
query_caches: query::QueryCaches::default(),
|
||||||
@ -1368,7 +1376,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
|
pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey {
|
||||||
// Accessing the DefKey is ok, since it is part of DefPathHash.
|
// Accessing the DefKey is ok, since it is part of DefPathHash.
|
||||||
if let Some(id) = id.as_local() {
|
if let Some(id) = id.as_local() {
|
||||||
self.definitions.def_key(id)
|
self.definitions_untracked().def_key(id)
|
||||||
} else {
|
} else {
|
||||||
self.cstore.def_key(id)
|
self.cstore.def_key(id)
|
||||||
}
|
}
|
||||||
@ -1382,7 +1390,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
|
pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath {
|
||||||
// Accessing the DefPath is ok, since it is part of DefPathHash.
|
// Accessing the DefPath is ok, since it is part of DefPathHash.
|
||||||
if let Some(id) = id.as_local() {
|
if let Some(id) = id.as_local() {
|
||||||
self.definitions.def_path(id)
|
self.definitions_untracked().def_path(id)
|
||||||
} else {
|
} else {
|
||||||
self.cstore.def_path(id)
|
self.cstore.def_path(id)
|
||||||
}
|
}
|
||||||
@ -1392,7 +1400,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash {
|
pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash {
|
||||||
// Accessing the DefPathHash is ok, it is incr. comp. stable.
|
// Accessing the DefPathHash is ok, it is incr. comp. stable.
|
||||||
if let Some(def_id) = def_id.as_local() {
|
if let Some(def_id) = def_id.as_local() {
|
||||||
self.definitions.def_path_hash(def_id)
|
self.definitions_untracked().def_path_hash(def_id)
|
||||||
} else {
|
} else {
|
||||||
self.cstore.def_path_hash(def_id)
|
self.cstore.def_path_hash(def_id)
|
||||||
}
|
}
|
||||||
@ -1429,7 +1437,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||||||
// If this is a DefPathHash from the local crate, we can look up the
|
// If this is a DefPathHash from the local crate, we can look up the
|
||||||
// DefId in the tcx's `Definitions`.
|
// DefId in the tcx's `Definitions`.
|
||||||
if stable_crate_id == self.sess.local_stable_crate_id() {
|
if stable_crate_id == self.sess.local_stable_crate_id() {
|
||||||
self.definitions.local_def_path_hash_to_def_id(hash, err).to_def_id()
|
self.definitions.read().local_def_path_hash_to_def_id(hash, err).to_def_id()
|
||||||
} else {
|
} else {
|
||||||
// If this is a DefPathHash from an upstream crate, let the CrateStore map
|
// If this is a DefPathHash from an upstream crate, let the CrateStore map
|
||||||
// it to a DefId.
|
// it to a DefId.
|
||||||
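With `Definitions` now behind an `RwLock`, ordinary lookups such as `def_key` go through a read guard, while `create_def` (added in the next hunk) takes the write lock, which is what allows new `LocalDefId`s to be minted after the `TyCtxt` has been built. A std-only model of that read/write split, using hypothetical toy types rather than the compiler's:

use std::sync::RwLock;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct LocalDefId(usize);

#[derive(Default)]
struct Definitions {
    def_keys: Vec<String>, // index doubles as the LocalDefId
}

impl Definitions {
    fn create_def(&mut self, key: String) -> LocalDefId {
        self.def_keys.push(key);
        LocalDefId(self.def_keys.len() - 1)
    }
    fn def_key(&self, id: LocalDefId) -> &str {
        &self.def_keys[id.0]
    }
}

struct Ctxt {
    definitions: RwLock<Definitions>,
}

impl Ctxt {
    fn create_def(&self, key: &str) -> LocalDefId {
        // The write guard lets the table grow through a shared `&self`.
        self.definitions.write().unwrap().create_def(key.to_owned())
    }
    fn def_key(&self, id: LocalDefId) -> String {
        // Lookups only need a read guard, like `definitions_untracked()`.
        self.definitions.read().unwrap().def_key(id).to_owned()
    }
}

fn main() {
    let tcx = Ctxt { definitions: RwLock::new(Definitions::default()) };
    let root = tcx.create_def("crate root");
    let item = tcx.create_def("fn main");
    assert_eq!(tcx.def_key(root), "crate root");
    assert_eq!(tcx.def_key(item), "fn main");
    println!("created {:?} and {:?}", root, item);
}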
@@ -1460,6 +1468,64 @@ impl<'tcx> TyCtxt<'tcx> {
         )
     }

+    /// Create a new definition within the incr. comp. engine.
+    pub fn create_def(self, parent: LocalDefId, data: hir::definitions::DefPathData) -> LocalDefId {
+        // This function modifies `self.definitions` using a side-effect.
+        // We need to ensure that these side effects are re-run by the incr. comp. engine.
+        // Depending on the forever-red node will tell the graph that the calling query
+        // needs to be re-evaluated.
+        use rustc_query_system::dep_graph::DepNodeIndex;
+        self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE);
+
+        // The following call has the side effect of modifying the tables inside `definitions`.
+        // These very tables are relied on by the incr. comp. engine to decode DepNodes and to
+        // decode the on-disk cache.
+        //
+        // Any LocalDefId which is used within queries, either as key or result, either:
+        // - has been created before the construction of the TyCtxt;
+        // - has been created by this call to `create_def`.
+        // As a consequence, this LocalDefId is always re-created before it is needed by the incr.
+        // comp. engine itself.
+        //
+        // This call also writes to the value of `source_span` and `expn_that_defined` queries.
+        // This is fine because:
+        // - those queries are `eval_always` so we won't miss their result changing;
+        // - this write will have happened before these queries are called.
+        self.definitions.write().create_def(parent, data)
+    }
+
+    pub fn iter_local_def_id(self) -> impl Iterator<Item = LocalDefId> + 'tcx {
+        // Create a dependency to the crate to be sure we re-execute this when the amount of
+        // definitions change.
+        self.ensure().hir_crate(());
+        // Leak a read lock once we start iterating on definitions, to prevent adding new ones
+        // while iterating. If some query needs to add definitions, it should be `ensure`d above.
+        let definitions = self.definitions.leak();
+        definitions.iter_local_def_id()
+    }
+
+    pub fn def_path_table(self) -> &'tcx rustc_hir::definitions::DefPathTable {
+        // Create a dependency to the crate to be sure we re-execute this when the amount of
+        // definitions change.
+        self.ensure().hir_crate(());
+        // Leak a read lock once we start iterating on definitions, to prevent adding new ones
+        // while iterating. If some query needs to add definitions, it should be `ensure`d above.
+        let definitions = self.definitions.leak();
+        definitions.def_path_table()
+    }
+
+    pub fn def_path_hash_to_def_index_map(
+        self,
+    ) -> &'tcx rustc_hir::def_path_hash_map::DefPathHashMap {
+        // Create a dependency to the crate to be sure we re-execute this when the amount of
+        // definitions change.
+        self.ensure().hir_crate(());
+        // Leak a read lock once we start iterating on definitions, to prevent adding new ones
+        // while iterating. If some query needs to add definitions, it should be `ensure`d above.
+        let definitions = self.definitions.leak();
+        definitions.def_path_hash_to_def_index_map()
+    }
+
     /// Note that this is *untracked* and should only be used within the query
     /// system if the result is otherwise tracked through queries
     pub fn cstore_untracked(self) -> &'tcx CrateStoreDyn {
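The `leak()` calls above effectively freeze the definition table for the rest of the session: once something iterates over all `LocalDefId`s, nothing may create new ones. A single-threaded sketch of that invariant using only std (the real code leaks a read guard of the `RwLock`, which this model only imitates with a flag):

use std::cell::{Cell, RefCell};

struct Definitions {
    names: RefCell<Vec<String>>,
    frozen: Cell<bool>,
}

impl Definitions {
    fn new() -> Self {
        Definitions { names: RefCell::new(Vec::new()), frozen: Cell::new(false) }
    }

    fn create_def(&self, name: &str) -> usize {
        assert!(!self.frozen.get(), "cannot create a definition while iterating");
        let mut names = self.names.borrow_mut();
        names.push(name.to_owned());
        names.len() - 1
    }

    /// Freeze the table and return a snapshot of all ids, mirroring the
    /// "leak a read lock" trick: from here on the set of definitions is fixed.
    fn iter_local_def_id(&self) -> Vec<usize> {
        self.frozen.set(true);
        (0..self.names.borrow().len()).collect()
    }
}

fn main() {
    let defs = Definitions::new();
    defs.create_def("crate root");
    defs.create_def("fn main");
    for id in defs.iter_local_def_id() {
        println!("def {} = {}", id, defs.names.borrow()[id]);
    }
    // defs.create_def("too late"); // would panic: the table is frozen
}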
@@ -1468,8 +1534,9 @@ impl<'tcx> TyCtxt<'tcx> {

     /// Note that this is *untracked* and should only be used within the query
     /// system if the result is otherwise tracked through queries
-    pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions {
-        &self.definitions
+    #[inline]
+    pub fn definitions_untracked(self) -> ReadGuard<'tcx, Definitions> {
+        self.definitions.read()
     }

     /// Note that this is *untracked* and should only be used within the query

@@ -1480,23 +1547,18 @@ impl<'tcx> TyCtxt<'tcx> {
     }

     #[inline(always)]
-    pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> {
-        StableHashingContext::new(
+    pub fn with_stable_hashing_context<R>(
+        self,
+        f: impl FnOnce(StableHashingContext<'_>) -> R,
+    ) -> R {
+        let definitions = self.definitions_untracked();
+        let hcx = StableHashingContext::new(
             self.sess,
-            &self.definitions,
+            &*definitions,
             &*self.cstore,
             &self.untracked_resolutions.source_span,
-        )
-    }
-
-    #[inline(always)]
-    pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> {
-        StableHashingContext::ignore_spans(
-            self.sess,
-            &self.definitions,
-            &*self.cstore,
-            &self.untracked_resolutions.source_span,
-        )
+        );
+        f(hcx)
     }

     pub fn serialize_query_result_cache(self, encoder: FileEncoder) -> FileEncodeResult {
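`create_stable_hashing_context` could not keep its old shape once `definitions` sits behind a lock: a returned context would have to borrow from a read guard that is dropped when the function returns. The closure-taking `with_stable_hashing_context` keeps the guard alive exactly as long as the caller uses the context. A self-contained sketch of the pattern with stand-in types (assumed names, not the compiler's):

use std::sync::RwLock;

struct Definitions {
    names: Vec<String>,
}

struct HashingContext<'a> {
    definitions: &'a Definitions,
}

impl<'a> HashingContext<'a> {
    fn describe(&self) -> String {
        format!("{} definitions in scope", self.definitions.names.len())
    }
}

struct Ctxt {
    definitions: RwLock<Definitions>,
}

impl Ctxt {
    // A `fn hashing_context(&self) -> HashingContext<'_>` shape would not compile:
    // the context would borrow from a guard local to this function.
    fn with_hashing_context<R>(&self, f: impl FnOnce(HashingContext<'_>) -> R) -> R {
        let guard = self.definitions.read().unwrap();
        let hcx = HashingContext { definitions: &*guard };
        f(hcx)
        // `guard` is released here, after the closure has finished with `hcx`.
    }
}

fn main() {
    let tcx = Ctxt {
        definitions: RwLock::new(Definitions { names: vec!["crate".into(), "main".into()] }),
    };
    let summary = tcx.with_hashing_context(|hcx| hcx.describe());
    println!("{summary}");
}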
@@ -2304,7 +2366,7 @@ impl<'tcx> TyCtxt<'tcx> {
         self.interners.intern_ty(
             st,
             self.sess,
-            &self.definitions,
+            &self.definitions.read(),
             &*self.cstore,
             // This is only used to create a stable hashing context.
             &self.untracked_resolutions.source_span,

@@ -2922,6 +2984,7 @@ fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool {

 pub fn provide(providers: &mut ty::query::Providers) {
     providers.resolutions = |tcx, ()| &tcx.untracked_resolutions;
+    providers.resolver_for_lowering = |tcx, ()| &tcx.untracked_resolver_for_lowering;
     providers.module_reexports =
         |tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]);
     providers.crate_name = |tcx, id| {

@@ -142,16 +142,16 @@ impl<'tcx> TyCtxt<'tcx> {
     /// Creates a hash of the type `Ty` which will be the same no matter what crate
     /// context it's calculated within. This is used by the `type_id` intrinsic.
     pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
-        let mut hasher = StableHasher::new();
-        let mut hcx = self.create_stable_hashing_context();

         // We want the type_id be independent of the types free regions, so we
         // erase them. The erase_regions() call will also anonymize bound
         // regions, which is desirable too.
         let ty = self.erase_regions(ty);

+        self.with_stable_hashing_context(|mut hcx| {
+            let mut hasher = StableHasher::new();
             hcx.while_hashing_spans(false, |hcx| ty.hash_stable(hcx, &mut hasher));
             hasher.finish()
+        })
     }

     pub fn res_generics_def_id(self, res: Res) -> Option<DefId> {

@@ -15,7 +15,6 @@ use spans::{CoverageSpan, CoverageSpans};
 use crate::MirPass;

 use rustc_data_structures::graph::WithNumNodes;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::sync::Lrc;
 use rustc_index::vec::IndexVec;
 use rustc_middle::hir;

@@ -576,12 +575,6 @@ fn get_body_span<'tcx>(

 fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 {
     // FIXME(cjgillot) Stop hashing HIR manually here.
-    let mut hcx = tcx.create_no_span_stable_hashing_context();
-    let mut stable_hasher = StableHasher::new();
     let owner = hir_body.id().hir_id.owner;
-    let bodies = &tcx.hir_owner_nodes(owner).unwrap().bodies;
-    hcx.with_hir_bodies(false, owner, bodies, |hcx| {
-        hir_body.value.hash_stable(hcx, &mut stable_hasher)
-    });
-    stable_hasher.finish()
+    tcx.hir_owner_nodes(owner).unwrap().hash_including_bodies.to_smaller_hash()
 }
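`hash_mir_source` now just reads `hash_including_bodies`, a fingerprint that is computed once when the owner's HIR nodes are built, instead of re-hashing the body with a fresh hashing context. A small illustration of that caching idea using std's hasher and made-up types (a sketch of the design, not the real `OwnerNodes`):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct OwnerNodes {
    body_source: String,
    // Counterpart of `hash_including_bodies`: filled in exactly once, at build time.
    hash_including_bodies: u64,
}

impl OwnerNodes {
    fn new(body_source: String) -> Self {
        let mut hasher = DefaultHasher::new();
        body_source.hash(&mut hasher);
        let hash_including_bodies = hasher.finish();
        OwnerNodes { body_source, hash_including_bodies }
    }
}

fn hash_mir_source(nodes: &OwnerNodes) -> u64 {
    // No hashing context, no HIR traversal: just read the cached fingerprint.
    nodes.hash_including_bodies
}

fn main() {
    let nodes = OwnerNodes::new("fn main() { println!(\"hi\"); }".to_owned());
    println!("source: {}", nodes.body_source);
    println!("cached hash: {:#x}", hash_mir_source(&nodes));
    assert_eq!(hash_mir_source(&nodes), nodes.hash_including_bodies);
}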
|
@ -588,7 +588,7 @@ impl<'tcx> Inliner<'tcx> {
|
|||||||
);
|
);
|
||||||
expn_data.def_site = callee_body.span;
|
expn_data.def_site = callee_body.span;
|
||||||
let expn_data =
|
let expn_data =
|
||||||
LocalExpnId::fresh(expn_data, self.tcx.create_stable_hashing_context());
|
self.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
|
||||||
let mut integrator = Integrator {
|
let mut integrator = Integrator {
|
||||||
args: &args,
|
args: &args,
|
||||||
new_locals: Local::new(caller_body.local_decls.len())..,
|
new_locals: Local::new(caller_body.local_decls.len())..,
|
||||||
|
@ -653,12 +653,11 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
|
|||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
{
|
{
|
||||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||||
let mut hcx = decoder.tcx.create_stable_hashing_context();
|
let local_hash: u64 = decoder.tcx.with_stable_hashing_context(|mut hcx| {
|
||||||
let mut hasher = StableHasher::new();
|
let mut hasher = StableHasher::new();
|
||||||
hcx.while_hashing_spans(true, |hcx| {
|
expn_id.expn_data().hash_stable(&mut hcx, &mut hasher);
|
||||||
expn_id.expn_data().hash_stable(hcx, &mut hasher)
|
hasher.finish()
|
||||||
});
|
});
|
||||||
let local_hash: u64 = hasher.finish();
|
|
||||||
debug_assert_eq!(hash.local_hash(), local_hash);
|
debug_assert_eq!(hash.local_hash(), local_hash);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -291,11 +291,12 @@ macro_rules! define_queries {
|
|||||||
.and_then(|def_id| tcx.opt_def_kind(def_id))
|
.and_then(|def_id| tcx.opt_def_kind(def_id))
|
||||||
};
|
};
|
||||||
let hash = || {
|
let hash = || {
|
||||||
let mut hcx = tcx.create_stable_hashing_context();
|
tcx.with_stable_hashing_context(|mut hcx|{
|
||||||
let mut hasher = StableHasher::new();
|
let mut hasher = StableHasher::new();
|
||||||
std::mem::discriminant(&kind).hash_stable(&mut hcx, &mut hasher);
|
std::mem::discriminant(&kind).hash_stable(&mut hcx, &mut hasher);
|
||||||
key.hash_stable(&mut hcx, &mut hasher);
|
key.hash_stable(&mut hcx, &mut hasher);
|
||||||
hasher.finish::<u64>()
|
hasher.finish::<u64>()
|
||||||
|
})
|
||||||
};
|
};
|
||||||
|
|
||||||
QueryStackFrame::new(name, description, span, def_kind, hash)
|
QueryStackFrame::new(name, description, span, def_kind, hash)
|
||||||
@ -376,6 +377,17 @@ macro_rules! define_queries {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// We use this for the forever-red node.
|
||||||
|
pub fn Red() -> DepKindStruct {
|
||||||
|
DepKindStruct {
|
||||||
|
is_anon: false,
|
||||||
|
is_eval_always: false,
|
||||||
|
fingerprint_style: FingerprintStyle::Unit,
|
||||||
|
force_from_dep_node: Some(|_, dep_node| bug!("force_from_dep_node: encountered {:?}", dep_node)),
|
||||||
|
try_load_from_on_disk_cache: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn TraitSelect() -> DepKindStruct {
|
pub fn TraitSelect() -> DepKindStruct {
|
||||||
DepKindStruct {
|
DepKindStruct {
|
||||||
is_anon: true,
|
is_anon: true,
|
||||||
|
@ -131,12 +131,11 @@ where
|
|||||||
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {
|
default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {
|
||||||
let mut hcx = tcx.create_stable_hashing_context();
|
tcx.with_stable_hashing_context(|mut hcx| {
|
||||||
let mut hasher = StableHasher::new();
|
let mut hasher = StableHasher::new();
|
||||||
|
|
||||||
self.hash_stable(&mut hcx, &mut hasher);
|
self.hash_stable(&mut hcx, &mut hasher);
|
||||||
|
|
||||||
hasher.finish()
|
hasher.finish()
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
|
@ -43,6 +43,7 @@ rustc_index::newtype_index! {
|
|||||||
impl DepNodeIndex {
|
impl DepNodeIndex {
|
||||||
pub const INVALID: DepNodeIndex = DepNodeIndex::MAX;
|
pub const INVALID: DepNodeIndex = DepNodeIndex::MAX;
|
||||||
pub const SINGLETON_DEPENDENCYLESS_ANON_NODE: DepNodeIndex = DepNodeIndex::from_u32(0);
|
pub const SINGLETON_DEPENDENCYLESS_ANON_NODE: DepNodeIndex = DepNodeIndex::from_u32(0);
|
||||||
|
pub const FOREVER_RED_NODE: DepNodeIndex = DepNodeIndex::from_u32(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::convert::From<DepNodeIndex> for QueryInvocationId {
|
impl std::convert::From<DepNodeIndex> for QueryInvocationId {
|
||||||
@ -124,6 +125,8 @@ impl<K: DepKind> DepGraph<K> {
|
|||||||
record_stats,
|
record_stats,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
let colors = DepNodeColorMap::new(prev_graph_node_count);
|
||||||
|
|
||||||
// Instantiate a dependy-less node only once for anonymous queries.
|
// Instantiate a dependy-less node only once for anonymous queries.
|
||||||
let _green_node_index = current.intern_new_node(
|
let _green_node_index = current.intern_new_node(
|
||||||
profiler,
|
profiler,
|
||||||
@ -131,7 +134,19 @@ impl<K: DepKind> DepGraph<K> {
|
|||||||
smallvec![],
|
smallvec![],
|
||||||
Fingerprint::ZERO,
|
Fingerprint::ZERO,
|
||||||
);
|
);
|
||||||
debug_assert_eq!(_green_node_index, DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE);
|
assert_eq!(_green_node_index, DepNodeIndex::SINGLETON_DEPENDENCYLESS_ANON_NODE);
|
||||||
|
|
||||||
|
// Instantiate a dependy-less red node only once for anonymous queries.
|
||||||
|
let (_red_node_index, _prev_and_index) = current.intern_node(
|
||||||
|
profiler,
|
||||||
|
&prev_graph,
|
||||||
|
DepNode { kind: DepKind::RED, hash: Fingerprint::ZERO.into() },
|
||||||
|
smallvec![],
|
||||||
|
None,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert_eq!(_red_node_index, DepNodeIndex::FOREVER_RED_NODE);
|
||||||
|
assert!(matches!(_prev_and_index, None | Some((_, DepNodeColor::Red))));
|
||||||
|
|
||||||
DepGraph {
|
DepGraph {
|
||||||
data: Some(Lrc::new(DepGraphData {
|
data: Some(Lrc::new(DepGraphData {
|
||||||
@ -140,7 +155,7 @@ impl<K: DepKind> DepGraph<K> {
|
|||||||
current,
|
current,
|
||||||
processed_side_effects: Default::default(),
|
processed_side_effects: Default::default(),
|
||||||
previous: prev_graph,
|
previous: prev_graph,
|
||||||
colors: DepNodeColorMap::new(prev_graph_node_count),
|
colors,
|
||||||
debug_loaded_from_disk: Default::default(),
|
debug_loaded_from_disk: Default::default(),
|
||||||
})),
|
})),
|
||||||
virtual_dep_node_index: Lrc::new(AtomicU32::new(0)),
|
virtual_dep_node_index: Lrc::new(AtomicU32::new(0)),
|
||||||
@ -328,10 +343,8 @@ impl<K: DepKind> DepGraph<K> {
|
|||||||
|
|
||||||
let dcx = cx.dep_context();
|
let dcx = cx.dep_context();
|
||||||
let hashing_timer = dcx.profiler().incr_result_hashing();
|
let hashing_timer = dcx.profiler().incr_result_hashing();
|
||||||
let current_fingerprint = hash_result.map(|f| {
|
let current_fingerprint =
|
||||||
let mut hcx = dcx.create_stable_hashing_context();
|
hash_result.map(|f| dcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, &result)));
|
||||||
f(&mut hcx, &result)
|
|
||||||
});
|
|
||||||
|
|
||||||
let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;
|
let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;
|
||||||
|
|
||||||
@ -971,6 +984,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
|
|||||||
let nanos = duration.as_secs() * 1_000_000_000 + duration.subsec_nanos() as u64;
|
let nanos = duration.as_secs() * 1_000_000_000 + duration.subsec_nanos() as u64;
|
||||||
let mut stable_hasher = StableHasher::new();
|
let mut stable_hasher = StableHasher::new();
|
||||||
nanos.hash(&mut stable_hasher);
|
nanos.hash(&mut stable_hasher);
|
||||||
|
let anon_id_seed = stable_hasher.finish();
|
||||||
|
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
let forbidden_edge = match env::var("RUST_FORBID_DEP_GRAPH_EDGE") {
|
let forbidden_edge = match env::var("RUST_FORBID_DEP_GRAPH_EDGE") {
|
||||||
@ -1006,7 +1020,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
|
|||||||
)
|
)
|
||||||
}),
|
}),
|
||||||
prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
|
prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
|
||||||
anon_id_seed: stable_hasher.finish(),
|
anon_id_seed,
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
forbidden_edge,
|
forbidden_edge,
|
||||||
total_read_count: AtomicU64::new(0),
|
total_read_count: AtomicU64::new(0),
|
||||||
|
@ -23,7 +23,7 @@ pub trait DepContext: Copy {
|
|||||||
type DepKind: self::DepKind;
|
type DepKind: self::DepKind;
|
||||||
|
|
||||||
/// Create a hashing context for hashing new results.
|
/// Create a hashing context for hashing new results.
|
||||||
fn create_stable_hashing_context(&self) -> StableHashingContext<'_>;
|
fn with_stable_hashing_context<R>(&self, f: impl FnOnce(StableHashingContext<'_>) -> R) -> R;
|
||||||
|
|
||||||
/// Access the DepGraph.
|
/// Access the DepGraph.
|
||||||
fn dep_graph(&self) -> &DepGraph<Self::DepKind>;
|
fn dep_graph(&self) -> &DepGraph<Self::DepKind>;
|
||||||
@ -85,8 +85,12 @@ impl FingerprintStyle {
|
|||||||
|
|
||||||
/// Describe the different families of dependency nodes.
|
/// Describe the different families of dependency nodes.
|
||||||
pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder> + 'static {
|
pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder> + 'static {
|
||||||
|
/// DepKind to use when incr. comp. is turned off.
|
||||||
const NULL: Self;
|
const NULL: Self;
|
||||||
|
|
||||||
|
/// DepKind to use to create the initial forever-red node.
|
||||||
|
const RED: Self;
|
||||||
|
|
||||||
/// Implementation of `std::fmt::Debug` for `DepNode`.
|
/// Implementation of `std::fmt::Debug` for `DepNode`.
|
||||||
fn debug_node(node: &DepNode<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result;
|
fn debug_node(node: &DepNode<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result;
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
use crate::ich;
|
use crate::ich;
|
||||||
|
|
||||||
use rustc_ast as ast;
|
use rustc_ast as ast;
|
||||||
use rustc_data_structures::sorted_map::SortedMap;
|
use rustc_data_structures::sorted_map::SortedMap;
|
||||||
use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher};
|
use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher};
|
||||||
@ -118,13 +119,13 @@ impl<'a> StableHashingContext<'a> {
|
|||||||
&mut self,
|
&mut self,
|
||||||
hash_bodies: bool,
|
hash_bodies: bool,
|
||||||
owner: LocalDefId,
|
owner: LocalDefId,
|
||||||
bodies: &'a SortedMap<hir::ItemLocalId, &'a hir::Body<'a>>,
|
bodies: &SortedMap<hir::ItemLocalId, &hir::Body<'_>>,
|
||||||
f: impl FnOnce(&mut Self),
|
f: impl FnOnce(&mut StableHashingContext<'_>),
|
||||||
) {
|
) {
|
||||||
let prev = self.body_resolver;
|
f(&mut StableHashingContext {
|
||||||
self.body_resolver = BodyResolver::Traverse { hash_bodies, owner, bodies };
|
body_resolver: BodyResolver::Traverse { hash_bodies, owner, bodies },
|
||||||
f(self);
|
..self.clone()
|
||||||
self.body_resolver = prev;
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
|
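`with_hir_bodies` switches from a mutate-then-restore dance on `self.body_resolver` to running the closure on a clone with just that field overridden, via struct update syntax. A minimal standalone example of the same pattern (invented types, purely illustrative):

#[derive(Clone, Debug, PartialEq)]
enum BodyResolver {
    Forbid,
    Traverse { owner: u32 },
}

#[derive(Clone)]
struct HashingContext {
    body_resolver: BodyResolver,
    hash_spans: bool,
}

impl HashingContext {
    fn with_hir_bodies(&mut self, owner: u32, f: impl FnOnce(&mut HashingContext)) {
        // No save/restore: the temporary context disappears when `f` returns,
        // and `self` is left untouched.
        f(&mut HashingContext {
            body_resolver: BodyResolver::Traverse { owner },
            ..self.clone()
        });
    }
}

fn main() {
    let mut hcx = HashingContext { body_resolver: BodyResolver::Forbid, hash_spans: true };
    hcx.with_hir_bodies(7, |inner| {
        assert_eq!(inner.body_resolver, BodyResolver::Traverse { owner: 7 });
        assert!(inner.hash_spans); // other fields are inherited from the original
    });
    assert_eq!(hcx.body_resolver, BodyResolver::Forbid); // unchanged afterwards
}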
@@ -542,8 +542,7 @@ fn incremental_verify_ich<CTX, K, V: Debug>(

     debug!("BEGIN verify_ich({:?})", dep_node);
     let new_hash = query.hash_result.map_or(Fingerprint::ZERO, |f| {
-        let mut hcx = tcx.create_stable_hashing_context();
-        f(&mut hcx, result)
+        tcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, result))
     });
     let old_hash = tcx.dep_graph().prev_fingerprint_of(dep_node);
     debug!("END verify_ich({:?})", dep_node);

@@ -96,8 +96,8 @@ fn get_symbol_hash<'tcx>(
     let substs = instance.substs;
     debug!("get_symbol_hash(def_id={:?}, parameters={:?})", def_id, substs);

+    tcx.with_stable_hashing_context(|mut hcx| {
         let mut hasher = StableHasher::new();
-        let mut hcx = tcx.create_stable_hashing_context();

         record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
             // the main symbol name is not necessarily unique; hash in the

@@ -137,6 +137,7 @@ fn get_symbol_hash<'tcx>(

         // 64 bits should be enough to avoid collisions.
         hasher.finish::<u64>()
+    })
 }

 // Follow C++ namespace-mangling style, see

@@ -53,7 +53,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             None,
             None,
         );
-        let expn_id = LocalExpnId::fresh(expn_data, self.cx.tcx.create_stable_hashing_context());
+        let expn_id =
+            self.cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
         let span = DUMMY_SP.fresh_expansion(expn_id);

         let is_empty = rustc_driver::catch_fatal_errors(|| {