mirror of https://github.com/rust-lang/rust.git
Auto merge of #64264 - Centril:rollup-w1khzun, r=Centril
Rollup of 7 pull requests

Successful merges:

 - #64023 (libstd fuchsia fixes)
 - #64098 (Ensure edition lints and internal lints are enabled with deny-warnings=false)
 - #64139 (Migrate internal diagnostic registration to macro_rules)
 - #64226 (Aggregation of cosmetic changes made during work on REPL PRs: libsyntax)
 - #64227 (Aggregation of cosmetic changes made during work on REPL PRs: librustc)
 - #64235 (Upgrade env_logger to 0.6)
 - #64258 (compiletest: Match suffixed environments)

Failed merges:

r? @ghost
This commit is contained in: commit 43a5ff4222
40
Cargo.lock
@@ -277,7 +277,7 @@ dependencies = [
"crypto-hash",
"curl",
"curl-sys",
"env_logger 0.6.0",
"env_logger",
"failure",
"filetime",
"flate2",
@@ -507,7 +507,7 @@ name = "compiletest"
version = "0.0.0"
dependencies = [
"diff",
"env_logger 0.5.13",
"env_logger",
"getopts",
"lazy_static 1.3.0",
"libc",
@@ -909,21 +909,9 @@ dependencies = [

[[package]]
name = "env_logger"
version = "0.5.13"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"
dependencies = [
"atty",
"humantime",
"log",
"termcolor",
]

[[package]]
name = "env_logger"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e"
checksum = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3"
dependencies = [
"atty",
"humantime",
@@ -1774,7 +1762,7 @@ dependencies = [
"chrono",
"clap",
"elasticlunr-rs",
"env_logger 0.6.0",
"env_logger",
"error-chain",
"handlebars",
"itertools 0.8.0",
@@ -1799,7 +1787,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77d1f0ba4d1e6b86fa18e8853d026d7d76a97eb7eb5eb052ed80901e43b7fc10"
dependencies = [
"env_logger 0.6.0",
"env_logger",
"failure",
"log",
"mdbook",
@@ -1992,7 +1980,7 @@ dependencies = [
"colored",
"compiletest_rs",
"directories",
"env_logger 0.6.0",
"env_logger",
"getrandom",
"hex",
"log",
@@ -2363,7 +2351,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df8b3f4e0475def7d9c2e5de8e5a1306949849761e107b360d03e98eafaffd61"
dependencies = [
"chrono",
"env_logger 0.6.0",
"env_logger",
"log",
]

@@ -2440,7 +2428,7 @@ dependencies = [
"bitflags",
"clap",
"derive_more",
"env_logger 0.6.0",
"env_logger",
"humantime",
"lazy_static 1.3.0",
"log",
@@ -2734,7 +2722,7 @@ dependencies = [
"clippy_lints",
"crossbeam-channel",
"difference",
"env_logger 0.6.0",
"env_logger",
"failure",
"futures",
"heck",
@@ -3129,11 +3117,7 @@ dependencies = [
name = "rustc_codegen_llvm"
version = "0.0.0"
dependencies = [
"cc",
"memmap",
"num_cpus",
"rustc_llvm",
"tempfile",
]

[[package]]
@@ -3203,7 +3187,7 @@ dependencies = [
name = "rustc_driver"
version = "0.0.0"
dependencies = [
"env_logger 0.5.13",
"env_logger",
"graphviz",
"log",
"rustc",
@@ -3590,7 +3574,7 @@ dependencies = [
"derive-new",
"diff",
"dirs",
"env_logger 0.6.0",
"env_logger",
"failure",
"getopts",
"ignore",
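The Cargo.lock hunks above come from the env_logger upgrade (#64235): the two previously locked versions (0.5.13 and 0.6.0) are consolidated into a single 0.6.2 entry, so dependents can name the package without a version suffix. For orientation only, a minimal sketch of how a crate typically initializes env_logger 0.6; this snippet is not part of the commit, and the `main` shown is hypothetical (it assumes `env_logger` and `log` are declared as dependencies):

```rust
// Hypothetical usage sketch of env_logger 0.6 (not part of this diff).
fn main() {
    // Configuration still comes from the RUST_LOG environment variable,
    // e.g. `RUST_LOG=debug ./my-tool`.
    env_logger::init();
    log::info!("logging initialized");
}
```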
@@ -119,17 +119,18 @@ fn main() {
cmd.arg(format!("-Cdebuginfo={}", debuginfo_level));
}

if env::var_os("RUSTC_DENY_WARNINGS").is_some() &&
env::var_os("RUSTC_EXTERNAL_TOOL").is_none() {
if env::var_os("RUSTC_EXTERNAL_TOOL").is_none() {
// When extending this list, add the new lints to the RUSTFLAGS of the
// build_bootstrap function of src/bootstrap/bootstrap.py as well as
// some code doesn't go through this `rustc` wrapper.
cmd.arg("-Dwarnings");
cmd.arg("-Drust_2018_idioms");
cmd.arg("-Dunused_lifetimes");
cmd.arg("-Wrust_2018_idioms");
cmd.arg("-Wunused_lifetimes");
if use_internal_lints(crate_name) {
cmd.arg("-Zunstable-options");
cmd.arg("-Drustc::internal");
cmd.arg("-Wrustc::internal");
}
if env::var_os("RUSTC_DENY_WARNINGS").is_some() {
cmd.arg("-Dwarnings");
}
}
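Since the old and new lines of this hunk are interleaved above without `+`/`-` markers, here is a hedged reconstruction of the wrapper's flag logic after the change (#64098): the edition and internal lints are now passed as `-W` warnings unconditionally for non-external tools, while `-Dwarnings` is added only when `RUSTC_DENY_WARNINGS` is set. The helper below is a sketch; `use_internal_lints` is stubbed out here, and in the real wrapper `cmd` is the actual compiler invocation.

```rust
use std::env;
use std::process::Command;

// Sketch of the post-change flag selection in the bootstrap `rustc` wrapper,
// reconstructed from the added lines of the hunk above.
fn add_lint_flags(cmd: &mut Command, crate_name: Option<&str>) {
    // Stub: the real predicate checks for rustc's own crates.
    let use_internal_lints = |name: Option<&str>| name.map_or(false, |n| n.starts_with("rustc"));

    if env::var_os("RUSTC_EXTERNAL_TOOL").is_none() {
        // Edition/idiom lints are now plain warnings by default...
        cmd.arg("-Wrust_2018_idioms");
        cmd.arg("-Wunused_lifetimes");
        if use_internal_lints(crate_name) {
            cmd.arg("-Zunstable-options");
            cmd.arg("-Wrustc::internal");
        }
        // ...and warnings are only denied when the build opts into it.
        if env::var_os("RUSTC_DENY_WARNINGS").is_some() {
            cmd.arg("-Dwarnings");
        }
    }
}
```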
@@ -631,8 +631,9 @@ class RustBuild(object):
target_linker = self.get_toml("linker", build_section)
if target_linker is not None:
env["RUSTFLAGS"] += "-C linker=" + target_linker + " "
env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes "
if self.get_toml("deny-warnings", "rust") != "false":
env["RUSTFLAGS"] += "-Dwarnings -Drust_2018_idioms -Dunused_lifetimes "
env["RUSTFLAGS"] += "-Dwarnings "

env["PATH"] = os.path.join(self.bin_root(), "bin") + \
os.pathsep + env["PATH"]
@@ -36,7 +36,7 @@ pub struct Flags {
// This overrides the deny-warnings configuation option,
// which passes -Dwarnings to the compiler invocations.
//
// true => deny, false => allow
// true => deny, false => warn
pub deny_warnings: Option<bool>,
}

@@ -556,10 +556,10 @@ fn split(s: &[String]) -> Vec<String> {
fn parse_deny_warnings(matches: &getopts::Matches) -> Option<bool> {
match matches.opt_str("warnings").as_ref().map(|v| v.as_str()) {
Some("deny") => Some(true),
Some("allow") => Some(false),
Some("warn") => Some(false),
Some(value) => {
eprintln!(
r#"invalid value for --warnings: {:?}, expected "allow" or "deny""#,
r#"invalid value for --warnings: {:?}, expected "warn" or "deny""#,
value,
);
process::exit(1);
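The `--warnings` flag now accepts `warn` instead of `allow` (matching the comment change above: `true => deny, false => warn`), and `None` means "fall back to the `deny-warnings` setting in `config.toml`". A simplified, hedged sketch of the new parsing follows; it takes the option value directly rather than a `getopts::Matches`, so the signature is illustrative:

```rust
// Simplified sketch of the post-change `--warnings` parsing; the real code
// reads the value out of `getopts::Matches`.
fn parse_deny_warnings(value: Option<&str>) -> Option<bool> {
    match value {
        Some("deny") => Some(true),
        // "warn" replaces the old "allow" spelling.
        Some("warn") => Some(false),
        Some(other) => {
            eprintln!(
                r#"invalid value for --warnings: {:?}, expected "warn" or "deny""#,
                other,
            );
            std::process::exit(1);
        }
        // No flag: defer to `deny-warnings` in config.toml.
        None => None,
    }
}
```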
@@ -1327,7 +1327,10 @@ impl Step for Compiletest {
cmd.env("RUSTC_PROFILER_SUPPORT", "1");
}

cmd.env("RUST_TEST_TMPDIR", builder.out.join("tmp"));
let tmp = builder.out.join("tmp");
std::fs::create_dir_all(&tmp).unwrap();
cmd.env("RUST_TEST_TMPDIR", tmp);

cmd.arg("--adb-path").arg("adb");
cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
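This compiletest hunk makes the build create the temporary directory before exporting it through `RUST_TEST_TMPDIR`, rather than only passing the path along. A small standalone sketch of the same pattern; the function name and parameters here are illustrative, not the real bootstrap API:

```rust
use std::path::Path;
use std::process::Command;

// Illustrative helper mirroring the change above: ensure the directory exists
// before handing it to child processes via an environment variable.
fn export_test_tmpdir(cmd: &mut Command, out: &Path) {
    let tmp = out.join("tmp");
    std::fs::create_dir_all(&tmp).expect("failed to create RUST_TEST_TMPDIR");
    cmd.env("RUST_TEST_TMPDIR", &tmp);
}
```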
@@ -187,7 +187,7 @@ impl<T: Copy> ArenaAllocatable for T {}

unsafe trait ArenaField<'tcx>: Sized {
/// Returns a specific arena to allocate from.
/// If None is returned, the DropArena will be used.
/// If `None` is returned, the `DropArena` will be used.
fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a TypedArena<Self>>;
}

@@ -1,7 +1,8 @@
// Error messages for EXXXX errors.
// Each message should start and end with a new line, and be wrapped to 80 characters.
// In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.
register_long_diagnostics! {
// Each message should start and end with a new line, and be wrapped to 80
// characters. In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use
// `:set tw=0` to disable.
syntax::register_diagnostics! {
E0038: r##"
Trait objects like `Box<Trait>` can only be constructed when certain
requirements are satisfied by the trait in question.
@@ -2183,11 +2184,7 @@ Examples of erroneous code:
static X: u32 = 42;
```
"##,

}

register_diagnostics! {
;
// E0006, // merged with E0005
// E0101, // replaced with E0282
// E0102, // replaced with E0282
@@ -2206,7 +2203,8 @@ register_diagnostics! {
// E0305, // expected constant
E0311, // thing may not live long enough
E0312, // lifetime of reference outlives lifetime of borrowed content
E0313, // lifetime of borrowed pointer outlives lifetime of captured variable
E0313, // lifetime of borrowed pointer outlives lifetime of captured
// variable
E0314, // closure outlives stack frame
E0315, // cannot invoke closure outside of its lifetime
E0316, // nested quantification of lifetimes
@@ -2223,12 +2221,13 @@ register_diagnostics! {
E0483, // lifetime of operand does not outlive the operation
E0484, // reference is not valid at the time of borrow
E0485, // automatically reference is not valid at the time of borrow
E0486, // type of expression contains references that are not valid during...
E0486, // type of expression contains references that are not valid during..
E0487, // unsafe use of destructor: destructor might be called while...
E0488, // lifetime of variable does not enclose its declaration
E0489, // type/lifetime parameter not in scope here
E0490, // a value of type `..` is borrowed for too long
E0495, // cannot infer an appropriate lifetime due to conflicting requirements
E0495, // cannot infer an appropriate lifetime due to conflicting
// requirements
E0566, // conflicting representation hints
E0623, // lifetime mismatch where both parameters are anonymous regions
E0628, // generators cannot have explicit parameters
@@ -2239,7 +2238,8 @@ register_diagnostics! {
E0688, // in-band lifetimes cannot be mixed with explicit lifetime binders
E0697, // closures cannot be static
E0707, // multiple elided lifetimes used in arguments of `async fn`
E0708, // `async` non-`move` closures with parameters are not currently supported
E0708, // `async` non-`move` closures with parameters are not currently
// supported
E0709, // multiple different lifetimes used in arguments of `async fn`
E0710, // an unknown tool name found in scoped lint
E0711, // a feature has been declared with conflicting stability attributes
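The error_codes hunks are part of the diagnostics-registration migration (#64139). Reading the added lines, `register_long_diagnostics!` becomes `syntax::register_diagnostics!`, and the codes without a long description move into the same invocation after a `;` instead of living in a separate `register_diagnostics!` block. A schematic of the new shape, abbreviated and not compilable on its own (the macro is internal to the compiler):

```rust
// Schematic only: the real invocation registers hundreds of codes.
syntax::register_diagnostics! {
    E0038: r##"
Trait objects like `Box<Trait>` can only be constructed when certain
requirements are satisfied by the trait in question.
"##,
    // ...more documented error codes...
    ;
    // Codes without long-form documentation follow the `;`.
    E0311, // thing may not live long enough
    E0312, // lifetime of reference outlives lifetime of borrowed content
}
```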
@@ -4,13 +4,12 @@
//! conflicts between multiple such attributes attached to the same
//! item.

use crate::ty::TyCtxt;
use crate::ty::query::Providers;

use crate::hir;
use crate::hir::def_id::DefId;
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
use crate::ty::TyCtxt;
use crate::ty::query::Providers;

use std::fmt::{self, Display};
use syntax::symbol::sym;
use syntax_pos::Span;
@@ -1,15 +1,17 @@
use self::Namespace::*;

use crate::hir::def_id::{DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use crate::hir;
use crate::ty;
use crate::util::nodemap::DefIdMap;

use syntax::ast;
use syntax::ext::base::MacroKind;
use syntax::ast::NodeId;
use syntax_pos::Span;
use rustc_macros::HashStable;
use crate::hir;
use crate::ty;
use std::fmt::Debug;

use self::Namespace::*;
use std::fmt::Debug;

/// Encodes if a `DefKind::Ctor` is the constructor of an enum variant or a struct.
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, HashStable)]
@@ -115,7 +117,7 @@ impl DefKind {
}
}

/// An English article for the def.
/// Gets an English article for the definition.
pub fn article(&self) -> &'static str {
match *self {
DefKind::AssocTy
@@ -134,18 +136,22 @@ pub enum Res<Id = hir::HirId> {
Def(DefKind, DefId),

// Type namespace

PrimTy(hir::PrimTy),
SelfTy(Option<DefId> /* trait */, Option<DefId> /* impl */),
ToolMod, // e.g., `rustfmt` in `#[rustfmt::skip]`

// Value namespace

SelfCtor(DefId /* impl */), // `DefId` refers to the impl
Local(Id),

// Macro namespace

NonMacroAttr(NonMacroAttrKind), // e.g., `#[inline]` or `#[rustfmt::skip]`

// All namespaces

Err,
}

@@ -330,7 +336,7 @@ impl NonMacroAttrKind {
}

impl<Id> Res<Id> {
/// Return the `DefId` of this `Def` if it has an id, else panic.
/// Return the `DefId` of this `Def` if it has an ID, else panic.
pub fn def_id(&self) -> DefId
where
Id: Debug,
@@ -340,7 +346,7 @@ impl<Id> Res<Id> {
})
}

/// Return `Some(..)` with the `DefId` of this `Res` if it has a id, else `None`.
/// Return `Some(..)` with the `DefId` of this `Res` if it has a ID, else `None`.
pub fn opt_def_id(&self) -> Option<DefId> {
match *self {
Res::Def(_, id) => Some(id),
@@ -379,7 +385,7 @@ impl<Id> Res<Id> {
}
}

/// An English article for the res.
/// Gets an English article for the `Res`.
pub fn article(&self) -> &'static str {
match *self {
Res::Def(kind, _) => kind.article(),
@@ -11,7 +11,7 @@ newtype_index! {

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum CrateNum {
/// A special CrateNum that we use for the tcx.rcache when decoding from
/// A special `CrateNum` that we use for the `tcx.rcache` when decoding from
/// the incr. comp. cache.
ReservedForIncrCompCache,
Index(CrateId),
@@ -26,11 +26,10 @@ impl ::std::fmt::Debug for CrateNum {
}
}

/// Item definitions in the currently-compiled crate would have the CrateNum
/// LOCAL_CRATE in their DefId.
/// Item definitions in the currently-compiled crate would have the `CrateNum`
/// `LOCAL_CRATE` in their `DefId`.
pub const LOCAL_CRATE: CrateNum = CrateNum::Index(CrateId::from_u32_const(0));

impl Idx for CrateNum {
#[inline]
fn new(value: usize) -> Self {
@@ -31,11 +31,13 @@
//! This order consistency is required in a few places in rustc, for
//! example generator inference, and possibly also HIR borrowck.

use syntax::ast::{Ident, Name, Attribute};
use syntax_pos::Span;
use super::itemlikevisit::DeepVisitor;

use crate::hir::*;
use crate::hir::map::Map;
use super::itemlikevisit::DeepVisitor;

use syntax::ast::{Ident, Name, Attribute};
use syntax_pos::Span;

#[derive(Copy, Clone)]
pub enum FnKind<'a> {
@@ -139,7 +141,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'v> : Sized {
pub trait Visitor<'v>: Sized {
///////////////////////////////////////////////////////////////////////////
// Nested items.

@@ -162,8 +164,8 @@ pub trait Visitor<'v> : Sized {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v>;

/// Invoked when a nested item is encountered. By default does
/// nothing unless you override `nested_visit_map` to return
/// `Some(_)`, in which case it will walk the item. **You probably
/// nothing unless you override `nested_visit_map` to return other than
/// `None`, in which case it will walk the item. **You probably
/// don't want to override this method** -- instead, override
/// `nested_visit_map` or use the "shallow" or "deep" visit
/// patterns described on `itemlikevisit::ItemLikeVisitor`. The only
@@ -201,8 +203,8 @@ pub trait Visitor<'v> : Sized {

/// Invoked to visit the body of a function, method or closure. Like
/// visit_nested_item, does nothing by default unless you override
/// `nested_visit_map` to return `Some(_)`, in which case it will walk the
/// body.
/// `nested_visit_map` to return other htan `None`, in which case it will walk
/// the body.
fn visit_nested_body(&mut self, id: BodyId) {
let opt_body = self.nested_visit_map().intra().map(|map| map.body(id));
if let Some(body) = opt_body {
@@ -603,7 +605,7 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
visitor.visit_lifetime(lifetime);
visitor.visit_ty(&mutable_type.ty)
}
TyKind::Never => {},
TyKind::Never => {}
TyKind::Tup(ref tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
@@ -1,7 +1,7 @@
use super::{Item, ImplItem, TraitItem};
use super::intravisit::Visitor;

/// The "item-like visitor" visitor defines only the top-level methods
/// The "item-like visitor" defines only the top-level methods
/// that can be invoked by `Crate::visit_all_item_likes()`. Whether
/// this trait is the right one to implement will depend on the
/// overall pattern you need. Here are the three available patterns,
@@ -18,11 +18,11 @@ use super::intravisit::Visitor;
/// an item, but don't care about how item-like things are nested
/// within one another.
/// - Example: Examine each expression to look for its type and do some check or other.
/// - How: Implement `intravisit::Visitor` and use
/// `tcx.hir().krate().visit_all_item_likes(visitor.as_deep_visitor())`. Within
/// your `intravisit::Visitor` impl, implement methods like
/// `visit_expr()`; don't forget to invoke
/// `intravisit::walk_visit_expr()` to keep walking the subparts.
/// - How: Implement `intravisit::Visitor` and override the `nested_visit_map()` method
/// to return `NestedVisitorMap::OnlyBodies` and use
/// `tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor())`. Within
/// your `intravisit::Visitor` impl, implement methods like `visit_expr()` (don't forget
/// to invoke `intravisit::walk_expr()` to keep walking the subparts).
/// - Pro: Visitor methods for any kind of HIR node, not just item-like things.
/// - Pro: Integrates well into dependency tracking.
/// - Con: Don't get information about nesting between items
@@ -30,10 +30,9 @@ use super::intravisit::Visitor;
/// item-like things.
/// - Example: Lifetime resolution, which wants to bring lifetimes declared on the
/// impl into scope while visiting the impl-items, and then back out again.
/// - How: Implement `intravisit::Visitor` and override the
/// `nested_visit_map()` methods to return
/// `NestedVisitorMap::All`. Walk your crate with
/// `intravisit::walk_crate()` invoked on `tcx.hir().krate()`.
/// - How: Implement `intravisit::Visitor` and override the `nested_visit_map()` method
/// to return `NestedVisitorMap::All`. Walk your crate with `intravisit::walk_crate()`
/// invoked on `tcx.hir().krate()`.
/// - Pro: Visitor methods for any kind of HIR node, not just item-like things.
/// - Pro: Preserves nesting information
/// - Con: Does not integrate well into dependency tracking.
@@ -79,7 +78,7 @@ impl<'v, 'hir, V> ItemLikeVisitor<'hir> for DeepVisitor<'v, V>
}
}

/// A parallel variant of ItemLikeVisitor
/// A parallel variant of `ItemLikeVisitor`.
pub trait ParItemLikeVisitor<'hir> {
fn visit_item(&self, item: &'hir Item);
fn visit_trait_item(&self, trait_item: &'hir TraitItem);
@@ -79,7 +79,7 @@ const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF;
pub struct LoweringContext<'a> {
crate_root: Option<Symbol>,

/// Used to assign ids to HIR nodes that do not directly correspond to an AST node.
/// Used to assign IDs to HIR nodes that do not directly correspond to AST nodes.
sess: &'a Session,

cstore: &'a dyn CrateStore,
@@ -126,7 +126,7 @@ pub struct LoweringContext<'a> {
/// lifetime definitions in the corresponding impl or function generics.
lifetimes_to_define: Vec<(Span, ParamName)>,

/// Whether or not in-band lifetimes are being collected. This is used to
/// `true` if in-band lifetimes are being collected. This is used to
/// indicate whether or not we're in a place where new lifetimes will result
/// in in-band lifetime definitions, such a function or an impl header,
/// including implicit lifetimes from `impl_header_lifetime_elision`.
@@ -154,13 +154,13 @@ pub struct LoweringContext<'a> {
}

pub trait Resolver {
/// Obtain resolution for a `NodeId` with a single resolution.
/// Obtains resolution for a `NodeId` with a single resolution.
fn get_partial_res(&mut self, id: NodeId) -> Option<PartialRes>;

/// Obtain per-namespace resolutions for `use` statement with the given `NoedId`.
/// Obtains per-namespace resolutions for `use` statement with the given `NodeId`.
fn get_import_res(&mut self, id: NodeId) -> PerNS<Option<Res<NodeId>>>;

/// Obtain resolution for a label with the given `NodeId`.
/// Obtains resolution for a label with the given `NodeId`.
fn get_label_res(&mut self, id: NodeId) -> Option<NodeId>;

/// We must keep the set of definitions up to date as we add nodes that weren't in the AST.
@@ -699,7 +699,7 @@ impl<'a> LoweringContext<'a> {
fn lower_res(&mut self, res: Res<NodeId>) -> Res {
res.map_id(|id| {
self.lower_node_id_generic(id, |_| {
panic!("expected node_id to be lowered already for res {:#?}", res)
panic!("expected `NodeId` to be lowered already for res {:#?}", res);
})
})
}
@@ -1364,7 +1364,7 @@ impl<'a> LoweringContext<'a> {
}
}
}
TyKind::Mac(_) => bug!("`TyMac` should have been expanded by now."),
TyKind::Mac(_) => bug!("`TyMac` should have been expanded by now"),
TyKind::CVarArgs => {
// Create the implicit lifetime of the "spoofed" `VaListImpl`.
let span = self.sess.source_map().next_point(t.span.shrink_to_lo());
@@ -2999,7 +2999,7 @@ impl<'a> LoweringContext<'a> {
}
StmtKind::Expr(ref e) => hir::StmtKind::Expr(P(self.lower_expr(e))),
StmtKind::Semi(ref e) => hir::StmtKind::Semi(P(self.lower_expr(e))),
StmtKind::Mac(..) => panic!("Shouldn't exist here"),
StmtKind::Mac(..) => panic!("shouldn't exist here"),
};
smallvec![hir::Stmt {
hir_id: self.lower_node_id(s.id),
@@ -3187,7 +3187,7 @@ impl<'a> LoweringContext<'a> {

hir::Path {
span,
res: res.map_id(|_| panic!("unexpected node_id")),
res: res.map_id(|_| panic!("unexpected `NodeId`")),
segments: segments.into(),
}
}
@@ -718,7 +718,7 @@ impl LoweringContext<'_> {
AnonymousLifetimeMode::PassThrough,
|this, _| {
(
// Disallow impl Trait in foreign items
// Disallow `impl Trait` in foreign items.
this.lower_fn_decl(fdec, None, false, None),
this.lower_fn_params_to_names(fdec),
)
@@ -732,7 +732,7 @@ impl LoweringContext<'_> {
self.lower_ty(t, ImplTraitContext::disallowed()), self.lower_mutability(m))
}
ForeignItemKind::Ty => hir::ForeignItemKind::Type,
ForeignItemKind::Macro(_) => panic!("shouldn't exist here"),
ForeignItemKind::Macro(_) => panic!("macro shouldn't exist here"),
},
vis: self.lower_visibility(&i.vis, None),
span: i.span,
@@ -340,7 +340,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
/// their outer items.

fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'hir> {
panic!("visit_nested_xxx must be manually implemented in this visitor")
panic!("`visit_nested_xxx` must be manually implemented in this visitor");
}

fn visit_nested_item(&mut self, item: ItemId) {
@@ -7,10 +7,12 @@
use crate::hir;
use crate::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE, CRATE_DEF_INDEX};
use crate::ich::Fingerprint;
use crate::session::CrateDisambiguator;
use crate::util::nodemap::NodeMap;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::{IndexVec};
use rustc_data_structures::stable_hasher::StableHasher;
use crate::session::CrateDisambiguator;
use std::borrow::Borrow;
use std::fmt::Write;
use std::hash::Hash;
@@ -18,12 +20,11 @@ use syntax::ast;
use syntax::ext::hygiene::ExpnId;
use syntax::symbol::{Symbol, sym, InternedString};
use syntax_pos::{Span, DUMMY_SP};
use crate::util::nodemap::NodeMap;

/// The DefPathTable maps DefIndexes to DefKeys and vice versa.
/// Internally the DefPathTable holds a tree of DefKeys, where each DefKey
/// stores the DefIndex of its parent.
/// There is one DefPathTable for each crate.
/// The `DefPathTable` maps `DefIndex`es to `DefKey`s and vice versa.
/// Internally the `DefPathTable` holds a tree of `DefKey`s, where each `DefKey`
/// stores the `DefIndex` of its parent.
/// There is one `DefPathTable` for each crate.
#[derive(Clone, Default, RustcDecodable, RustcEncodable)]
pub struct DefPathTable {
index_to_key: Vec<DefKey>,
@@ -121,7 +122,7 @@ impl DefKey {
fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
let mut hasher = StableHasher::new();

// We hash a 0u8 here to disambiguate between regular DefPath hashes,
// We hash a `0u8` here to disambiguate between regular `DefPath` hashes,
// and the special "root_parent" below.
0u8.hash(&mut hasher);
parent_hash.hash(&mut hasher);
@@ -145,8 +146,7 @@ impl DefKey {
crate_disambiguator: CrateDisambiguator)
-> DefPathHash {
let mut hasher = StableHasher::new();
// Disambiguate this from a regular DefPath hash,
// see compute_stable_hash() above.
// Disambiguate this from a regular `DefPath` hash; see `compute_stable_hash()` above.
1u8.hash(&mut hasher);
crate_name.hash(&mut hasher);
crate_disambiguator.hash(&mut hasher);
@@ -155,10 +155,10 @@ impl DefKey {
}

/// A pair of `DefPathData` and an integer disambiguator. The integer is
/// normally 0, but in the event that there are multiple defs with the
/// normally `0`, but in the event that there are multiple defs with the
/// same `parent` and `data`, we use this field to disambiguate
/// between them. This introduces some artificial ordering dependency
/// but means that if you have (e.g.) two impls for the same type in
/// but means that if you have, e.g., two impls for the same type in
/// the same module, they do get distinct `DefId`s.
#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
pub struct DisambiguatedDefPathData {
@@ -277,29 +277,34 @@ impl DefPath {
pub enum DefPathData {
// Root: these should only be used for the root nodes, because
// they are treated specially by the `def_path` function.
/// The crate root (marker)

/// The crate root (marker).
CrateRoot,
// Catch-all for random DefId things like `DUMMY_NODE_ID`
// Catch-all for random `DefId` things like `DUMMY_NODE_ID`.
Misc,

// Different kinds of items and item-like things:
/// An impl

/// An impl.
Impl,
/// Something in the type NS
/// Something in the type namespace.
TypeNs(InternedString),
/// Something in the value NS
/// Something in the value namespace.
ValueNs(InternedString),
/// Something in the macro NS
/// Something in the macro namespace.
MacroNs(InternedString),
/// Something in the lifetime NS
/// Something in the lifetime namespace.
LifetimeNs(InternedString),
/// A closure expression
/// A closure expression.
ClosureExpr,
// Subportions of items
/// Implicit ctor for a unit or tuple-like struct or enum variant.

// Subportions of items:

/// Implicit constructor for a unit or tuple-like struct or enum variant.
Ctor,
/// A constant expression (see {ast,hir}::AnonConst).
/// A constant expression (see `{ast,hir}::AnonConst`).
AnonConst,
/// An `impl Trait` type node
/// An `impl Trait` type node.
ImplTrait,
/// Identifies a piece of crate metadata that is global to a whole crate
/// (as opposed to just one item). `GlobalMetaData` components are only
@@ -435,7 +440,7 @@ impl Definitions {
self.node_to_def_index.insert(ast::CRATE_NODE_ID, root_index);
self.set_invocation_parent(ExpnId::root(), root_index);

// Allocate some other DefIndices that always must exist.
// Allocate some other `DefIndex`es that always must exist.
GlobalMetaDataKind::allocate_def_indices(self);

root_index
@@ -458,7 +463,7 @@ impl Definitions {
data,
self.table.def_key(self.node_to_def_index[&node_id]));

// The root node must be created with create_root_def()
// The root node must be created with `create_root_def()`.
assert!(data != DefPathData::CrateRoot);

// Find the next free disambiguator for this key.
@@ -486,9 +491,9 @@ impl Definitions {
assert_eq!(index.index(), self.def_index_to_node.len());
self.def_index_to_node.push(node_id);

// Some things for which we allocate DefIndices don't correspond to
// anything in the AST, so they don't have a NodeId. For these cases
// we don't need a mapping from NodeId to DefIndex.
// Some things for which we allocate `DefIndex`es don't correspond to
// anything in the AST, so they don't have a `NodeId`. For these cases
// we don't need a mapping from `NodeId` to `DefIndex`.
if node_id != ast::DUMMY_NODE_ID {
debug!("create_def_with_parent: def_index_to_node[{:?} <-> {:?}", index, node_id);
self.node_to_def_index.insert(node_id, index);
@@ -498,7 +503,7 @@ impl Definitions {
self.expansions_that_defined.insert(index, expn_id);
}

// The span is added if it isn't dummy
// The span is added if it isn't dummy.
if !span.is_dummy() {
self.def_index_to_span.insert(index, span);
}
@@ -506,12 +511,12 @@ impl Definitions {
index
}

/// Initialize the `ast::NodeId` to `HirId` mapping once it has been generated during
/// Initializes the `ast::NodeId` to `HirId` mapping once it has been generated during
/// AST to HIR lowering.
pub fn init_node_id_to_hir_id_mapping(&mut self,
mapping: IndexVec<ast::NodeId, hir::HirId>) {
assert!(self.node_to_hir_id.is_empty(),
"Trying initialize NodeId -> HirId mapping twice");
"trying to initialize `NodeId` -> `HirId` mapping twice");
self.node_to_hir_id = mapping;
}

@@ -533,7 +538,7 @@ impl Definitions {

pub fn set_invocation_parent(&mut self, invoc_id: ExpnId, parent: DefIndex) {
let old_parent = self.invocation_parents.insert(invoc_id, parent);
assert!(old_parent.is_none(), "parent def-index is reset for an invocation");
assert!(old_parent.is_none(), "parent `DefIndex` is reset for an invocation");
}
}

@@ -585,9 +590,9 @@ impl DefPathData {
}
}

// We define the GlobalMetaDataKind enum with this macro because we want to
// We define the `GlobalMetaDataKind` enum with this macro because we want to
// make sure that we exhaustively iterate over all variants when registering
// the corresponding DefIndices in the DefTable.
// the corresponding `DefIndex`es in the `DefTable`.
macro_rules! define_global_metadata_kind {
(pub enum GlobalMetaDataKind {
$($variant:ident),*
@@ -609,7 +614,7 @@ macro_rules! define_global_metadata_kind {
DUMMY_SP
);

// Make sure calling def_index does not crash.
// Make sure calling `def_index` does not crash.
instance.def_index(&definitions.table);
})*
}
@@ -623,7 +628,7 @@ macro_rules! define_global_metadata_kind {
}
};

// These DefKeys are all right after the root,
// These `DefKey`s are all right after the root,
// so a linear search is fine.
let index = def_path_table.index_to_key
.iter()
@@ -5,10 +5,15 @@ pub use self::definitions::{
};

use crate::dep_graph::{DepGraph, DepNode, DepKind, DepNodeIndex};

use crate::hir::*;
use crate::hir::DefKind;
use crate::hir::def_id::{CRATE_DEF_INDEX, DefId, LocalDefId};

use crate::hir::itemlikevisit::ItemLikeVisitor;
use crate::hir::print::Nested;
use crate::middle::cstore::CrateStoreDyn;
use crate::ty::query::Providers;
use crate::util::nodemap::FxHashMap;
use crate::util::common::time;

use rustc_target::spec::abi::Abi;
use rustc_data_structures::svh::Svh;
@@ -18,15 +23,7 @@ use syntax::source_map::Spanned;
use syntax::ext::base::MacroKind;
use syntax_pos::{Span, DUMMY_SP};

use crate::hir::*;
use crate::hir::DefKind;
use crate::hir::itemlikevisit::ItemLikeVisitor;
use crate::hir::print::Nested;
use crate::util::nodemap::FxHashMap;
use crate::util::common::time;

use std::result::Result::Err;
use crate::ty::query::Providers;

pub mod blocks;
mod collector;
@@ -627,7 +624,7 @@ impl<'hir> Map<'hir> {
.unwrap_or(hir_id)
}

/// Check if the node is an argument. An argument is a local variable whose
/// Checks if the node is an argument. An argument is a local variable whose
/// immediate parent is an item or a closure.
pub fn is_argument(&self, id: HirId) -> bool {
match self.find(id) {
@@ -733,7 +730,7 @@ impl<'hir> Map<'hir> {
/// ```
/// fn foo(x: usize) -> bool {
/// if x == 1 {
/// true // `get_return_block` gets passed the `id` corresponding
/// true // If `get_return_block` gets passed the `id` corresponding
/// } else { // to this, it will return `foo`'s `HirId`.
/// false
/// }
@@ -743,7 +740,7 @@ impl<'hir> Map<'hir> {
/// ```
/// fn foo(x: usize) -> bool {
/// loop {
/// true // `get_return_block` gets passed the `id` corresponding
/// true // If `get_return_block` gets passed the `id` corresponding
/// } // to this, it will return `None`.
/// false
/// }
@@ -994,9 +991,9 @@ impl<'hir> Map<'hir> {
self.map.iter().enumerate().filter_map(|(i, local_map)| {
local_map.as_ref().map(|m| (i, m))
}).flat_map(move |(array_index, local_map)| {
// Iterate over each valid entry in the local map
// Iterate over each valid entry in the local map.
local_map.iter_enumerated().filter_map(move |(i, entry)| entry.map(move |_| {
// Reconstruct the HirId based on the 3 indices we used to find it
// Reconstruct the `HirId` based on the 3 indices we used to find it.
HirId {
owner: DefIndex::from(array_index),
local_id: i,
@@ -1207,7 +1204,7 @@ pub fn map_crate<'hir>(sess: &crate::session::Session,
definitions,
};

time(sess, "validate hir map", || {
time(sess, "validate HIR map", || {
hir_id_validator::check_crate(&map);
});

@@ -1247,9 +1244,9 @@ impl<'a> print::State<'a> {
Node::Pat(a) => self.print_pat(&a),
Node::Arm(a) => self.print_arm(&a),
Node::Block(a) => {
// containing cbox, will be closed by print-block at }
// Containing cbox, will be closed by print-block at `}`.
self.cbox(print::INDENT_UNIT);
// head-ibox, will be closed by print-block after {
// Head-ibox, will be closed by print-block after `{`.
self.ibox(0);
self.print_block(&a)
}
@@ -1257,8 +1254,8 @@ impl<'a> print::State<'a> {
Node::Visibility(a) => self.print_visibility(&a),
Node::GenericParam(_) => bug!("cannot print Node::GenericParam"),
Node::Field(_) => bug!("cannot print StructField"),
// these cases do not carry enough information in the
// hir_map to reconstruct their full structure for pretty
// These cases do not carry enough information in the
// `hir_map` to reconstruct their full structure for pretty
// printing.
Node::Ctor(..) => bug!("cannot print isolated Ctor"),
Node::Local(a) => self.print_local_decl(&a),
@@ -1273,8 +1270,8 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String {
let id_str = if include_id { &id_str[..] } else { "" };

let path_str = || {
// This functionality is used for debugging, try to use TyCtxt to get
// the user-friendly path, otherwise fall back to stringifying DefPath.
// This functionality is used for debugging, try to use `TyCtxt` to get
// the user-friendly path, otherwise fall back to stringifying `DefPath`.
crate::ty::tls::with_opt(|tcx| {
if let Some(tcx) = tcx {
let def_id = map.local_def_id(id);
@@ -13,26 +13,24 @@ pub use self::UnsafeSource::*;
use crate::hir::def::{Res, DefKind};
use crate::hir::def_id::{DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX};
use crate::hir::ptr::P;
use crate::util::nodemap::{NodeMap, FxHashSet};
use crate::mir::mono::Linkage;
use crate::ty::AdtKind;
use crate::ty::query::Providers;
use crate::util::nodemap::{NodeMap, FxHashSet};

use errors::FatalError;
use syntax_pos::{Span, DUMMY_SP, symbol::InternedString, MultiSpan};
use syntax::source_map::Spanned;
use rustc_target::spec::abi::Abi;
use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, AsmDialect};
use syntax::ast::{Attribute, Label, LitKind, StrStyle, FloatTy, IntTy, UintTy};
use syntax::attr::{InlineAttr, OptimizeAttr};
use syntax::symbol::{Symbol, kw};
use syntax::tokenstream::TokenStream;
use syntax::util::parser::ExprPrecedence;
use crate::ty::AdtKind;
use crate::ty::query::Providers;

use rustc_target::spec::abi::Abi;
use rustc_data_structures::sync::{par_for_each_in, Send, Sync};
use rustc_data_structures::thin_vec::ThinVec;
use rustc_macros::HashStable;

use rustc_serialize::{self, Encoder, Encodable, Decoder, Decodable};
use std::collections::{BTreeSet, BTreeMap};
use std::fmt;
@@ -99,7 +97,8 @@ impl rustc_serialize::UseSpecializedEncodable for HirId {
} = *self;

owner.encode(s)?;
local_id.encode(s)
local_id.encode(s)?;
Ok(())
}
}

@@ -121,7 +120,7 @@ impl fmt::Display for HirId {
}
}

// Hack to ensure that we don't try to access the private parts of `ItemLocalId` in this module
// Hack to ensure that we don't try to access the private parts of `ItemLocalId` in this module.
mod item_local_id_inner {
use rustc_data_structures::indexed_vec::Idx;
use rustc_macros::HashStable;
@@ -746,7 +745,7 @@ pub struct Crate {
// Attributes from non-exported macros, kept only for collecting the library feature list.
pub non_exported_macro_attrs: HirVec<Attribute>,

// N.B., we use a BTreeMap here so that `visit_all_items` iterates
// N.B., we use a `BTreeMap` here so that `visit_all_items` iterates
// over the ids in increasing order. In principle it should not
// matter what order we visit things in, but in *practice* it
// does, because it can affect the order in which errors are
@@ -1403,13 +1402,13 @@ pub struct AnonConst {
pub body: BodyId,
}

/// An expression
/// An expression.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Expr {
pub span: Span,
pub hir_id: HirId,
pub node: ExprKind,
pub attrs: ThinVec<Attribute>,
pub hir_id: HirId,
pub span: Span,
}

// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
@@ -2422,37 +2421,37 @@ pub enum ItemKind {
///
/// or just
///
/// `use foo::bar::baz;` (with `as baz` implicitly on the right)
/// `use foo::bar::baz;` (with `as baz` implicitly on the right).
Use(P<Path>, UseKind),

/// A `static` item
/// A `static` item.
Static(P<Ty>, Mutability, BodyId),
/// A `const` item
/// A `const` item.
Const(P<Ty>, BodyId),
/// A function declaration
/// A function declaration.
Fn(P<FnDecl>, FnHeader, Generics, BodyId),
/// A module
/// A module.
Mod(Mod),
/// An external module
/// An external module.
ForeignMod(ForeignMod),
/// Module-level inline assembly (from global_asm!)
/// Module-level inline assembly (from `global_asm!`).
GlobalAsm(P<GlobalAsm>),
/// A type alias, e.g., `type Foo = Bar<u8>`
/// A type alias, e.g., `type Foo = Bar<u8>`.
TyAlias(P<Ty>, Generics),
/// An opaque `impl Trait` type alias, e.g., `type Foo = impl Bar;`
/// An opaque `impl Trait` type alias, e.g., `type Foo = impl Bar;`.
OpaqueTy(OpaqueTy),
/// An enum definition, e.g., `enum Foo<A, B> {C<A>, D<B>}`
/// An enum definition, e.g., `enum Foo<A, B> {C<A>, D<B>}`.
Enum(EnumDef, Generics),
/// A struct definition, e.g., `struct Foo<A> {x: A}`
/// A struct definition, e.g., `struct Foo<A> {x: A}`.
Struct(VariantData, Generics),
/// A union definition, e.g., `union Foo<A, B> {x: A, y: B}`
/// A union definition, e.g., `union Foo<A, B> {x: A, y: B}`.
Union(VariantData, Generics),
/// A trait definition
/// A trait definition.
Trait(IsAuto, Unsafety, Generics, GenericBounds, HirVec<TraitItemRef>),
/// A trait alias
/// A trait alias.
TraitAlias(Generics, GenericBounds),

/// An implementation, eg `impl<A> Trait for Foo { .. }`
/// An implementation, e.g., `impl<A> Trait for Foo { .. }`.
Impl(Unsafety,
ImplPolarity,
Defaultness,
@@ -1293,11 +1293,11 @@ impl<'a> State<'a> {
self.print_closure_params(&decl, body);
self.s.space();

// this is a bare expression
// This is a bare expression.
self.ann.nested(self, Nested::Body(body));
self.end(); // need to close a box

// a box will be closed by print_expr, but we didn't want an overall
// A box will be closed by `print_expr`, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
self.ibox(0);
@@ -1307,9 +1307,9 @@ impl<'a> State<'a> {
self.print_ident(label.ident);
self.word_space(":");
}
// containing cbox, will be closed by print-block at }
// containing cbox, will be closed by print-block at `}`
self.cbox(INDENT_UNIT);
// head-box, will be closed by print-block after {
// head-box, will be closed by print-block after `{`
self.ibox(0);
self.print_block(&blk);
}
@@ -1759,7 +1759,7 @@ impl<'a> State<'a> {
self.word_space(",");
}
if let PatKind::Wild = p.node {
// Print nothing
// Print nothing.
} else {
self.print_pat(&p);
}
@@ -1891,7 +1891,7 @@ impl<'a> State<'a> {
i += 1;

if let hir::TyKind::Infer = ty.node {
// Print nothing
// Print nothing.
} else {
s.s.word(":");
s.s.space();
@@ -2221,7 +2221,6 @@ impl<'a> State<'a> {
}
}

// Dup'ed from parse::classify, but adapted for the HIR.
/// Does this expression require a semicolon to be treated
/// as a statement? The negation of this: 'can this expression
/// be used as a statement without a semicolon' -- is used
@@ -2229,6 +2228,8 @@ impl<'a> State<'a> {
/// if true {...} else {...}
/// |x| 5
/// isn't parsed as (if true {...} else {...} | x) | 5
//
// Duplicated from `parse::classify`, but adapted for the HIR.
fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool {
match e.node {
hir::ExprKind::Match(..) |
@@ -2238,7 +2239,7 @@ fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool {
}
}

/// this statement requires a semicolon after it.
/// This statement requires a semicolon after it.
/// note that in one case (stmt_semi), we've already
/// seen the semicolon, and thus don't need another.
fn stmt_ends_with_semi(stmt: &hir::StmtKind) -> bool {
@@ -2277,7 +2278,7 @@ fn bin_op_to_assoc_op(op: hir::BinOpKind) -> AssocOp {
}
}

/// Expressions that syntactically contain an "exterior" struct literal i.e., not surrounded by any
/// Expressions that syntactically contain an "exterior" struct literal, i.e., not surrounded by any
/// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and
/// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not.
fn contains_exterior_struct_lit(value: &hir::Expr) -> bool {
@@ -2287,7 +2288,7 @@ fn contains_exterior_struct_lit(value: &hir::Expr) -> bool {
hir::ExprKind::Assign(ref lhs, ref rhs) |
hir::ExprKind::AssignOp(_, ref lhs, ref rhs) |
hir::ExprKind::Binary(_, ref lhs, ref rhs) => {
// X { y: 1 } + X { y: 2 }
// `X { y: 1 } + X { y: 2 }`
contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs)
}
hir::ExprKind::Unary(_, ref x) |
@@ -2295,12 +2296,12 @@ fn contains_exterior_struct_lit(value: &hir::Expr) -> bool {
hir::ExprKind::Type(ref x, _) |
hir::ExprKind::Field(ref x, _) |
hir::ExprKind::Index(ref x, _) => {
// &X { y: 1 }, X { y: 1 }.y
// `&X { y: 1 }, X { y: 1 }.y`
contains_exterior_struct_lit(&x)
}

hir::ExprKind::MethodCall(.., ref exprs) => {
// X { y: 1 }.bar(...)
// `X { y: 1 }.bar(...)`
contains_exterior_struct_lit(&exprs[0])
}
@@ -12,7 +12,6 @@ use std::hash as std_hash;
use std::cell::RefCell;

use syntax::ast;

use syntax::source_map::SourceMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
@@ -20,9 +19,9 @@ use syntax::tokenstream::DelimSpan;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene;

use rustc_data_structures::stable_hasher::{HashStable,
StableHasher, StableHasherResult,
ToStableHashKey};
use rustc_data_structures::stable_hasher::{
HashStable, StableHasher, StableHasherResult, ToStableHashKey,
};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use smallvec::SmallVec;

@@ -32,9 +31,9 @@ fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
}

/// This is the context state available during incr. comp. hashing. It contains
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
/// things (e.g., each DefId/DefPath is only hashed once).
/// enough information to transform `DefId`s and `HirId`s into stable `DefPath`s (i.e.,
/// a reference to the `TyCtxt`) and it holds a few caches for speeding up various
/// things (e.g., each `DefId`/`DefPath` is only hashed once).
#[derive(Clone)]
pub struct StableHashingContext<'a> {
sess: &'a Session,
@@ -46,7 +45,7 @@ pub struct StableHashingContext<'a> {
node_id_hashing_mode: NodeIdHashingMode,

// Very often, we are hashing something that does not need the
// CachingSourceMapView, so we initialize it lazily.
// `CachingSourceMapView`, so we initialize it lazily.
raw_source_map: &'a SourceMap,
caching_source_map: Option<CachingSourceMapView<'a>>,
}
@@ -57,24 +56,24 @@ pub enum NodeIdHashingMode {
HashDefPath,
}

/// The BodyResolver allows to map a BodyId to the corresponding hir::Body.
/// We could also just store a plain reference to the hir::Crate but we want
/// The `BodyResolver` allows mapping a `BodyId` to the corresponding `hir::Body`.
/// We could also just store a plain reference to the `hir::Crate` but we want
/// to avoid that the crate is used to get untracked access to all of the HIR.
#[derive(Clone, Copy)]
struct BodyResolver<'tcx>(&'tcx hir::Crate);

impl<'tcx> BodyResolver<'tcx> {
// Return a reference to the hir::Body with the given BodyId.
// DOES NOT DO ANY TRACKING, use carefully.
/// Returns a reference to the `hir::Body` with the given `BodyId`.
/// **Does not do any tracking**; use carefully.
fn body(self, id: hir::BodyId) -> &'tcx hir::Body {
self.0.body(id)
}
}

impl<'a> StableHashingContext<'a> {
// The `krate` here is only used for mapping BodyIds to Bodies.
// Don't use it for anything else or you'll run the risk of
// leaking data out of the tracking system.
/// The `krate` here is only used for mapping `BodyId`s to `Body`s.
/// Don't use it for anything else or you'll run the risk of
/// leaking data out of the tracking system.
#[inline]
pub fn new(sess: &'a Session,
krate: &'a hir::Crate,
@@ -217,9 +216,7 @@ impl<'a> StableHashingContextProvider<'a> for StableHashingContext<'a> {
}
}

impl<'a> crate::dep_graph::DepGraphSafe for StableHashingContext<'a> {
}

impl<'a> crate::dep_graph::DepGraphSafe for StableHashingContext<'a> {}

impl<'a> HashStable<StableHashingContext<'a>> for hir::BodyId {
fn hash_stable<W: StableHasherResult>(&self,
@@ -292,16 +289,15 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::NodeId {
}

impl<'a> HashStable<StableHashingContext<'a>> for Span {

// Hash a span in a stable way. We can't directly hash the span's BytePos
// fields (that would be similar to hashing pointers, since those are just
// offsets into the SourceMap). Instead, we hash the (file name, line, column)
// triple, which stays the same even if the containing SourceFile has moved
// within the SourceMap.
// Also note that we are hashing byte offsets for the column, not unicode
// codepoint offsets. For the purpose of the hash that's sufficient.
// Also, hashing filenames is expensive so we avoid doing it twice when the
// span starts and ends in the same file, which is almost always the case.
/// Hashes a span in a stable way. We can't directly hash the span's `BytePos`
/// fields (that would be similar to hashing pointers, since those are just
/// offsets into the `SourceMap`). Instead, we hash the (file name, line, column)
/// triple, which stays the same even if the containing `SourceFile` has moved
/// within the `SourceMap`.
/// Also note that we are hashing byte offsets for the column, not unicode
/// codepoint offsets. For the purpose of the hash that's sufficient.
/// Also, hashing filenames is expensive so we avoid doing it twice when the
/// span starts and ends in the same file, which is almost always the case.
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
@@ -340,7 +336,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
}

std_hash::Hash::hash(&TAG_VALID_SPAN, hasher);
// We truncate the stable_id hash and line and col numbers. The chances
// We truncate the stable ID hash and line and column numbers. The chances
// of causing a collision this way should be minimal.
std_hash::Hash::hash(&(file_lo.name_hash as u64), hasher);
@@ -5,8 +5,10 @@ use crate::hir;
use crate::hir::map::DefPathHash;
use crate::hir::def_id::{DefId, LocalDefId, CrateNum, CRATE_DEF_INDEX};
use crate::ich::{StableHashingContext, NodeIdHashingMode, Fingerprint};
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
StableHasher, StableHasherResult};

use rustc_data_structures::stable_hasher::{
HashStable, ToStableHashKey, StableHasher, StableHasherResult,
};
use smallvec::SmallVec;
use std::mem;
use syntax::ast;
@@ -82,9 +84,9 @@ for hir::ItemLocalId {
}
}

// The following implementations of HashStable for ItemId, TraitItemId, and
// ImplItemId deserve special attention. Normally we do not hash NodeIds within
// the HIR, since they just signify a HIR nodes own path. But ItemId et al
// The following implementations of HashStable for `ItemId`, `TraitItemId`, and
// `ImplItemId` deserve special attention. Normally we do not hash `NodeId`s within
// the HIR, since they just signify a HIR nodes own path. But `ItemId` et al
// are used when another item in the HIR is *referenced* and we certainly
// want to pick up on a reference changing its target, so we hash the NodeIds
// in "DefPath Mode".
@@ -131,7 +133,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::ImplItemId {
}
}

impl_stable_hash_for!(struct ast::Label {
ident
});
@@ -241,7 +242,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::ImplItem {
}
}

impl_stable_hash_for!(enum ::syntax::ast::CrateSugar {
impl_stable_hash_for!(enum ast::CrateSugar {
JustCrate,
PubCrate,
});
@@ -365,8 +366,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for hir::def_id::DefIndex {
}
}

impl<'a> ToStableHashKey<StableHashingContext<'a>>
for hir::def_id::DefIndex {
impl<'a> ToStableHashKey<StableHashingContext<'a>> for hir::def_id::DefIndex {
type KeyType = DefPathHash;

#[inline]
@@ -204,7 +204,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ty::TyVid {
fn hash_stable<W: StableHasherResult>(&self,
_hcx: &mut StableHashingContext<'a>,
_hasher: &mut StableHasher<W>) {
// TyVid values are confined to an inference context and hence
// `TyVid` values are confined to an inference context and hence
// should not be hashed.
bug!("ty::TyKind::hash_stable() - can't hash a TyVid {:?}.", *self)
}
@@ -214,7 +214,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ty::IntVid {
fn hash_stable<W: StableHasherResult>(&self,
_hcx: &mut StableHashingContext<'a>,
_hasher: &mut StableHasher<W>) {
// IntVid values are confined to an inference context and hence
// `IntVid` values are confined to an inference context and hence
// should not be hashed.
bug!("ty::TyKind::hash_stable() - can't hash an IntVid {:?}.", *self)
}
@@ -224,7 +224,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ty::FloatVid {
fn hash_stable<W: StableHasherResult>(&self,
_hcx: &mut StableHashingContext<'a>,
_hasher: &mut StableHasher<W>) {
// FloatVid values are confined to an inference context and hence
// `FloatVid` values are confined to an inference context and hence
// should not be hashed.
bug!("ty::TyKind::hash_stable() - can't hash a FloatVid {:?}.", *self)
}
@ -1321,13 +1321,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
T: TypeFoldable<'tcx>,
|
||||
{
|
||||
if !value.needs_infer() {
|
||||
return value.clone(); // avoid duplicated subst-folding
|
||||
return value.clone(); // Avoid duplicated subst-folding.
|
||||
}
|
||||
let mut r = resolve::OpportunisticVarResolver::new(self);
|
||||
value.fold_with(&mut r)
|
||||
}
|
||||
|
||||
/// Returns first unresolved variable contained in `T`. In the
|
||||
/// Returns the first unresolved variable contained in `T`. In the
|
||||
/// process of visiting `T`, this will resolve (where possible)
|
||||
/// type variables in `T`, but it never constructs the final,
|
||||
/// resolved type, so it's more efficient than
|
||||
@ -1462,7 +1462,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
|
||||
let copy_def_id = self.tcx.require_lang_item(lang_items::CopyTraitLangItem, None);
|
||||
|
||||
// this can get called from typeck (by euv), and moves_by_default
|
||||
// This can get called from typeck (by euv), and `moves_by_default`
|
||||
// rightly refuses to work with inference variables, but
|
||||
// moves_by_default has a cache, which we want to use in other
|
||||
// cases.
|
||||
@ -1482,7 +1482,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
|
||||
closure_kind_ty.to_opt_closure_kind()
|
||||
}
|
||||
|
||||
/// Obtain the signature of a closure. For closures, unlike
|
||||
/// Obtains the signature of a closure. For closures, unlike
|
||||
/// `tcx.fn_sig(def_id)`, this method will work during the
|
||||
/// type-checking of the enclosing function and return the closure
|
||||
/// signature in its partially inferred state.
|
||||
|
@ -45,7 +45,6 @@
|
||||
#![feature(non_exhaustive)]
|
||||
#![feature(optin_builtin_traits)]
|
||||
#![feature(range_is_empty)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(slice_patterns)]
|
||||
#![feature(specialization)]
|
||||
#![feature(unboxed_closures)]
|
||||
@ -88,8 +87,6 @@ mod tests;
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
// N.B., this module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
pub mod error_codes;
|
||||
|
||||
#[macro_use]
|
||||
@ -142,6 +139,3 @@ pub mod util {
|
||||
|
||||
// Allows macros to refer to this crate as `::rustc`
|
||||
extern crate self as rustc;
|
||||
|
||||
// Build the diagnostics array at the end so that the metadata includes error use sites.
|
||||
__build_diagnostic_array! { librustc, DIAGNOSTICS }
|
||||
|
@ -16,32 +16,32 @@
|
||||
|
||||
use self::TargetLint::*;
|
||||
|
||||
use std::slice;
|
||||
use rustc_data_structures::sync::{ReadGuard, Lock, ParallelIterator, join, par_iter};
|
||||
use crate::hir;
|
||||
use crate::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
||||
use crate::hir::intravisit as hir_visit;
|
||||
use crate::hir::intravisit::Visitor;
|
||||
use crate::hir::map::{definitions::DisambiguatedDefPathData, DefPathData};
|
||||
use crate::lint::{EarlyLintPass, LateLintPass, EarlyLintPassObject, LateLintPassObject};
|
||||
use crate::lint::{LintArray, Level, Lint, LintId, LintPass, LintBuffer};
|
||||
use crate::lint::builtin::BuiltinLintDiagnostics;
|
||||
use crate::lint::levels::{LintLevelSets, LintLevelsBuilder};
|
||||
use crate::middle::privacy::AccessLevels;
|
||||
use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
|
||||
use crate::session::{config, early_error, Session};
|
||||
use crate::ty::{self, print::Printer, subst::Kind, TyCtxt, Ty};
|
||||
use crate::ty::layout::{LayoutError, LayoutOf, TyLayout};
|
||||
use crate::util::nodemap::FxHashMap;
|
||||
use crate::util::common::time;
|
||||
|
||||
use errors::DiagnosticBuilder;
|
||||
use std::slice;
|
||||
use std::default::Default as StdDefault;
|
||||
use rustc_data_structures::sync::{ReadGuard, Lock, ParallelIterator, join, par_iter};
|
||||
use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
|
||||
use syntax::ast;
|
||||
use syntax::edition;
|
||||
use syntax_pos::{MultiSpan, Span, symbol::Symbol};
|
||||
use errors::DiagnosticBuilder;
|
||||
use crate::hir;
|
||||
use crate::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
||||
use crate::hir::intravisit as hir_visit;
|
||||
use crate::hir::intravisit::Visitor;
|
||||
use crate::hir::map::{definitions::DisambiguatedDefPathData, DefPathData};
|
||||
use syntax::util::lev_distance::find_best_match_for_name;
|
||||
use syntax::visit as ast_visit;
|
||||
use syntax_pos::{MultiSpan, Span, symbol::Symbol};
|
||||
|
||||
/// Information about the registered lints.
|
||||
///
|
||||
|
@ -875,12 +875,12 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
|
||||
ExpnKind::AstPass(_) | ExpnKind::Desugaring(_) => true, // well, it's "external"
|
||||
ExpnKind::Macro(MacroKind::Bang, _) => {
|
||||
if expn_data.def_site.is_dummy() {
|
||||
// dummy span for the def_site means it's an external macro
|
||||
// Dummy span for the `def_site` means it's an external macro.
|
||||
return true;
|
||||
}
|
||||
match sess.source_map().span_to_snippet(expn_data.def_site) {
|
||||
Ok(code) => !code.starts_with("macro_rules"),
|
||||
// no snippet = external macro or compiler-builtin expansion
|
||||
// No snippet means external macro or compiler-builtin expansion.
|
||||
Err(_) => true,
|
||||
}
|
||||
}
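The rewritten comments in this hunk describe a small decision procedure: a dummy def-site span, or a def-site whose snippet cannot be retrieved, is treated as an external macro, while a retrievable `macro_rules` definition counts as local. A condensed sketch of that logic, with `def_site_is_dummy` and `def_site_snippet` standing in for the real `Span` and `SourceMap` queries:

/// Simplified stand-in for the check above: does the expansion come from a
/// macro defined outside the current crate? The two parameters abstract over
/// the `Span`/`SourceMap` lookups the lint context performs.
fn looks_like_external_macro(def_site_is_dummy: bool, def_site_snippet: Option<&str>) -> bool {
    if def_site_is_dummy {
        // A dummy span for the def-site means it's an external macro.
        return true;
    }
    match def_site_snippet {
        // A local `macro_rules!` definition is considered internal.
        Some(code) => !code.starts_with("macro_rules"),
        // No snippet: external macro or compiler-builtin expansion.
        None => true,
    }
}

fn main() {
    assert!(looks_like_external_macro(true, None));
    assert!(!looks_like_external_macro(false, Some("macro_rules! foo { () => {} }")));
    assert!(looks_like_external_macro(false, Some("proc-macro expansion")));
}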
|
||||
@ -888,7 +888,7 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns whether `span` originates in a derive macro's expansion
|
||||
/// Returns `true` if `span` originates in a derive-macro's expansion.
|
||||
pub fn in_derive_expansion(span: Span) -> bool {
|
||||
if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_data().kind {
|
||||
return true;
|
||||
|
@ -16,17 +16,17 @@ struct EntryContext<'a, 'tcx> {
|
||||
|
||||
map: &'a hir_map::Map<'tcx>,
|
||||
|
||||
// The top-level function called 'main'
|
||||
/// The top-level function called 'main'.
|
||||
main_fn: Option<(HirId, Span)>,
|
||||
|
||||
// The function that has attribute named 'main'
|
||||
/// The function that has attribute named 'main'.
|
||||
attr_main_fn: Option<(HirId, Span)>,
|
||||
|
||||
// The function that has the attribute 'start' on it
|
||||
/// The function that has the attribute 'start' on it.
|
||||
start_fn: Option<(HirId, Span)>,
|
||||
|
||||
// The functions that one might think are 'main' but aren't, e.g.
|
||||
// main functions not defined at the top level. For diagnostics.
|
||||
/// The functions that one might think are 'main' but aren't, e.g.
|
||||
/// main functions not defined at the top level. For diagnostics.
|
||||
non_main_fns: Vec<(HirId, Span)> ,
|
||||
}
|
||||
|
||||
@ -39,11 +39,11 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> {
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, _trait_item: &'tcx TraitItem) {
|
||||
// entry fn is never a trait item
|
||||
// Entry fn is never a trait item.
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, _impl_item: &'tcx ImplItem) {
// entry fn is never an impl item
// Entry fn is never an impl item.
}
|
||||
}
|
||||
|
||||
@ -54,7 +54,7 @@ fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(DefId, EntryFnType)> {
|
||||
*ty == config::CrateType::Executable
|
||||
});
|
||||
if !any_exe {
|
||||
// No need to find a main function
|
||||
// No need to find a main function.
|
||||
return None;
|
||||
}
|
||||
|
||||
@ -148,7 +148,7 @@ fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) -> Option<(De
|
||||
} else if let Some((hir_id, _)) = visitor.main_fn {
|
||||
Some((tcx.hir().local_def_id(hir_id), EntryFnType::Main))
|
||||
} else {
|
||||
// No main function
|
||||
// There is no main function.
|
||||
let mut err = struct_err!(tcx.sess, E0601,
|
||||
"`main` function not found in crate `{}`", tcx.crate_name(LOCAL_CRATE));
|
||||
if !visitor.non_main_fns.is_empty() {
|
||||
|
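The `configure_main` hunk above falls back from the more specific entry points to a plain top-level `main`, and reports E0601 when none is found. A minimal sketch of such a fallback chain over hypothetical `Option` fields (field names borrowed from the `EntryContext` struct earlier in the diff; the real code works with `HirId`s, `DefId`s, and a `TyCtxt`):

#[derive(Clone, Copy, Debug, PartialEq)]
enum EntryFnType { Start, Main }

// Hypothetical mirror of the visitor's findings; the real code stores
// `(HirId, Span)` pairs and converts the winner to a `DefId`.
struct Findings {
    start_fn: Option<u32>,
    attr_main_fn: Option<u32>,
    main_fn: Option<u32>,
}

fn configure_main(f: &Findings) -> Result<(u32, EntryFnType), &'static str> {
    if let Some(id) = f.start_fn {
        Ok((id, EntryFnType::Start))
    } else if let Some(id) = f.attr_main_fn {
        Ok((id, EntryFnType::Main))
    } else if let Some(id) = f.main_fn {
        Ok((id, EntryFnType::Main))
    } else {
        // There is no main function: the real code emits error E0601 here.
        Err("`main` function not found")
    }
}

fn main() {
    let f = Findings { start_fn: None, attr_main_fn: None, main_fn: Some(7) };
    assert_eq!(configure_main(&f), Ok((7, EntryFnType::Main)));
}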
@ -596,7 +596,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
|
||||
}
|
||||
|
||||
hir::StmtKind::Item(_) => {
|
||||
// we don't visit nested items in this visitor,
|
||||
// We don't visit nested items in this visitor,
|
||||
// only the fn body we were given.
|
||||
}
|
||||
|
||||
|
@@ -3,10 +3,8 @@
//! Language items are items that represent concepts intrinsic to the language
//! itself. Examples are:
//!
//! * Traits that specify "kinds"; e.g., "Sync", "Send".
//!
//! * Traits that represent operators; e.g., "Add", "Sub", "Index".
//!
//! * Traits that specify "kinds"; e.g., `Sync`, `Send`.
//! * Traits that represent operators; e.g., `Add`, `Sub`, `Index`.
//! * Functions called by the compiler itself.

pub use self::LangItem::*;
|
||||
@ -151,11 +149,11 @@ impl ItemLikeVisitor<'v> for LanguageItemCollector<'tcx> {
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
|
||||
// at present, lang items are always items, not trait items
|
||||
// At present, lang items are always items, not trait items.
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
|
||||
// at present, lang items are always items, not impl items
|
||||
// At present, lang items are always items, not impl items.
|
||||
}
|
||||
}
|
||||
|
||||
@ -204,7 +202,7 @@ impl LanguageItemCollector<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the first `lang = "$name"` out of a list of attributes.
|
||||
/// Extracts the first `lang = "$name"` out of a list of attributes.
|
||||
/// The attributes `#[panic_handler]` and `#[alloc_error_handler]`
|
||||
/// are also extracted out when found.
|
||||
pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
|
||||
@ -216,7 +214,7 @@ pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
|
||||
}))
|
||||
}
|
||||
|
||||
/// Traverse and collect all the lang items in all crates.
|
||||
/// Traverses and collects all the lang items in all crates.
|
||||
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> LanguageItems {
|
||||
// Initialize the collector.
|
||||
let mut collector = LanguageItemCollector::new(tcx);
|
||||
@ -367,7 +365,7 @@ language_item_table! {
|
||||
|
||||
MaybeUninitLangItem, "maybe_uninit", maybe_uninit, Target::Union;
|
||||
|
||||
// Align offset for stride != 1, must not panic.
|
||||
// Align offset for stride != 1; must not panic.
|
||||
AlignOffsetLangItem, "align_offset", align_offset_fn, Target::Fn;
|
||||
|
||||
TerminationTraitLangItem, "termination", termination, Target::Trait;
|
||||
@ -378,7 +376,7 @@ language_item_table! {
|
||||
|
||||
impl<'tcx> TyCtxt<'tcx> {
|
||||
/// Returns the `DefId` for a given `LangItem`.
|
||||
/// If not found, fatally abort compilation.
|
||||
/// If not found, fatally aborts compilation.
|
||||
pub fn require_lang_item(&self, lang_item: LangItem, span: Option<Span>) -> DefId {
|
||||
self.lang_items().require(lang_item).unwrap_or_else(|msg| {
|
||||
if let Some(span) = span {
|
||||
|
@ -6,29 +6,27 @@
|
||||
//!
|
||||
//! [rustc guide]: https://rust-lang.github.io/rustc-guide/mir/borrowck.html
|
||||
|
||||
use crate::ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use crate::util::nodemap::{FxHashMap, FxHashSet};
|
||||
use crate::ty;
|
||||
|
||||
use std::mem;
|
||||
use std::fmt;
|
||||
use rustc_macros::HashStable;
|
||||
use syntax::source_map;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use crate::ty::{DefIdTree, TyCtxt};
|
||||
use crate::ty::query::Providers;
|
||||
|
||||
use crate::hir;
|
||||
use crate::hir::Node;
|
||||
use crate::hir::def_id::DefId;
|
||||
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
|
||||
use crate::hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use crate::ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use crate::util::nodemap::{FxHashMap, FxHashSet};
|
||||
use crate::ty::{self, DefIdTree, TyCtxt};
|
||||
use crate::ty::query::Providers;
|
||||
|
||||
/// Scope represents a statically-describable scope that can be
|
||||
/// used to bound the lifetime/region for values.
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
|
||||
use rustc_macros::HashStable;
|
||||
use syntax::source_map;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
|
||||
use std::fmt;
|
||||
use std::mem;
|
||||
|
||||
/// Represents a statically-describable scope that can be used to
|
||||
/// bound the lifetime/region for values.
|
||||
///
|
||||
/// `Node(node_id)`: Any AST node that has any scope at all has the
|
||||
/// `Node(node_id)` scope. Other variants represent special cases not
|
||||
@ -225,7 +223,7 @@ pub struct ScopeTree {
|
||||
/// have lifetime parameters free in this body.
|
||||
root_parent: Option<hir::HirId>,
|
||||
|
||||
/// `parent_map` maps from a scope ID to the enclosing scope id;
|
||||
/// Maps from a scope ID to the enclosing scope id;
|
||||
/// this is usually corresponding to the lexical nesting, though
|
||||
/// in the case of closures the parent scope is the innermost
|
||||
/// conditional expression or repeating block. (Note that the
|
||||
@ -233,17 +231,17 @@ pub struct ScopeTree {
|
||||
/// the closure itself.)
|
||||
parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>,
|
||||
|
||||
/// `var_map` maps from a variable or binding ID to the block in
|
||||
/// which that variable is declared.
|
||||
/// Maps from a variable or binding ID to the block in which that
|
||||
/// variable is declared.
|
||||
var_map: FxHashMap<hir::ItemLocalId, Scope>,
|
||||
|
||||
/// maps from a `NodeId` to the associated destruction scope (if any)
|
||||
/// Maps from a `NodeId` to the associated destruction scope (if any).
|
||||
destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>,
|
||||
|
||||
/// `rvalue_scopes` includes entries for those expressions whose cleanup scope is
|
||||
/// larger than the default. The map goes from the expression id
|
||||
/// to the cleanup scope id. For rvalues not present in this
|
||||
/// table, the appropriate cleanup scope is the innermost
|
||||
/// `rvalue_scopes` includes entries for those expressions whose
|
||||
/// cleanup scope is larger than the default. The map goes from the
|
||||
/// expression ID to the cleanup scope id. For rvalues not present in
|
||||
/// this table, the appropriate cleanup scope is the innermost
|
||||
/// enclosing statement, conditional expression, or repeating
|
||||
/// block (see `terminating_scopes`).
|
||||
/// In constants, None is used to indicate that certain expressions
|
||||
@ -318,7 +316,7 @@ pub struct ScopeTree {
|
||||
/// 4. By `2.` and `3.`, `D` is *statically* storage-dead at `U`,
|
||||
/// QED.
|
||||
///
|
||||
/// I don't think this property relies on `3.` in an essential way - it
|
||||
/// This property ought to not rely on (3) in an essential way -- it
|
||||
/// is probably still correct even if we have "unrestricted" terminating
|
||||
/// scopes. However, why use the complicated proof when a simple one
|
||||
/// works?
|
||||
@ -341,20 +339,20 @@ pub struct ScopeTree {
|
||||
|
||||
#[derive(Debug, Copy, Clone, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct YieldData {
|
||||
/// `Span` of the yield.
|
||||
/// The `Span` of the yield.
|
||||
pub span: Span,
|
||||
/// The number of expressions and patterns appearing before the `yield` in the body + 1.
|
||||
/// The number of expressions and patterns appearing before the `yield` in the body plus one.
|
||||
pub expr_and_pat_count: usize,
|
||||
pub source: hir::YieldSource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct Context {
|
||||
/// the root of the current region tree. This is typically the id
|
||||
/// The root of the current region tree. This is typically the id
|
||||
/// of the innermost fn body. Each fn forms its own disjoint tree
|
||||
/// in the region hierarchy. These fn bodies are themselves
|
||||
/// arranged into a tree. See the "Modeling closures" section of
|
||||
/// the README in infer::region_constraints for more
|
||||
/// the README in `infer::region_constraints` for more
|
||||
/// details.
|
||||
root_id: Option<hir::ItemLocalId>,
|
||||
|
||||
@ -369,15 +367,15 @@ pub struct Context {
|
||||
struct RegionResolutionVisitor<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
|
||||
// The number of expressions and patterns visited in the current body
|
||||
// The number of expressions and patterns visited in the current body.
|
||||
expr_and_pat_count: usize,
|
||||
// When this is `true`, we record the `Scopes` we encounter
|
||||
// when processing a Yield expression. This allows us to fix
|
||||
// up their indices.
|
||||
pessimistic_yield: bool,
|
||||
// Stores scopes when pessimistic_yield is true.
|
||||
// Stores scopes when `pessimistic_yield` is `true`.
|
||||
fixup_scopes: Vec<Scope>,
|
||||
// Generated scope tree:
|
||||
// The generated scope tree.
|
||||
scope_tree: ScopeTree,
|
||||
|
||||
cx: Context,
|
||||
@ -411,7 +409,7 @@ struct ExprLocatorVisitor {
|
||||
expr_and_pat_count: usize,
|
||||
}
|
||||
|
||||
// This visitor has to have the same visit_expr calls as RegionResolutionVisitor
|
||||
// This visitor has to have the same `visit_expr` calls as `RegionResolutionVisitor`
|
||||
// since `expr_count` is compared against the results there.
|
||||
impl<'tcx> Visitor<'tcx> for ExprLocatorVisitor {
|
||||
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
|
||||
@ -456,7 +454,7 @@ impl<'tcx> ScopeTree {
|
||||
assert!(prev.is_none());
|
||||
}
|
||||
|
||||
// record the destruction scopes for later so we can query them
|
||||
// Record the destruction scopes for later so we can query them.
|
||||
if let ScopeData::Destruction = child.data {
|
||||
self.destruction_scopes.insert(child.item_local_id(), child);
|
||||
}
|
||||
@ -478,7 +476,7 @@ impl<'tcx> ScopeTree {
|
||||
self.destruction_scopes.get(&n).cloned()
|
||||
}
|
||||
|
||||
/// Records that `sub_closure` is defined within `sup_closure`. These ids
|
||||
/// Records that `sub_closure` is defined within `sup_closure`. These IDs
|
||||
/// should be the ID of the block that is the fn body, which is
|
||||
/// also the root of the region hierarchy for that fn.
|
||||
fn record_closure_parent(&mut self,
|
||||
@ -505,14 +503,14 @@ impl<'tcx> ScopeTree {
|
||||
self.rvalue_scopes.insert(var, lifetime);
|
||||
}
|
||||
|
||||
/// Returns the narrowest scope that encloses `id`, if any.
|
||||
pub fn opt_encl_scope(&self, id: Scope) -> Option<Scope> {
|
||||
//! Returns the narrowest scope that encloses `id`, if any.
|
||||
self.parent_map.get(&id).cloned().map(|(p, _)| p)
|
||||
}
|
||||
|
||||
/// Returns the narrowest scope that encloses `id`, if any.
|
||||
#[allow(dead_code)] // used in cfg
|
||||
pub fn encl_scope(&self, id: Scope) -> Scope {
|
||||
//! Returns the narrowest scope that encloses `id`, if any.
|
||||
self.opt_encl_scope(id).unwrap()
|
||||
}
|
||||
|
||||
@ -522,16 +520,15 @@ impl<'tcx> ScopeTree {
|
||||
bug!("no enclosing scope for id {:?}", var_id))
|
||||
}
|
||||
|
||||
/// Returns the scope when the temp created by `expr_id` will be cleaned up.
|
||||
pub fn temporary_scope(&self, expr_id: hir::ItemLocalId) -> Option<Scope> {
|
||||
//! Returns the scope when temp created by expr_id will be cleaned up
|
||||
|
||||
// check for a designated rvalue scope
|
||||
// Check for a designated rvalue scope.
|
||||
if let Some(&s) = self.rvalue_scopes.get(&expr_id) {
|
||||
debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s);
|
||||
return s;
|
||||
}
|
||||
|
||||
// else, locate the innermost terminating scope
|
||||
// Otherwise, locate the innermost terminating scope
|
||||
// if there's one. Static items, for instance, won't
|
||||
// have an enclosing scope, hence no scope will be
|
||||
// returned.
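Summarizing the two comments in this hunk: the lookup first consults the designated rvalue-scope table and otherwise walks outward until it reaches a terminating scope, or runs out of enclosing scopes (as for statics). A toy version of that two-step lookup using plain `HashMap`s instead of the real `ItemLocalId`/`Scope` types:

use std::collections::HashMap;

type Scope = u32;

struct ToyScopeTree {
    // Expressions whose temporaries got a custom (possibly absent) cleanup scope.
    rvalue_scopes: HashMap<u32, Option<Scope>>,
    // Child scope -> enclosing scope.
    parent_map: HashMap<Scope, Scope>,
    // Scopes that clean up temporaries.
    terminating: Vec<Scope>,
}

impl ToyScopeTree {
    // (Toy simplification: expression ids and scope ids share one numeric space.)
    fn temporary_scope(&self, expr_id: u32) -> Option<Scope> {
        // 1. Check for a designated rvalue scope.
        if let Some(&s) = self.rvalue_scopes.get(&expr_id) {
            return s;
        }
        // 2. Otherwise walk up to the innermost terminating scope, if any.
        let mut id = expr_id as Scope;
        loop {
            if self.terminating.contains(&id) {
                return Some(id);
            }
            match self.parent_map.get(&id) {
                Some(&parent) => id = parent,
                // E.g. static items have no enclosing scope.
                None => return None,
            }
        }
    }
}

fn main() {
    let tree = ToyScopeTree {
        rvalue_scopes: HashMap::new(),
        parent_map: vec![(3, 2), (2, 1)].into_iter().collect(),
        terminating: vec![1],
    };
    assert_eq!(tree.temporary_scope(3), Some(1));
}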
|
||||
@ -552,9 +549,8 @@ impl<'tcx> ScopeTree {
|
||||
return None;
|
||||
}
|
||||
|
||||
/// Returns the lifetime of the variable `id`.
|
||||
pub fn var_region(&self, id: hir::ItemLocalId) -> ty::RegionKind {
|
||||
//! Returns the lifetime of the variable `id`.
|
||||
|
||||
let scope = ty::ReScope(self.var_scope(id));
|
||||
debug!("var_region({:?}) = {:?}", id, scope);
|
||||
scope
|
||||
@ -589,7 +585,7 @@ impl<'tcx> ScopeTree {
|
||||
return true;
|
||||
}
|
||||
|
||||
/// Returns the ID of the innermost containing body
|
||||
/// Returns the ID of the innermost containing body.
|
||||
pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> {
|
||||
loop {
|
||||
if let ScopeData::CallSite = scope.data {
|
||||
|
@ -27,7 +27,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Cache {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
_: &mut StableHashingContext<'a>,
|
||||
_: &mut StableHasher<W>) {
|
||||
// do nothing
|
||||
// Do nothing.
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,16 +4,17 @@ use super::{
|
||||
Pointer, InterpResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
|
||||
};
|
||||
|
||||
use crate::ty::layout::{Size, Align};
|
||||
use syntax::ast::Mutability;
|
||||
use std::iter;
|
||||
use crate::mir;
|
||||
use std::ops::{Range, Deref, DerefMut};
|
||||
use crate::ty::layout::{Size, Align};
|
||||
|
||||
use rustc_data_structures::sorted_map::SortedMap;
|
||||
use rustc_target::abi::HasDataLayout;
|
||||
use syntax::ast::Mutability;
|
||||
use std::iter;
|
||||
use std::ops::{Range, Deref, DerefMut};
|
||||
use std::borrow::Cow;
|
||||
|
||||
// NOTE: When adding new fields, make sure to adjust the Snapshot impl in
|
||||
// NOTE: When adding new fields, make sure to adjust the `Snapshot` impl in
|
||||
// `src/librustc_mir/interpret/snapshot.rs`.
|
||||
#[derive(
|
||||
Clone,
|
||||
@ -27,7 +28,7 @@ use std::borrow::Cow;
|
||||
RustcDecodable,
|
||||
HashStable,
|
||||
)]
|
||||
pub struct Allocation<Tag=(),Extra=()> {
|
||||
pub struct Allocation<Tag = (), Extra = ()> {
|
||||
/// The actual bytes of the allocation.
|
||||
/// Note that the bytes of a pointer represent the offset of the pointer.
|
||||
bytes: Vec<u8>,
|
||||
@ -42,7 +43,7 @@ pub struct Allocation<Tag=(),Extra=()> {
|
||||
pub size: Size,
|
||||
/// The alignment of the allocation to detect unaligned reads.
|
||||
pub align: Align,
|
||||
/// Whether the allocation is mutable.
|
||||
/// `true` if the allocation is mutable.
|
||||
/// Also used by codegen to determine if a static should be put into mutable memory,
|
||||
/// which happens for `static mut` and `static` with interior mutability.
|
||||
pub mutability: Mutability,
|
||||
@ -50,7 +51,6 @@ pub struct Allocation<Tag=(),Extra=()> {
|
||||
pub extra: Extra,
|
||||
}
|
||||
|
||||
|
||||
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Clone {
|
||||
// There is no constructor in here because the constructor's type depends
|
||||
// on `MemoryKind`, and making things sufficiently generic leads to painful
|
||||
@ -92,7 +92,7 @@ pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Clone {
|
||||
}
|
||||
}
|
||||
|
||||
// For Tag=() and no extra state, we have is a trivial implementation.
|
||||
// For `Tag = ()` and no extra state, we have a trivial implementation.
|
||||
impl AllocationExtra<()> for () { }
|
||||
|
||||
// The constructors are all without extra; the extra gets added by a machine hook later.
|
||||
@ -185,7 +185,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
|
||||
impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx Allocation {}
|
||||
|
||||
/// Byte accessors
|
||||
/// Byte accessors.
|
||||
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
/// Just a small local helper function to avoid a bit of code repetition.
|
||||
/// Returns the range of this allocation that was meant.
|
||||
@ -195,7 +195,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
offset: Size,
|
||||
size: Size
|
||||
) -> Range<usize> {
|
||||
let end = offset + size; // this does overflow checking
|
||||
let end = offset + size; // This does overflow checking.
|
||||
assert_eq!(
|
||||
end.bytes() as usize as u64, end.bytes(),
|
||||
"cannot handle this access on this host architecture"
|
||||
@ -232,7 +232,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
self.check_defined(ptr, size)?;
|
||||
self.check_relocations(cx, ptr, size)?;
|
||||
} else {
|
||||
// We still don't want relocations on the *edges*
|
||||
// We still don't want relocations on the *edges*.
|
||||
self.check_relocation_edges(cx, ptr, size)?;
|
||||
}
|
||||
|
||||
@ -241,7 +241,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
Ok(&self.bytes[range])
|
||||
}
|
||||
|
||||
/// Check that these bytes are initialized and not pointer bytes, and then return them
|
||||
/// Checks that these bytes are initialized and not pointer bytes, and then returns them
|
||||
/// as a slice.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
@ -293,7 +293,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Reading and writing
|
||||
/// Reading and writing.
|
||||
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
/// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
|
||||
/// before a `0` is found.
|
||||
@ -329,9 +329,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
allow_ptr_and_undef: bool,
|
||||
) -> InterpResult<'tcx>
|
||||
{
|
||||
// Check bounds and relocations on the edges
|
||||
// Check bounds and relocations on the edges.
|
||||
self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
|
||||
// Check undef and ptr
|
||||
// Check undef and ptr.
|
||||
if !allow_ptr_and_undef {
|
||||
self.check_defined(ptr, size)?;
|
||||
self.check_relocations(cx, ptr, size)?;
|
||||
@ -372,12 +372,12 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Read a *non-ZST* scalar
|
||||
/// Reads a *non-ZST* scalar.
|
||||
///
|
||||
/// zsts can't be read out of two reasons:
|
||||
/// * byteorder cannot work with zero element buffers
|
||||
/// * in order to obtain a `Pointer` we need to check for ZSTness anyway due to integer pointers
|
||||
/// being valid for ZSTs
|
||||
/// ZSTs can't be read for two reasons:
|
||||
/// * byte-order cannot work with zero-element buffers;
|
||||
/// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
|
||||
/// pointers being valid for ZSTs.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
pub fn read_scalar(
|
||||
@ -387,20 +387,20 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
size: Size
|
||||
) -> InterpResult<'tcx, ScalarMaybeUndef<Tag>>
|
||||
{
|
||||
// get_bytes_unchecked tests relocation edges
|
||||
// `get_bytes_unchecked` tests relocation edges.
|
||||
let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
|
||||
// Undef check happens *after* we established that the alignment is correct.
|
||||
// We must not return Ok() for unaligned pointers!
|
||||
// We must not return `Ok()` for unaligned pointers!
|
||||
if self.check_defined(ptr, size).is_err() {
|
||||
// this inflates undefined bytes to the entire scalar, even if only a few
|
||||
// bytes are undefined
|
||||
// This inflates undefined bytes to the entire scalar, even if only a few
|
||||
// bytes are undefined.
|
||||
return Ok(ScalarMaybeUndef::Undef);
|
||||
}
|
||||
// Now we do the actual reading
|
||||
// Now we do the actual reading.
|
||||
let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap();
|
||||
// See if we got a pointer
|
||||
// See if we got a pointer.
|
||||
if size != cx.data_layout().pointer_size {
|
||||
// *Now* better make sure that the inside also is free of relocations.
|
||||
// *Now*, we better make sure that the inside is free of relocations too.
|
||||
self.check_relocations(cx, ptr, size)?;
|
||||
} else {
|
||||
match self.relocations.get(&ptr.offset) {
|
||||
@ -415,7 +415,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
Ok(ScalarMaybeUndef::Scalar(Scalar::from_uint(bits, size)))
|
||||
}
|
||||
|
||||
/// Read a pointer-sized scalar.
|
||||
/// Reads a pointer-sized scalar.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
pub fn read_ptr_sized(
|
||||
@ -427,12 +427,12 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
|
||||
}
|
||||
|
||||
/// Write a *non-ZST* scalar
|
||||
/// Writes a *non-ZST* scalar.
|
||||
///
|
||||
/// zsts can't be read out of two reasons:
|
||||
/// * byteorder cannot work with zero element buffers
|
||||
/// * in oder to obtain a `Pointer` we need to check for ZSTness anyway due to integer pointers
|
||||
/// being valid for ZSTs
|
||||
/// ZSTs can't be read for two reasons:
|
||||
/// * byte-order cannot work with zero-element buffers;
|
||||
/// * in order to obtain a `Pointer`, we need to check for ZSTness anyway due to integer
|
||||
/// pointers being valid for ZSTs.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
pub fn write_scalar(
|
||||
@ -460,7 +460,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
let dst = self.get_bytes_mut(cx, ptr, type_size)?;
|
||||
write_target_uint(endian, dst, bytes).unwrap();
|
||||
|
||||
// See if we have to also write a relocation
|
||||
// See if we have to also write a relocation.
|
||||
match val {
|
||||
Scalar::Ptr(val) => {
|
||||
self.relocations.insert(
|
||||
@ -474,7 +474,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write a pointer-sized scalar.
|
||||
/// Writes a pointer-sized scalar.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
pub fn write_ptr_sized(
|
||||
@ -489,9 +489,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Relocations
|
||||
/// Relocations.
|
||||
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
/// Returns all relocations overlapping with the given ptr-offset pair.
|
||||
/// Returns all relocations overlapping with the given pointer-offset pair.
|
||||
pub fn get_relocations(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
@ -501,7 +501,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with
|
||||
// the beginning of this range.
|
||||
let start = ptr.offset.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
|
||||
let end = ptr.offset + size; // this does overflow checking
|
||||
let end = ptr.offset + size; // This does overflow checking.
|
||||
self.relocations.range(Size::from_bytes(start)..end)
|
||||
}
|
||||
|
||||
@ -561,7 +561,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Error if there are relocations overlapping with the edges of the
|
||||
/// Errors if there are relocations overlapping with the edges of the
|
||||
/// given memory range.
|
||||
#[inline]
|
||||
fn check_relocation_edges(
|
||||
@ -577,7 +577,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
}
|
||||
|
||||
|
||||
/// Undefined bytes
|
||||
/// Undefined bytes.
|
||||
impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
|
||||
/// Checks that a range of bytes is defined. If not, returns the `ReadUndefBytes`
|
||||
/// error which will report the first byte which is undefined.
|
||||
@ -618,7 +618,7 @@ pub struct AllocationDefinedness {
|
||||
|
||||
/// Transferring the definedness mask to other allocations.
|
||||
impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
/// Creates a run-length encoding of the undef_mask.
|
||||
/// Creates a run-length encoding of the undef mask.
|
||||
pub fn compress_undef_range(
|
||||
&self,
|
||||
src: Pointer<Tag>,
|
||||
@ -631,10 +631,10 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
// Therefore we precompute a compressed version of the undef mask of the source value and
|
||||
// then write it back `repeat` times without computing any more information from the source.
|
||||
|
||||
// a precomputed cache for ranges of defined/undefined bits
|
||||
// A precomputed cache for ranges of defined/undefined bits
|
||||
// 0000010010001110 will become
|
||||
// [5, 1, 2, 1, 3, 3, 1]
|
||||
// where each element toggles the state
|
||||
// `[5, 1, 2, 1, 3, 3, 1]`,
|
||||
// where each element toggles the state.
|
||||
|
||||
let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
|
||||
let initial = self.undef_mask.get(src.offset);
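The comment block above compresses the definedness mask into an initial bit plus run lengths, each of which toggles the state (0000010010001110 becomes [5, 1, 2, 1, 3, 3, 1]). A small free-standing version of that encoding step over a plain `Vec<bool>` instead of the real `UndefMask`:

/// Returns the initial bit state plus the lengths of the runs that follow,
/// where each length toggles the state (the scheme described above).
fn compress_runs(bits: &[bool]) -> (bool, Vec<u64>) {
    let initial = bits.first().copied().unwrap_or(false);
    let mut runs = Vec::new();
    let mut cur = initial;
    let mut cur_len = 1u64;
    for &b in &bits[1.min(bits.len())..] {
        if b == cur {
            cur_len += 1;
        } else {
            runs.push(cur_len);
            cur = b;
            cur_len = 1;
        }
    }
    if !bits.is_empty() {
        runs.push(cur_len);
    }
    (initial, runs)
}

fn main() {
    // The example mask from the comment above: 0000010010001110.
    let bits: Vec<bool> = "0000010010001110".chars().map(|c| c == '1').collect();
    let (initial, runs) = compress_runs(&bits);
    assert_eq!(initial, false);
    assert_eq!(runs, vec![5, 1, 2, 1, 3, 3, 1]);
}

Decoding only needs the initial bit plus the run lengths, and a mask that is uniformly defined or undefined compresses to a single run, which is the fast path the later `mark_compressed_undef_range` hunk special-cases.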
|
||||
@ -642,7 +642,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
let mut cur = initial;
|
||||
|
||||
for i in 1..size.bytes() {
|
||||
// FIXME: optimize to bitshift the current undef block's bits and read the top bit
|
||||
// FIXME: optimize to bitshift the current undef block's bits and read the top bit.
|
||||
if self.undef_mask.get(src.offset + Size::from_bytes(i)) == cur {
|
||||
cur_len += 1;
|
||||
} else {
|
||||
@ -657,7 +657,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
AllocationDefinedness { ranges, initial, }
|
||||
}
|
||||
|
||||
/// Apply multiple instances of the run-length encoding to the undef_mask.
|
||||
/// Applies multiple instances of the run-length encoding to the undef mask.
|
||||
pub fn mark_compressed_undef_range(
|
||||
&mut self,
|
||||
defined: &AllocationDefinedness,
|
||||
@ -665,7 +665,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
size: Size,
|
||||
repeat: u64,
|
||||
) {
|
||||
// an optimization where we can just overwrite an entire range of definedness bits if
|
||||
// An optimization where we can just overwrite an entire range of definedness bits if
|
||||
// they are going to be uniformly `1` or `0`.
|
||||
if defined.ranges.len() <= 1 {
|
||||
self.undef_mask.set_range_inbounds(
|
||||
@ -694,9 +694,9 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Relocations
|
||||
/// Relocations.
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
|
||||
pub struct Relocations<Tag = (), Id = AllocId>(SortedMap<Size, (Tag, Id)>);
|
||||
|
||||
impl<Tag, Id> Relocations<Tag, Id> {
|
||||
pub fn new() -> Self {
|
||||
@ -766,7 +766,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply a relocation copy.
|
||||
/// Applies a relocation copy.
|
||||
/// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
|
||||
/// to be clear of relocations.
|
||||
pub fn mark_relocation_range(
|
||||
@ -838,8 +838,8 @@ impl UndefMask {
|
||||
let (blocka, bita) = bit_index(start);
|
||||
let (blockb, bitb) = bit_index(end);
|
||||
if blocka == blockb {
|
||||
// first set all bits but the first `bita`
|
||||
// then unset the last `64 - bitb` bits
|
||||
// First set all bits except the first `bita`,
|
||||
// then unset the last `64 - bitb` bits.
|
||||
let range = if bitb == 0 {
|
||||
u64::max_value() << bita
|
||||
} else {
|
||||
@ -854,24 +854,24 @@ impl UndefMask {
|
||||
}
|
||||
// across block boundaries
|
||||
if new_state {
|
||||
// set bita..64 to 1
|
||||
// Set `bita..64` to `1`.
|
||||
self.blocks[blocka] |= u64::max_value() << bita;
|
||||
// set 0..bitb to 1
|
||||
// Set `0..bitb` to `1`.
|
||||
if bitb != 0 {
|
||||
self.blocks[blockb] |= u64::max_value() >> (64 - bitb);
|
||||
}
|
||||
// fill in all the other blocks (much faster than one bit at a time)
|
||||
// Fill in all the other blocks (much faster than one bit at a time).
|
||||
for block in (blocka + 1) .. blockb {
|
||||
self.blocks[block] = u64::max_value();
|
||||
}
|
||||
} else {
|
||||
// set bita..64 to 0
|
||||
// Set `bita..64` to `0`.
|
||||
self.blocks[blocka] &= !(u64::max_value() << bita);
|
||||
// set 0..bitb to 0
|
||||
// Set `0..bitb` to `0`.
|
||||
if bitb != 0 {
|
||||
self.blocks[blockb] &= !(u64::max_value() >> (64 - bitb));
|
||||
}
|
||||
// fill in all the other blocks (much faster than one bit at a time)
|
||||
// Fill in all the other blocks (much faster than one bit at a time).
|
||||
for block in (blocka + 1) .. blockb {
|
||||
self.blocks[block] = 0;
|
||||
}
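The hunk above (and the single-block case before it) relies on the same masking idea: cover `bita..64` of the first block and `0..bitb` of the last, and fill whole blocks in between. Here is an isolated sketch of building such a mask for one 64-bit block, under the stated bounds assumption:

/// Sets bits `start..end` (with 0 <= start < end <= 64) of one 64-bit block to
/// `new_state`, leaving the other bits untouched -- the same mask construction
/// the single-block branch uses.
fn set_range_in_block(block: &mut u64, start: u32, end: u32, new_state: bool) {
    assert!(start < end && end <= 64);
    // First set all bits from `start` upward, then clear everything at and
    // above `end`; the `end == 64` case must avoid an overflowing shift.
    let mask = if end == 64 {
        u64::max_value() << start
    } else {
        (u64::max_value() << start) & !(u64::max_value() << end)
    };
    if new_state {
        *block |= mask;
    } else {
        *block &= !mask;
    }
}

fn main() {
    let mut block = 0u64;
    set_range_in_block(&mut block, 4, 12, true);
    assert_eq!(block, 0b1111_1111_0000);
    set_range_in_block(&mut block, 8, 64, false);
    assert_eq!(block, 0b0000_1111_0000);
}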
|
||||
@ -908,7 +908,7 @@ impl UndefMask {
|
||||
let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1;
|
||||
assert_eq!(additional_blocks as usize as u64, additional_blocks);
|
||||
self.blocks.extend(
|
||||
// FIXME(oli-obk): optimize this by repeating `new_state as Block`
|
||||
// FIXME(oli-obk): optimize this by repeating `new_state as Block`.
|
||||
iter::repeat(0).take(additional_blocks as usize),
|
||||
);
|
||||
}
|
||||
|
@ -1,23 +1,21 @@
|
||||
use std::{fmt, env};
|
||||
use super::{RawConst, Pointer, CheckInAllocMsg, ScalarMaybeUndef};
|
||||
|
||||
use crate::hir;
|
||||
use crate::hir::map::definitions::DefPathData;
|
||||
use crate::mir;
|
||||
use crate::ty::{self, Ty, layout};
|
||||
use crate::ty::layout::{Size, Align, LayoutError};
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use rustc_macros::HashStable;
|
||||
|
||||
use super::{RawConst, Pointer, CheckInAllocMsg, ScalarMaybeUndef};
|
||||
use crate::ty::query::TyCtxtAt;
|
||||
|
||||
use backtrace::Backtrace;
|
||||
|
||||
use crate::ty::query::TyCtxtAt;
|
||||
use errors::DiagnosticBuilder;
|
||||
|
||||
use rustc_macros::HashStable;
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use syntax_pos::{Pos, Span};
|
||||
use syntax::symbol::Symbol;
|
||||
|
||||
use std::{fmt, env};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable, RustcEncodable, RustcDecodable)]
|
||||
pub enum ErrorHandled {
|
||||
/// Already reported a lint or an error for this evaluation.
|
||||
@ -582,7 +580,7 @@ pub type InterpResult<'tcx, T = ()> = Result<T, InterpErrorInfo<'tcx>>;
|
||||
|
||||
impl fmt::Display for InterpError<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// Forward `Display` to `Debug`
|
||||
// Forward `Display` to `Debug`.
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
//! An interpreter for MIR used in CTFE and by miri
|
||||
//! An interpreter for MIR used in CTFE and by miri.
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! err_unsup {
|
||||
@ -107,21 +107,21 @@ pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask};
|
||||
|
||||
pub use self::pointer::{Pointer, PointerArithmetic, CheckInAllocMsg};
|
||||
|
||||
use std::fmt;
|
||||
use crate::mir;
|
||||
use crate::hir::def_id::DefId;
|
||||
use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind};
|
||||
use crate::ty::codec::TyDecoder;
|
||||
use crate::ty::layout::{self, Size};
|
||||
use std::io;
|
||||
use std::fmt;
|
||||
use std::num::NonZeroU32;
|
||||
use std::sync::atomic::{AtomicU32, Ordering};
|
||||
use rustc_serialize::{Encoder, Decodable, Encodable};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
|
||||
use rustc_data_structures::sync::{Lock, HashMapExt};
|
||||
use rustc_data_structures::tiny_list::TinyList;
|
||||
use rustc_macros::HashStable;
|
||||
use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
|
||||
use crate::ty::codec::TyDecoder;
|
||||
use std::sync::atomic::{AtomicU32, Ordering};
|
||||
use std::num::NonZeroU32;
|
||||
|
||||
/// Uniquely identifies a specific constant or static.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
@ -152,8 +152,8 @@ pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
alloc_id: AllocId,
|
||||
) -> Result<(), E::Error> {
|
||||
let alloc: GlobalAlloc<'tcx> =
|
||||
tcx.alloc_map.lock().get(alloc_id).expect("no value for AllocId");
|
||||
let alloc: GlobalAlloc<'tcx> = tcx.alloc_map.lock().get(alloc_id)
|
||||
.expect("no value for given alloc ID");
|
||||
match alloc {
|
||||
GlobalAlloc::Memory(alloc) => {
|
||||
trace!("encoding {:?} with {:#?}", alloc_id, alloc);
|
||||
@ -166,8 +166,8 @@ pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
|
||||
fn_instance.encode(encoder)?;
|
||||
}
|
||||
GlobalAlloc::Static(did) => {
|
||||
// referring to statics doesn't need to know about their allocations,
|
||||
// just about its DefId
|
||||
// References to statics don't need to know about their allocations,
// just about their `DefId`.
|
||||
AllocDiscriminant::Static.encode(encoder)?;
|
||||
did.encode(encoder)?;
|
||||
}
|
||||
@ -187,19 +187,18 @@ enum State {
|
||||
}
|
||||
|
||||
pub struct AllocDecodingState {
|
||||
// For each AllocId we keep track of which decoding state it's currently in.
|
||||
decoding_state: Vec<Mutex<State>>,
|
||||
// For each `AllocId`, we keep track of which decoding state it's currently in.
|
||||
decoding_state: Vec<Lock<State>>,
|
||||
// The offsets of each allocation in the data stream.
|
||||
data_offsets: Vec<u32>,
|
||||
}
|
||||
|
||||
impl AllocDecodingState {
|
||||
|
||||
pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
|
||||
static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
|
||||
let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
|
||||
|
||||
// Make sure this is never zero
|
||||
// Make sure this is never zero.
|
||||
let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
|
||||
|
||||
AllocDecodingSession {
|
||||
@ -208,10 +207,10 @@ impl AllocDecodingState {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(data_offsets: Vec<u32>) -> AllocDecodingState {
|
||||
let decoding_state = vec![Mutex::new(State::Empty); data_offsets.len()];
|
||||
pub fn new(data_offsets: Vec<u32>) -> Self {
|
||||
let decoding_state = vec![Lock::new(State::Empty); data_offsets.len()];
|
||||
|
||||
AllocDecodingState {
|
||||
Self {
|
||||
decoding_state,
|
||||
data_offsets,
|
||||
}
|
||||
@ -225,23 +224,23 @@ pub struct AllocDecodingSession<'s> {
|
||||
}
|
||||
|
||||
impl<'s> AllocDecodingSession<'s> {
|
||||
// Decodes an AllocId in a thread-safe way.
|
||||
/// Decodes an `AllocId` in a thread-safe way.
|
||||
pub fn decode_alloc_id<D>(&self, decoder: &mut D) -> Result<AllocId, D::Error>
|
||||
where
|
||||
D: TyDecoder<'tcx>,
|
||||
{
|
||||
// Read the index of the allocation
|
||||
// Read the index of the allocation.
|
||||
let idx = decoder.read_u32()? as usize;
|
||||
let pos = self.state.data_offsets[idx] as usize;
|
||||
|
||||
// Decode the AllocDiscriminant now so that we know if we have to reserve an
|
||||
// AllocId.
|
||||
// Decode the `AllocDiscriminant` now so that we know if we have to reserve an
|
||||
// `AllocId`.
|
||||
let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
|
||||
let alloc_kind = AllocDiscriminant::decode(decoder)?;
|
||||
Ok((alloc_kind, decoder.position()))
|
||||
})?;
|
||||
|
||||
// Check the decoding state, see if it's already decoded or if we should
|
||||
// Check the decoding state to see if it's already decoded or if we should
|
||||
// decode it here.
|
||||
let alloc_id = {
|
||||
let mut entry = self.state.decoding_state[idx].lock();
|
||||
@ -251,11 +250,11 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
return Ok(alloc_id);
|
||||
}
|
||||
ref mut entry @ State::Empty => {
|
||||
// We are allowed to decode
|
||||
// We are allowed to decode.
|
||||
match alloc_kind {
|
||||
AllocDiscriminant::Alloc => {
|
||||
// If this is an allocation, we need to reserve an
|
||||
// AllocId so we can decode cyclic graphs.
|
||||
// `AllocId` so we can decode cyclic graphs.
|
||||
let alloc_id = decoder.tcx().alloc_map.lock().reserve();
|
||||
*entry = State::InProgress(
|
||||
TinyList::new_single(self.session_id),
|
||||
@ -263,8 +262,8 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
Some(alloc_id)
|
||||
},
|
||||
AllocDiscriminant::Fn | AllocDiscriminant::Static => {
|
||||
// Fns and statics cannot be cyclic and their AllocId
|
||||
// is determined later by interning
|
||||
// Fns and statics cannot be cyclic, and their `AllocId`
|
||||
// is determined later by interning.
|
||||
*entry = State::InProgressNonAlloc(
|
||||
TinyList::new_single(self.session_id));
|
||||
None
|
||||
@ -273,9 +272,9 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
}
|
||||
State::InProgressNonAlloc(ref mut sessions) => {
|
||||
if sessions.contains(&self.session_id) {
|
||||
bug!("This should be unreachable")
|
||||
bug!("this should be unreachable");
|
||||
} else {
|
||||
// Start decoding concurrently
|
||||
// Start decoding concurrently.
|
||||
sessions.insert(self.session_id);
|
||||
None
|
||||
}
|
||||
@ -285,7 +284,7 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
// Don't recurse.
|
||||
return Ok(alloc_id)
|
||||
} else {
|
||||
// Start decoding concurrently
|
||||
// Start decoding concurrently.
|
||||
sessions.insert(self.session_id);
|
||||
Some(alloc_id)
|
||||
}
|
||||
@ -293,20 +292,20 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
}
|
||||
};
|
||||
|
||||
// Now decode the actual data
|
||||
// Now decode the actual data.
|
||||
let alloc_id = decoder.with_position(pos, |decoder| {
|
||||
match alloc_kind {
|
||||
AllocDiscriminant::Alloc => {
|
||||
let allocation = <&'tcx Allocation as Decodable>::decode(decoder)?;
|
||||
// We already have a reserved AllocId.
|
||||
let alloc = <&'tcx Allocation as Decodable>::decode(decoder)?;
|
||||
// We already have a reserved `AllocId`.
|
||||
let alloc_id = alloc_id.unwrap();
|
||||
trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
|
||||
decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, allocation);
|
||||
trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
|
||||
decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, alloc);
|
||||
Ok(alloc_id)
|
||||
},
|
||||
AllocDiscriminant::Fn => {
|
||||
assert!(alloc_id.is_none());
|
||||
trace!("creating fn alloc id");
|
||||
trace!("creating fn alloc ID");
|
||||
let instance = ty::Instance::decode(decoder)?;
|
||||
trace!("decoded fn alloc instance: {:?}", instance);
|
||||
let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance);
|
||||
@ -314,8 +313,9 @@ impl<'s> AllocDecodingSession<'s> {
|
||||
},
|
||||
AllocDiscriminant::Static => {
|
||||
assert!(alloc_id.is_none());
|
||||
trace!("creating extern static alloc id at");
|
||||
trace!("creating extern static alloc ID");
|
||||
let did = DefId::decode(decoder)?;
|
||||
trace!("decoded static def-ID: {:?}", did);
|
||||
let alloc_id = decoder.tcx().alloc_map.lock().create_static_alloc(did);
|
||||
Ok(alloc_id)
|
||||
}
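The decoding session above guards each `AllocId` slot with a small state machine so that concurrent decoders either reuse a finished result, rejoin their own in-progress (possibly cyclic) decode, or claim the slot and decode it themselves. A stripped-down sketch of that per-slot protocol, using a plain `Mutex`, a `Vec` of session IDs, and `u64` IDs in place of the real `Lock`, `TinyList`, and interning machinery:

use std::sync::Mutex;

type AllocId = u64;
type SessionId = u32;

// Per-slot decoding state, as described in the surrounding hunks.
enum State {
    Empty,
    InProgress(Vec<SessionId>, AllocId),
    Done(AllocId),
}

/// Returns `Ok(id)` if the slot is already decoded or this session is already
/// decoding it (a cycle); `Err(reserved_id)` if the caller should decode it now.
fn claim_slot(slot: &Mutex<State>, session: SessionId, reserve: impl FnOnce() -> AllocId)
    -> Result<AllocId, AllocId>
{
    let mut entry = slot.lock().unwrap();
    match &mut *entry {
        State::Done(id) => return Ok(*id),
        State::InProgress(sessions, id) => {
            return if sessions.contains(&session) {
                // Don't recurse into our own in-progress decode.
                Ok(*id)
            } else {
                // Start decoding concurrently.
                sessions.push(session);
                Err(*id)
            };
        }
        State::Empty => {}
    }
    // The slot was empty: reserve an id up front so cyclic graphs can refer
    // back to it, and record that this session is now decoding it.
    let id = reserve();
    *entry = State::InProgress(vec![session], id);
    Err(id)
}

fn main() {
    let slot = Mutex::new(State::Empty);
    assert_eq!(claim_slot(&slot, 1, || 42), Err(42)); // first session decodes
    assert_eq!(claim_slot(&slot, 1, || 0), Ok(42));   // cycle: reuse the reserved id
    *slot.lock().unwrap() = State::Done(42);
    assert_eq!(claim_slot(&slot, 2, || 0), Ok(42));   // later sessions just read
}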
|
||||
@ -340,7 +340,7 @@ impl fmt::Display for AllocId {
|
||||
/// a static, or a "real" allocation with some data in it.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
|
||||
pub enum GlobalAlloc<'tcx> {
|
||||
/// The alloc ID is used as a function pointer
|
||||
/// The alloc ID is used as a function pointer.
|
||||
Function(Instance<'tcx>),
|
||||
/// The alloc ID points to a "lazy" static variable that did not get computed (yet).
|
||||
/// This is also used to break the cycle in recursive statics.
|
||||
@ -350,16 +350,17 @@ pub enum GlobalAlloc<'tcx> {
|
||||
}
|
||||
|
||||
pub struct AllocMap<'tcx> {
|
||||
/// Lets you know what an `AllocId` refers to.
|
||||
/// Maps `AllocId`s to their corresponding allocations.
|
||||
alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,
|
||||
|
||||
/// Used to ensure that statics and functions only get one associated `AllocId`.
|
||||
/// Should never contain a `GlobalAlloc::Memory`!
|
||||
/// FIXME: Should we just have two separate dedup maps for statics and functions each?
|
||||
//
|
||||
// FIXME: Should we just have two separate dedup maps for statics and functions each?
|
||||
dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,
|
||||
|
||||
/// The `AllocId` to assign to the next requested ID.
|
||||
/// Always incremented, never gets smaller.
|
||||
/// Always incremented; never gets smaller.
|
||||
next_id: AllocId,
|
||||
}
|
||||
|
||||
@ -389,7 +390,7 @@ impl<'tcx> AllocMap<'tcx> {
|
||||
next
|
||||
}
|
||||
|
||||
/// Reserve a new ID *if* this allocation has not been dedup-reserved before.
|
||||
/// Reserves a new ID *if* this allocation has not been dedup-reserved before.
|
||||
/// Should only be used for function pointers and statics, we don't want
|
||||
/// to dedup IDs for "real" memory!
|
||||
fn reserve_and_set_dedup(&mut self, alloc: GlobalAlloc<'tcx>) -> AllocId {
|
||||
@ -430,17 +431,17 @@ impl<'tcx> AllocMap<'tcx> {
|
||||
}
|
||||
});
|
||||
if is_generic {
|
||||
// Get a fresh ID
|
||||
// Get a fresh ID.
|
||||
let id = self.reserve();
|
||||
self.alloc_map.insert(id, GlobalAlloc::Function(instance));
|
||||
id
|
||||
} else {
|
||||
// Deduplicate
|
||||
// Deduplicate.
|
||||
self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
|
||||
}
|
||||
}
|
||||
|
||||
/// Intern the `Allocation` and return a new `AllocId`, even if there's already an identical
|
||||
/// Interns the `Allocation` and returns a new `AllocId`, even if there's already an identical
|
||||
/// `Allocation` with a different `AllocId`.
|
||||
/// Statics with identical content will still point to the same `Allocation`, i.e.,
|
||||
/// their data will be deduplicated through `Allocation` interning -- but they
|
||||
@ -465,19 +466,19 @@ impl<'tcx> AllocMap<'tcx> {
|
||||
pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
|
||||
match self.get(id) {
|
||||
Some(GlobalAlloc::Memory(mem)) => mem,
|
||||
_ => bug!("expected allocation id {} to point to memory", id),
|
||||
_ => bug!("expected allocation ID {} to point to memory", id),
|
||||
}
|
||||
}
|
||||
|
||||
/// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
|
||||
/// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
|
||||
/// call this function twice, even with the same `Allocation` will ICE the compiler.
|
||||
pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
|
||||
if let Some(old) = self.alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
|
||||
bug!("tried to set allocation id {}, but it was already existing as {:#?}", id, old);
|
||||
bug!("tried to set allocation ID {}, but it was already existing as {:#?}", id, old);
|
||||
}
|
||||
}
|
||||
|
||||
/// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
|
||||
/// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
|
||||
/// twice for the same `(AllocId, Allocation)` pair.
|
||||
fn set_alloc_id_same_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
|
||||
self.alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
|
||||
@ -513,7 +514,7 @@ pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result
|
||||
// Methods to facilitate working with signed integers stored in a u128
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Truncate `value` to `size` bits and then sign-extend it to 128 bits
|
||||
/// Truncates `value` to `size` bits and then sign-extends it to 128 bits
|
||||
/// (i.e., if it is negative, fill with 1's on the left).
|
||||
#[inline]
|
||||
pub fn sign_extend(value: u128, size: Size) -> u128 {
|
||||
@ -522,14 +523,14 @@ pub fn sign_extend(value: u128, size: Size) -> u128 {
|
||||
// Truncated until nothing is left.
|
||||
return 0;
|
||||
}
|
||||
// sign extend
|
||||
// Sign-extend it.
|
||||
let shift = 128 - size;
|
||||
// shift the unsigned value to the left
|
||||
// and back to the right as signed (essentially fills with FF on the left)
|
||||
// Shift the unsigned value to the left, then shift back to the right as signed
|
||||
// (essentially fills with FF on the left).
|
||||
(((value << shift) as i128) >> shift) as u128
|
||||
}
|
||||
|
||||
/// Truncate `value` to `size` bits.
|
||||
/// Truncates `value` to `size` bits.
|
||||
#[inline]
|
||||
pub fn truncate(value: u128, size: Size) -> u128 {
|
||||
let size = size.bits();
|
||||
@ -538,6 +539,6 @@ pub fn truncate(value: u128, size: Size) -> u128 {
|
||||
return 0;
|
||||
}
|
||||
let shift = 128 - size;
|
||||
// truncate (shift left to drop out leftover values, shift right to fill with zeroes)
|
||||
// Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
|
||||
(value << shift) >> shift
|
||||
}
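Both helpers above work purely by shifting: `truncate` shifts the unwanted high bits out and back in as zeroes, while `sign_extend` makes the round trip through `i128` so the sign bit is copied into the upper bits. A self-contained version over a plain bit count (`u64` in place of the `Size` type), with a couple of sanity checks:

/// Truncates `value` to `size` bits and sign-extends the result to 128 bits.
fn sign_extend(value: u128, size: u64) -> u128 {
    assert!(size > 0 && size <= 128);
    let shift = 128 - size;
    // Shift the unsigned value to the left, then back to the right as signed
    // (this fills the upper bits with copies of the sign bit).
    (((value << shift) as i128) >> shift) as u128
}

/// Truncates `value` to `size` bits (the upper bits become zero).
fn truncate(value: u128, size: u64) -> u128 {
    assert!(size > 0 && size <= 128);
    let shift = 128 - size;
    // Shift left to drop the leftover high bits, shift right to refill with zeroes.
    (value << shift) >> shift
}

fn main() {
    // 0xFE as an 8-bit value is -2; sign-extending to 128 bits gives ...FFFE.
    assert_eq!(sign_extend(0xFE, 8), u128::max_value() - 1);
    // Truncating 0x1FF to 8 bits drops the ninth bit.
    assert_eq!(truncate(0x1FF, 8), 0xFF);
}

The real helpers also special-case `size == 0` by returning 0 ("truncated until nothing is left"), which the asserts in this sketch simply rule out.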
|
||||
|
@ -86,18 +86,17 @@ pub trait PointerArithmetic: layout::HasDataLayout {
|
||||
|
||||
impl<T: layout::HasDataLayout> PointerArithmetic for T {}
|
||||
|
||||
|
||||
/// Pointer is generic over the type that represents a reference to Allocations,
|
||||
/// `Pointer` is generic over the type that represents a reference to `Allocation`s,
|
||||
/// thus making it possible for the most convenient representation to be used in
|
||||
/// each context.
|
||||
///
|
||||
/// Defaults to the index based and loosely coupled AllocId.
|
||||
/// Defaults to the index based and loosely coupled `AllocId`.
|
||||
///
|
||||
/// Pointer is also generic over the `Tag` associated with each pointer,
|
||||
/// which is used to do provenance tracking during execution.
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd,
|
||||
RustcEncodable, RustcDecodable, Hash, HashStable)]
|
||||
pub struct Pointer<Tag=(),Id=AllocId> {
|
||||
pub struct Pointer<Tag = (), Id = AllocId> {
|
||||
pub alloc_id: Id,
|
||||
pub offset: Size,
|
||||
pub tag: Tag,
|
||||
@ -117,7 +116,7 @@ impl<Id: fmt::Debug> fmt::Debug for Pointer<(), Id> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Produces a `Pointer` which points to the beginning of the Allocation
|
||||
/// Produces a `Pointer` which points to the beginning of the `Allocation`.
|
||||
impl From<AllocId> for Pointer {
|
||||
#[inline(always)]
|
||||
fn from(alloc_id: AllocId) -> Self {
|
||||
|
@ -91,7 +91,7 @@ impl<'tcx> ConstValue<'tcx> {
/// of a simple value or a pointer into another `Allocation`
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd,
RustcEncodable, RustcDecodable, Hash, HashStable)]
pub enum Scalar<Tag=(), Id=AllocId> {
pub enum Scalar<Tag = (), Id = AllocId> {
/// The raw bytes of a simple value.
Raw {
/// The first `size` bytes of `data` are the value.
@ -359,7 +359,7 @@ impl<'tcx, Tag> Scalar<Tag> {

#[inline(always)]
pub fn assert_bits(self, target_size: Size) -> u128 {
self.to_bits(target_size).expect("Expected Raw bits but got a Pointer")
self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
}

/// Do not call this method! Use either `assert_ptr` or `force_ptr`.
@ -374,7 +374,7 @@ impl<'tcx, Tag> Scalar<Tag> {

#[inline(always)]
pub fn assert_ptr(self) -> Pointer<Tag> {
self.to_ptr().expect("Expected a Pointer but got Raw bits")
self.to_ptr().expect("expected a Pointer but got Raw bits")
}

/// Do not call this method! Dispatch based on the type instead.
@ -482,8 +482,8 @@ impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
}
}

#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, RustcEncodable, RustcDecodable)]
pub enum ScalarMaybeUndef<Tag = (), Id = AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
}
@ -18,6 +18,7 @@ use crate::ty::{
self, AdtDef, CanonicalUserTypeAnnotations, ClosureSubsts, GeneratorSubsts, Region, Ty, TyCtxt,
UserTypeAnnotationIndex,
};

use polonius_engine::Atom;
use rustc_data_structures::bit_set::BitMatrix;
use rustc_data_structures::fx::FxHashSet;
@ -70,7 +71,7 @@ impl<'tcx> HasLocalDecls<'tcx> for Body<'tcx> {

/// The various "big phases" that MIR goes through.
///
/// Warning: ordering of variants is significant
/// Warning: ordering of variants is significant.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum MirPhase {
Build = 0,
@ -80,16 +81,16 @@ pub enum MirPhase {
}

impl MirPhase {
/// Gets the index of the current MirPhase within the set of all MirPhases.
/// Gets the index of the current MirPhase within the set of all `MirPhase`s.
pub fn phase_index(&self) -> usize {
*self as usize
}
}

/// Lowered representation of a single function.
/// The lowered representation of a single function.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Body<'tcx> {
/// List of basic blocks. References to basic block use a newtyped index type `BasicBlock`
/// A list of basic blocks. References to basic block use a newtyped index type `BasicBlock`
/// that indexes into this vector.
basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,

@ -100,7 +101,7 @@ pub struct Body<'tcx> {
/// us to see the difference and forego optimization on the inlined promoted items.
pub phase: MirPhase,

/// List of source scopes; these are referenced by statements
/// A list of source scopes; these are referenced by statements
/// and used for debuginfo. Indexed by a `SourceScope`.
pub source_scopes: IndexVec<SourceScope, SourceScopeData>,

@ -108,10 +109,10 @@ pub struct Body<'tcx> {
/// needn't) be tracked across crates.
pub source_scope_local_data: ClearCrossCrate<IndexVec<SourceScope, SourceScopeLocalData>>,

/// Yields type of the function, if it is a generator.
/// The yield type of the function, if it is a generator.
pub yield_ty: Option<Ty<'tcx>>,

/// Generator drop glue
/// Generator drop glue.
pub generator_drop: Option<Box<Body<'tcx>>>,

/// The layout of a generator. Produced by the state transformation.
@ -124,10 +125,10 @@ pub struct Body<'tcx> {
/// variables and temporaries.
pub local_decls: LocalDecls<'tcx>,

/// User type annotations
/// User type annotations.
pub user_type_annotations: CanonicalUserTypeAnnotations<'tcx>,

/// Number of arguments this function takes.
/// The number of arguments this function takes.
///
/// Starting at local 1, `arg_count` locals will be provided by the caller
/// and can be assumed to be initialized.
@ -143,10 +144,11 @@ pub struct Body<'tcx> {

/// Names and capture modes of all the closure upvars, assuming
/// the first argument is either the closure or a reference to it.
//
// NOTE(eddyb) This is *strictly* a temporary hack for codegen
// debuginfo generation, and will be removed at some point.
// Do **NOT** use it for anything else, upvar information should not be
// in the MIR, please rely on local crate HIR or other side-channels.
// Do **NOT** use it for anything else; upvar information should not be
// in the MIR, so please rely on local crate HIR or other side-channels.
pub __upvar_debuginfo_codegen_only_do_not_use: Vec<UpvarDebuginfo>,

/// Mark this MIR of a const context other than const functions as having converted a `&&` or
@ -157,10 +159,10 @@ pub struct Body<'tcx> {
/// List of places where control flow was destroyed. Used for error reporting.
pub control_flow_destroyed: Vec<(Span, String)>,

/// A span representing this MIR, for error reporting
/// A span representing this MIR, for error reporting.
pub span: Span,

/// A cache for various calculations
/// A cache for various calculations.
cache: cache::Cache,
}

@ -177,7 +179,7 @@ impl<'tcx> Body<'tcx> {
span: Span,
control_flow_destroyed: Vec<(Span, String)>,
) -> Self {
// We need `arg_count` locals, and one for the return place
// We need `arg_count` locals, and one for the return place.
assert!(
local_decls.len() >= arg_count + 1,
"expected at least {} locals, got {}",
@ -384,12 +386,12 @@ impl<'tcx> Body<'tcx> {
true
}

/// Returns the return type, it always return first element from `local_decls` array
/// Returns the return type; it always return first element from `local_decls` array.
pub fn return_ty(&self) -> Ty<'tcx> {
self.local_decls[RETURN_PLACE].ty
}

/// Gets the location of the terminator for the given block
/// Gets the location of the terminator for the given block.
pub fn terminator_loc(&self, bb: BasicBlock) -> Location {
Location { block: bb, statement_index: self[bb].statements.len() }
}
@ -463,7 +465,7 @@ impl<T: Decodable> rustc_serialize::UseSpecializedDecodable for ClearCrossCrate<
/// Most passes can work with it as a whole, within a single function.
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, HashStable)]
pub struct SourceInfo {
/// Source span for the AST pertaining to this MIR entity.
/// The source span for the AST pertaining to this MIR entity.
pub span: Span,

/// The source scope, keeping track of which bindings can be
@ -591,13 +593,13 @@ impl Atom for Local {
/// Classifies locals into categories. See `Body::local_kind`.
#[derive(PartialEq, Eq, Debug, HashStable)]
pub enum LocalKind {
/// User-declared variable binding
/// User-declared variable binding.
Var,
/// Compiler-introduced temporary
/// Compiler-introduced temporary.
Temp,
/// Function argument
/// Function argument.
Arg,
/// Location of function's return value
/// Location of function's return value.
ReturnPointer,
}

@ -619,7 +621,7 @@ pub struct VarBindingForm<'tcx> {
/// (b) it gives a way to separate this case from the remaining cases
/// for diagnostics.
pub opt_match_place: Option<(Option<Place<'tcx>>, Span)>,
/// Span of the pattern in which this variable was bound.
/// The span of the pattern in which this variable was bound.
pub pat_span: Span,
}

@ -721,12 +723,12 @@ impl_stable_hash_for!(struct BlockTailInfo { tail_result_is_ignored });
/// argument, or the return place.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct LocalDecl<'tcx> {
/// `let mut x` vs `let x`.
/// Whether this is a mutable minding (i.e., `let x` or `let mut x`).
///
/// Temporaries and the return place are always mutable.
pub mutability: Mutability,

/// Some(binding_mode) if this corresponds to a user-declared local variable.
/// `Some(binding_mode)` if this corresponds to a user-declared local variable.
///
/// This is solely used for local diagnostics when generating
/// warnings/errors when compiling the current crate, and
@ -760,7 +762,7 @@ pub struct LocalDecl<'tcx> {
/// intervening statement context).
pub is_block_tail: Option<BlockTailInfo>,

/// Type of this local.
/// The type of this local.
pub ty: Ty<'tcx>,

/// If the user manually ascribed a type to this variable,
@ -769,7 +771,7 @@ pub struct LocalDecl<'tcx> {
/// region inference.
pub user_ty: UserTypeProjections,

/// Name of the local, used in debuginfo and pretty-printing.
/// The name of the local, used in debuginfo and pretty-printing.
///
/// Note that function arguments can also have this set to `Some(_)`
/// to generate better debuginfo.
@ -837,8 +839,8 @@ pub struct LocalDecl<'tcx> {
/// ROOT SCOPE
/// │{ argument x: &str }
/// │
/// │ │{ #[allow(unused_mut)] } // this is actually split into 2 scopes
/// │ │ // in practice because I'm lazy.
/// │ │{ #[allow(unused_mut)] } // This is actually split into 2 scopes
/// │ │ // in practice because I'm lazy.
/// │ │
/// │ │← x.source_info.scope
/// │ │← `x.parse().unwrap()`
@ -852,7 +854,7 @@ pub struct LocalDecl<'tcx> {
/// │
/// │ │{ let x: u32 }
/// │ │← x.visibility_scope
/// │ │← `drop(x)` // this accesses `x: u32`
/// │ │← `drop(x)` // This accesses `x: u32`.
/// ```
pub source_info: SourceInfo,

@ -1038,16 +1040,16 @@ pub struct Terminator<'tcx> {

#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)]
pub enum TerminatorKind<'tcx> {
/// block should have one successor in the graph; we jump there
/// Block should have one successor in the graph; we jump there.
Goto { target: BasicBlock },

/// operand evaluates to an integer; jump depending on its value
/// to one of the targets, and otherwise fallback to `otherwise`
/// Operand evaluates to an integer; jump depending on its value
/// to one of the targets, and otherwise fallback to `otherwise`.
SwitchInt {
/// discriminant value being tested
/// The discriminant value being tested.
discr: Operand<'tcx>,

/// type of value being tested
/// The type of value being tested.
switch_ty: Ty<'tcx>,

/// Possible values. The locations to branch to in each case
@ -1057,6 +1059,7 @@ pub enum TerminatorKind<'tcx> {
/// Possible branch sites. The last element of this vector is used
/// for the otherwise branch, so targets.len() == values.len() + 1
/// should hold.
//
// This invariant is quite non-obvious and also could be improved.
// One way to make this invariant is to have something like this instead:
//
@ -1069,7 +1072,7 @@ pub enum TerminatorKind<'tcx> {
},

/// Indicates that the landing pad is finished and unwinding should
/// continue. Emitted by build::scope::diverge_cleanup.
/// continue. Emitted by `build::scope::diverge_cleanup`.
Resume,

/// Indicates that the landing pad is finished and that the process
@ -1083,10 +1086,10 @@ pub enum TerminatorKind<'tcx> {
/// Indicates a terminator that can never be reached.
Unreachable,

/// Drop the Place
/// Drop the `Place`.
Drop { location: Place<'tcx>, target: BasicBlock, unwind: Option<BasicBlock> },

/// Drop the Place and assign the new value over it. This ensures
/// Drop the `Place` and assign the new value over it. This ensures
/// that the assignment to `P` occurs *even if* the destructor for
/// place unwinds. Its semantics are best explained by the
/// elaboration:
@ -1119,9 +1122,9 @@ pub enum TerminatorKind<'tcx> {
unwind: Option<BasicBlock>,
},

/// Block ends with a call of a converging function
/// Block ends with a call of a converging function.
Call {
/// The function that’s being called
/// The function that’s being called.
func: Operand<'tcx>,
/// Arguments the function is called with.
/// These are owned by the callee, which is free to modify them.
@ -1132,7 +1135,7 @@ pub enum TerminatorKind<'tcx> {
destination: Option<(Place<'tcx>, BasicBlock)>,
/// Cleanups to be done if the call unwinds.
cleanup: Option<BasicBlock>,
/// Whether this is from a call in HIR, rather than from an overloaded
/// `true` if this is from a call in HIR rather than from an overloaded
/// operator. True for overloaded function call.
from_hir_call: bool,
},
@ -1147,40 +1150,40 @@ pub enum TerminatorKind<'tcx> {
cleanup: Option<BasicBlock>,
},

/// A suspend point
/// A suspend point.
Yield {
/// The value to return
/// The value to return.
value: Operand<'tcx>,
/// Where to resume to
/// Where to resume to.
resume: BasicBlock,
/// Cleanup to be done if the generator is dropped at this suspend point
/// Cleanup to be done if the generator is dropped at this suspend point.
drop: Option<BasicBlock>,
},

/// Indicates the end of the dropping of a generator
/// Indicates the end of the dropping of a generator.
GeneratorDrop,

/// A block where control flow only ever takes one real path, but borrowck
/// needs to be more conservative.
FalseEdges {
/// The target normal control flow will take
/// The target normal control flow will take.
real_target: BasicBlock,
/// A block control flow could conceptually jump to, but won't in
/// practice
/// practice.
imaginary_target: BasicBlock,
},
/// A terminator for blocks that only take one path in reality, but where we
/// reserve the right to unwind in borrowck, even if it won't happen in practice.
/// This can arise in infinite loops with no function calls for example.
FalseUnwind {
/// The target normal control flow will take
/// The target normal control flow will take.
real_target: BasicBlock,
/// The imaginary cleanup block link. This particular path will never be taken
/// in practice, but in order to avoid fragility we want to always
/// consider it in borrowck. We don't want to accept programs which
/// pass borrowck only when panic=abort or some assertions are disabled
/// due to release vs. debug mode builds. This needs to be an Option because
/// of the remove_noop_landing_pads and no_landing_pads passes
/// pass borrowck only when `panic=abort` or some assertions are disabled
/// due to release vs. debug mode builds. This needs to be an `Option` because
/// of the `remove_noop_landing_pads` and `no_landing_pads` passes.
unwind: Option<BasicBlock>,
},
}
@ -1445,7 +1448,7 @@ impl<'tcx> Debug for TerminatorKind<'tcx> {
}

impl<'tcx> TerminatorKind<'tcx> {
/// Write the "head" part of the terminator; that is, its name and the data it uses to pick the
/// Writes the "head" part of the terminator; that is, its name and the data it uses to pick the
/// successor basic block, if any. The only information not included is the list of possible
/// successors, which may be rendered differently between the text and the graphviz format.
pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
@ -1615,20 +1618,20 @@ pub enum StatementKind<'tcx> {
Nop,
}

/// `RetagKind` describes what kind of retag is to be performed.
/// Describes what kind of retag is to be performed.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, HashStable)]
pub enum RetagKind {
/// The initial retag when entering a function
/// The initial retag when entering a function.
FnEntry,
/// Retag preparing for a two-phase borrow
/// Retag preparing for a two-phase borrow.
TwoPhase,
/// Retagging raw pointers
/// Retagging raw pointers.
Raw,
/// A "normal" retag
/// A "normal" retag.
Default,
}

/// The `FakeReadCause` describes the type of pattern why a `FakeRead` statement exists.
/// The `FakeReadCause` describes the type of pattern why a FakeRead statement exists.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub enum FakeReadCause {
/// Inject a fake read of the borrowed input at the end of each guards
@ -2171,7 +2174,7 @@ pub struct SourceScopeData {

#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct SourceScopeLocalData {
/// A HirId with lint levels equivalent to this scope's lint levels.
/// An `HirId` with lint levels equivalent to this scope's lint levels.
pub lint_root: hir::HirId,
/// The unsafe block that contains this node.
pub safety: Safety,
@ -2760,11 +2763,12 @@ impl<'a, 'b> graph::GraphSuccessors<'b> for Body<'a> {

#[derive(Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, HashStable)]
pub struct Location {
/// the location is within this block
/// The block that the location is within.
pub block: BasicBlock,

/// the location is the start of the statement; or, if `statement_index`
/// == num-statements, then the start of the terminator.
/// The location is the position of the start of the statement; or, if
/// `statement_index` equals the number of statements, then the start of the
/// terminator.
pub statement_index: usize,
}

@ -2827,7 +2831,7 @@ impl Location {
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
pub enum UnsafetyViolationKind {
General,
/// Permitted in const fn and regular fns.
/// Permitted both in `const fn`s and regular `fn`s.
GeneralAndConstFn,
ExternStatic(hir::HirId),
BorrowPacked(hir::HirId),
@ -2843,9 +2847,9 @@ pub struct UnsafetyViolation {

#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
pub struct UnsafetyCheckResult {
/// Violations that are propagated *upwards* from this function
/// Violations that are propagated *upwards* from this function.
pub violations: Lrc<[UnsafetyViolation]>,
/// unsafe blocks in this function, along with whether they are used. This is
/// `unsafe` blocks in this function, along with whether they are used. This is
/// used for the "unused_unsafe" lint.
pub unsafe_blocks: Lrc<[(hir::HirId, bool)]>,
}
@ -2857,7 +2861,7 @@ newtype_index! {
}
}

/// The layout of generator state
/// The layout of generator state.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct GeneratorLayout<'tcx> {
/// The type of every local stored inside the generator.
@ -2872,11 +2876,14 @@ pub struct GeneratorLayout<'tcx> {
/// layout.
pub storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,

/// Names and scopes of all the stored generator locals.
/// NOTE(tmandry) This is *strictly* a temporary hack for codegen
/// The names and scopes of all the stored generator locals.
///
/// N.B., this is *strictly* a temporary hack for codegen
/// debuginfo generation, and will be removed at some point.
/// Do **NOT** use it for anything else, local information should not be
/// in the MIR, please rely on local crate HIR or other side-channels.
//
// FIXME(tmandry): see above.
pub __local_debuginfo_codegen_only_do_not_use: IndexVec<GeneratorSavedLocal, LocalDecl<'tcx>>,
}

@ -2934,7 +2941,7 @@ pub struct BorrowCheckResult<'tcx> {
/// instances assigned one of these same indices. Those regions will
/// be substituted away by the creator. We use `ReClosureBound` in
/// that case because the regions must be allocated in the global
/// TyCtxt, and hence we cannot use `ReVar` (which is what we use
/// `TyCtxt`, and hence we cannot use `ReVar` (which is what we use
/// internally within the rest of the NLL code).
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct ClosureRegionRequirements<'tcx> {
@ -2950,8 +2957,8 @@ pub struct ClosureRegionRequirements<'tcx> {
pub outlives_requirements: Vec<ClosureOutlivesRequirement<'tcx>>,
}

/// Indicates an outlives constraint between a type or between two
/// free-regions declared on the closure.
/// Indicates an outlives-constraint between a type or between two
/// free regions declared on the closure.
#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct ClosureOutlivesRequirement<'tcx> {
// This region or type ...
@ -2967,11 +2974,11 @@ pub struct ClosureOutlivesRequirement<'tcx> {
pub category: ConstraintCategory,
}

/// Outlives constraints can be categorized to determine whether and why they
/// Outlives-constraints can be categorized to determine whether and why they
/// are interesting (for error reporting). Order of variants indicates sort
/// order of the category, thereby influencing diagnostic output.
///
/// See also [rustc_mir::borrow_check::nll::constraints]
/// See also [rustc_mir::borrow_check::nll::constraints].
#[derive(
Copy,
Clone,
@ -3019,7 +3026,7 @@ pub enum ConstraintCategory {
Internal,
}

/// The subject of a ClosureOutlivesRequirement -- that is, the thing
/// The subject of a `ClosureOutlivesRequirement` -- that is, the thing
/// that must outlive some region.
#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub enum ClosureOutlivesSubject<'tcx> {
@ -3037,7 +3044,7 @@ pub enum ClosureOutlivesSubject<'tcx> {
}

/*
* TypeFoldable implementations for MIR types
* `TypeFoldable` implementations for MIR types
*/

CloneTypeFoldableAndLiftImpls! {
@ -17,7 +17,6 @@ use crate::traits::query::{
use std::borrow::Cow;
use syntax_pos::symbol::InternedString;


// Each of these queries corresponds to a function pointer field in the
// `Providers` struct for requesting a value of that type, and a method
// on `tcx: TyCtxt` (and `tcx.at(span)`) for doing that request in a way
@ -854,7 +853,7 @@ rustc_queries! {
desc { "calculating the lang items map" }
}

/// Returns all diagnostic items defined in all crates
/// Returns all diagnostic items defined in all crates.
query all_diagnostic_items(_: CrateNum) -> &'tcx FxHashMap<Symbol, DefId> {
eval_always
desc { "calculating the diagnostic items map" }
@ -865,7 +864,7 @@ rustc_queries! {
desc { "calculating the lang items defined in a crate" }
}

/// Returns the diagnostic items defined in a crate
/// Returns the diagnostic items defined in a crate.
query diagnostic_items(_: CrateNum) -> &'tcx FxHashMap<Symbol, DefId> {
desc { "calculating the diagnostic items map in a crate" }
}
@ -1,36 +1,36 @@
//! Contains infrastructure for configuring the compiler, including parsing
//! command line options.

use std::str::FromStr;
//! command-line options.

use crate::lint;
use crate::middle::cstore;
use crate::session::{early_error, early_warn, Session};
use crate::session::search_paths::SearchPath;

use rustc_data_structures::fx::FxHashSet;

use rustc_target::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, RelroLevel};
use rustc_target::spec::{Target, TargetTriple};
use crate::lint;
use crate::middle::cstore;

use syntax;
use syntax::ast::{self, IntTy, UintTy, MetaItemKind};
use syntax::source_map::{FileName, FilePathMapping};
use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION};
use syntax::parse::{ParseSess, new_parser_from_source_str};
use syntax::parse::token;
use syntax::parse;
use syntax::symbol::{sym, Symbol};
use syntax::feature_gate::UnstableFeatures;
use errors::emitter::HumanReadableErrorType;

use errors::emitter::HumanReadableErrorType;
use errors::{ColorConfig, FatalError, Handler};

use getopts;
use std::collections::{BTreeMap, BTreeSet};
use std::collections::btree_map::Iter as BTreeMapIter;
use std::collections::btree_map::Keys as BTreeMapKeysIter;
use std::collections::btree_map::Values as BTreeMapValuesIter;

use rustc_data_structures::fx::FxHashSet;
use std::{fmt, str};
use std::collections::{BTreeMap, BTreeSet};
use std::collections::btree_map::{
Iter as BTreeMapIter, Keys as BTreeMapKeysIter, Values as BTreeMapValuesIter,
};
use std::fmt;
use std::str::{self, FromStr};
use std::hash::Hasher;
use std::collections::hash_map::DefaultHasher;
use std::iter::FromIterator;
@ -241,14 +241,14 @@ pub enum ErrorOutputType {
}

impl Default for ErrorOutputType {
fn default() -> ErrorOutputType {
ErrorOutputType::HumanReadable(HumanReadableErrorType::Default(ColorConfig::Auto))
fn default() -> Self {
Self::HumanReadable(HumanReadableErrorType::Default(ColorConfig::Auto))
}
}

// Use tree-based collections to cheaply get a deterministic Hash implementation.
// DO NOT switch BTreeMap out for an unsorted container type! That would break
// dependency tracking for command-line arguments.
/// Use tree-based collections to cheaply get a deterministic `Hash` implementation.
/// *Do not* switch `BTreeMap` out for an unsorted container type! That would break
/// dependency tracking for command-line arguments.
#[derive(Clone, Hash)]
pub struct OutputTypes(BTreeMap<OutputType, Option<PathBuf>>);

@ -281,7 +281,7 @@ impl OutputTypes {
self.0.len()
}

// True if any of the output types require codegen or linking.
// Returns `true` if any of the output types require codegen or linking.
pub fn should_codegen(&self) -> bool {
self.0.keys().any(|k| match *k {
OutputType::Bitcode
@ -295,9 +295,9 @@ impl OutputTypes {
}
}

// Use tree-based collections to cheaply get a deterministic Hash implementation.
// DO NOT switch BTreeMap or BTreeSet out for an unsorted container type! That
// would break dependency tracking for command-line arguments.
/// Use tree-based collections to cheaply get a deterministic `Hash` implementation.
/// *Do not* switch `BTreeMap` or `BTreeSet` out for an unsorted container type! That
/// would break dependency tracking for command-line arguments.
#[derive(Clone, Hash)]
pub struct Externs(BTreeMap<String, ExternEntry>);

@ -327,7 +327,7 @@ macro_rules! hash_option {
($opt_name:ident, $opt_expr:expr, $sub_hashes:expr, [TRACKED]) => ({
if $sub_hashes.insert(stringify!($opt_name),
$opt_expr as &dyn dep_tracking::DepTrackingHash).is_some() {
bug!("Duplicate key in CLI DepTrackingHash: {}", stringify!($opt_name))
bug!("duplicate key in CLI DepTrackingHash: {}", stringify!($opt_name))
}
});
}
@ -362,7 +362,7 @@ macro_rules! top_level_options {
);
}

// The top-level command-line options struct
// The top-level command-line options struct.
//
// For each option, one has to specify how it behaves with regard to the
// dependency tracking system of incremental compilation. This is done via the
@ -376,16 +376,16 @@ macro_rules! top_level_options {
// Incremental compilation is not influenced by this option.
//
// If you add a new option to this struct or one of the sub-structs like
// CodegenOptions, think about how it influences incremental compilation. If in
// `CodegenOptions`, think about how it influences incremental compilation. If in
// doubt, specify [TRACKED], which is always "correct" but might lead to
// unnecessary re-compilation.
top_level_options!(
pub struct Options {
// The crate config requested for the session, which may be combined
// with additional crate configurations during the compile process
// with additional crate configurations during the compile process.
crate_types: Vec<CrateType> [TRACKED],
optimize: OptLevel [TRACKED],
// Include the debug_assertions flag into dependency tracking, since it
// Include the `debug_assertions` flag in dependency tracking, since it
// can influence whether overflow checks are done or not.
debug_assertions: bool [TRACKED],
debuginfo: DebugInfo [TRACKED],
@ -402,8 +402,8 @@ top_level_options!(
test: bool [TRACKED],
error_format: ErrorOutputType [UNTRACKED],

// if Some, enable incremental compilation, using the given
// directory to store intermediate results
// If `Some`, enable incremental compilation, using the given
// directory to store intermediate results.
incremental: Option<PathBuf> [UNTRACKED],

debugging_opts: DebuggingOptions [TRACKED],
@ -418,7 +418,7 @@ top_level_options!(
// written `extern crate name as std`. Defaults to `std`. Used by
// out-of-tree drivers.
alt_std_name: Option<String> [TRACKED],
// Indicates how the compiler should treat unstable features
// Indicates how the compiler should treat unstable features.
unstable_features: UnstableFeatures [TRACKED],

// Indicates whether this run of the compiler is actually rustdoc. This
@ -434,12 +434,12 @@ top_level_options!(
cli_forced_codegen_units: Option<usize> [UNTRACKED],
cli_forced_thinlto_off: bool [UNTRACKED],

// Remap source path prefixes in all output (messages, object files, debug, etc)
// Remap source path prefixes in all output (messages, object files, debug, etc.).
remap_path_prefix: Vec<(PathBuf, PathBuf)> [UNTRACKED],

edition: Edition [TRACKED],

// Whether or not we're emitting JSON blobs about each artifact produced
// `true` if we're emitting JSON blobs about each artifact produced
// by the compiler.
json_artifact_notifications: bool [TRACKED],
}
@ -468,7 +468,7 @@ pub enum BorrowckMode {
}

impl BorrowckMode {
/// Should we run the MIR-based borrow check, but also fall back
/// Returns whether we should run the MIR-based borrow check, but also fall back
/// on the AST borrow check if the MIR-based one errors.
pub fn migrate(self) -> bool {
match self {
@ -477,7 +477,7 @@ impl BorrowckMode {
}
}

/// Should we emit the AST-based borrow checker errors?
/// Returns whether we should emit the AST-based borrow checker errors.
pub fn use_ast(self) -> bool {
match self {
BorrowckMode::Mir => false,
@ -487,12 +487,13 @@ impl BorrowckMode {
}

pub enum Input {
/// Loads source from file
/// Load source code from a file.
File(PathBuf),
/// Load source code from a string.
Str {
/// String that is shown in place of a filename
/// A string that is shown in place of a filename.
name: FileName,
/// Anonymous source string
/// An anonymous string containing the source code.
input: String,
},
}
@ -651,7 +652,7 @@ impl Options {
FilePathMapping::new(self.remap_path_prefix.clone())
}

/// Returns `true` if there will be an output file generated
/// Returns `true` if there will be an output file generated.
pub fn will_create_output_file(&self) -> bool {
!self.debugging_opts.parse_only && // The file is just being parsed
!self.debugging_opts.ls // The file is just being queried
@ -709,16 +710,14 @@ impl Passes {
}
}

/// Declare a macro that will define all CodegenOptions/DebuggingOptions fields and parsers all
/// at once. The goal of this macro is to define an interface that can be
/// programmatically used by the option parser in order to initialize the struct
/// without hardcoding field names all over the place.
/// Defines all `CodegenOptions`/`DebuggingOptions` fields and parsers all at once. The goal of this
/// macro is to define an interface that can be programmatically used by the option parser
/// to initialize the struct without hardcoding field names all over the place.
///
/// The goal is to invoke this macro once with the correct fields, and then this
/// macro generates all necessary code. The main gotcha of this macro is the
/// cgsetters module which is a bunch of generated code to parse an option into
/// its respective field in the struct. There are a few hand-written parsers for
/// parsing specific types of values in this module.
/// The goal is to invoke this macro once with the correct fields, and then this macro generates all
/// necessary code. The main gotcha of this macro is the `cgsetters` module which is a bunch of
/// generated code to parse an option into its respective field in the struct. There are a few
/// hand-written parsers for parsing specific types of values in this module.
macro_rules! options {
($struct_name:ident, $setter_name:ident, $defaultfn:ident,
$buildfn:ident, $prefix:expr, $outputname:expr,
@ -1539,7 +1538,7 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
ret
}

/// Converts the crate cfg! configuration from String to Symbol.
/// Converts the crate `cfg!` configuration from `String` to `Symbol`.
/// `rustc_interface::interface::Config` accepts this in the compiler configuration,
/// but the symbol interner is not yet set up then, so we must convert it later.
pub fn to_crate_config(cfg: FxHashSet<(String, Option<String>)>) -> ast::CrateConfig {
@ -1550,9 +1549,9 @@ pub fn to_crate_config(cfg: FxHashSet<(String, Option<String>)>) -> ast::CrateCo

pub fn build_configuration(sess: &Session, mut user_cfg: ast::CrateConfig) -> ast::CrateConfig {
// Combine the configuration requested by the session (command line) with
// some default and generated configuration items
// some default and generated configuration items.
let default_cfg = default_configuration(sess);
// If the user wants a test runner, then add the test cfg
// If the user wants a test runner, then add the test cfg.
if sess.opts.test {
user_cfg.insert((sym::test, None));
}
@ -1851,13 +1850,13 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
opts
}

// Convert strings provided as --cfg [cfgspec] into a crate_cfg
// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
syntax::with_default_globals(move || {
let cfg = cfgspecs.into_iter().map(|s| {
let sess = parse::ParseSess::new(FilePathMapping::empty());
let sess = ParseSess::new(FilePathMapping::empty());
let filename = FileName::cfg_spec_source_code(&s);
let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string());
let mut parser = new_parser_from_source_str(&sess, filename, s.to_string());

macro_rules! error {($reason: expr) => {
early_error(ErrorOutputType::default(),
@ -1917,7 +1916,7 @@ pub fn get_cmd_lint_options(matches: &getopts::Matches,
(lint_opts, describe_lints, lint_cap)
}

/// Parse the `--color` flag
/// Parses the `--color` flag.
pub fn parse_color(matches: &getopts::Matches) -> ColorConfig {
match matches.opt_str("color").as_ref().map(|s| &s[..]) {
Some("auto") => ColorConfig::Auto,
@ -1929,7 +1928,7 @@ pub fn parse_color(matches: &getopts::Matches) -> ColorConfig {
Some(arg) => early_error(
ErrorOutputType::default(),
&format!(
"argument for --color must be auto, \
"argument for `--color` must be auto, \
always or never (instead was `{}`)",
arg
),
@ -1974,16 +1973,16 @@ pub fn parse_json(matches: &getopts::Matches) -> (HumanReadableErrorType, bool)
(json_rendered(json_color), json_artifact_notifications)
}

/// Parse the `--error-format` flag
/// Parses the `--error-format` flag.
pub fn parse_error_format(
matches: &getopts::Matches,
color: ColorConfig,
json_rendered: HumanReadableErrorType,
) -> ErrorOutputType {
// We need the opts_present check because the driver will send us Matches
// We need the `opts_present` check because the driver will send us Matches
// with only stable options if no unstable options are used. Since error-format
// is unstable, it will not be present. We have to use opts_present not
// opt_present because the latter will panic.
// is unstable, it will not be present. We have to use `opts_present` not
// `opt_present` because the latter will panic.
let error_format = if matches.opts_present(&["error-format".to_owned()]) {
match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
None |
@ -2116,7 +2115,7 @@ pub fn build_session_options_and_crate_config(
let mut codegen_units = cg.codegen_units;
let mut disable_thinlto = false;

// Issue #30063: if user requests llvm-related output to one
// Issue #30063: if user requests LLVM-related output to one
// particular path, disable codegen-units.
let incompatible: Vec<_> = output_types
.iter()
@ -2414,10 +2413,10 @@ pub fn build_session_options_and_crate_config(
)
}

// We start out with a Vec<(Option<String>, bool)>>,
// and later convert it into a BTreeSet<(Option<String>, bool)>
// We start out with a `Vec<(Option<String>, bool)>>`,
// and later convert it into a `BTreeSet<(Option<String>, bool)>`
// This allows to modify entries in-place to set their correct
// 'public' value
// 'public' value.
let mut externs: BTreeMap<String, ExternEntry> = BTreeMap::new();
for (arg, private) in matches.opt_strs("extern").into_iter().map(|v| (v, false))
.chain(matches.opt_strs("extern-private").into_iter().map(|v| (v, true))) {
@ -2616,15 +2615,15 @@ impl fmt::Display for CrateType {
/// The values of all command-line arguments that are relevant for dependency
/// tracking are hashed into a single value that determines whether the
/// incremental compilation cache can be re-used or not. This hashing is done
/// via the DepTrackingHash trait defined below, since the standard Hash
/// implementation might not be suitable (e.g., arguments are stored in a Vec,
/// via the `DepTrackingHash` trait defined below, since the standard `Hash`
/// implementation might not be suitable (e.g., arguments are stored in a `Vec`,
/// the hash of which is order dependent, but we might not want the order of
/// arguments to make a difference for the hash).
///
/// However, since the value provided by Hash::hash often *is* suitable,
/// However, since the value provided by `Hash::hash` often *is* suitable,
/// especially for primitive types, there is the
/// impl_dep_tracking_hash_via_hash!() macro that allows to simply reuse the
/// Hash implementation for DepTrackingHash. It's important though that
/// `impl_dep_tracking_hash_via_hash!()` macro that allows to simply reuse the
/// `Hash` implementation for `DepTrackingHash`. It's important though that
/// we have an opt-in scheme here, so one is hopefully forced to think about
/// how the hash should be calculated when adding a new command-line argument.
mod dep_tracking {
@ -2637,9 +2636,9 @@ mod dep_tracking {
use super::{CrateType, DebugInfo, ErrorOutputType, OptLevel, OutputTypes,
Passes, Sanitizer, LtoCli, LinkerPluginLto, SwitchWithOptPath,
SymbolManglingVersion};
use syntax::feature_gate::UnstableFeatures;
use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel, TargetTriple};
use syntax::edition::Edition;
use syntax::feature_gate::UnstableFeatures;

pub trait DepTrackingHash {
fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType);
@ -79,12 +79,12 @@ pub struct Session {
/// if the value stored here has been affected by path remapping.
pub working_dir: (PathBuf, bool),

// FIXME: lint_store and buffered_lints are not thread-safe,
// but are only used in a single thread
// FIXME: `lint_store` and `buffered_lints` are not thread-safe,
// but are only used in a single thread.
pub lint_store: RwLock<lint::LintStore>,
pub buffered_lints: Lock<Option<lint::LintBuffer>>,

/// Set of (DiagnosticId, Option<Span>, message) tuples tracking
/// Set of `(DiagnosticId, Option<Span>, message)` tuples tracking
/// (sub)diagnostics that have been set once, but should not be set again,
/// in order to avoid redundantly verbose output (Issue #24690, #44953).
pub one_time_diagnostics: Lock<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
@ -92,11 +92,11 @@ pub struct Session {
pub plugin_attributes: Lock<Vec<(Symbol, AttributeType)>>,
pub crate_types: Once<Vec<config::CrateType>>,
pub dependency_formats: Once<dependency_format::Dependencies>,
/// The crate_disambiguator is constructed out of all the `-C metadata`
/// The `crate_disambiguator` is constructed out of all the `-C metadata`
/// arguments passed to the compiler. Its value together with the crate-name
/// forms a unique global identifier for the crate. It is used to allow
/// multiple crates with the same name to coexist. See the
/// rustc_codegen_llvm::back::symbol_names module for more information.
/// `rustc_codegen_llvm::back::symbol_names` module for more information.
pub crate_disambiguator: Once<CrateDisambiguator>,

features: Once<feature_gate::Features>,
@ -111,7 +111,7 @@ pub struct Session {
/// The maximum number of stackframes allowed in const eval.
pub const_eval_stack_frame_limit: usize,

/// The metadata::creader module may inject an allocator/panic_runtime
/// The `metadata::creader` module may inject an allocator/`panic_runtime`
/// dependency if it didn't already find one, and this tracks what was
/// injected.
pub allocator_kind: Once<Option<AllocatorKind>>,
@ -130,7 +130,7 @@ pub struct Session {
/// Used by `-Z profile-queries` in `util::common`.
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,

/// Used by -Z self-profile
/// Used by `-Z self-profile`.
pub self_profiling: Option<Arc<SelfProfiler>>,

/// Some measurements that are being gathered during compilation.
@ -187,16 +187,16 @@ pub struct PerfStats {
pub normalize_projection_ty: AtomicUsize,
}

/// Enum to support dispatch of one-time diagnostics (in Session.diag_once)
/// Enum to support dispatch of one-time diagnostics (in `Session.diag_once`).
enum DiagnosticBuilderMethod {
Note,
SpanNote,
SpanSuggestion(String), // suggestion
// add more variants as needed to support one-time diagnostics
// Add more variants as needed to support one-time diagnostics.
}

/// Diagnostic message ID—used by `Session.one_time_diagnostics` to avoid
/// emitting the same message more than once
/// Diagnostic message ID, used by `Session.one_time_diagnostics` to avoid
/// emitting the same message more than once.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticMessageId {
ErrorId(u16), // EXXXX error code as integer
@ -408,7 +408,7 @@ impl Session {
Some(next) => {
self.next_node_id.set(ast::NodeId::from_usize(next));
}
None => bug!("Input too large, ran out of node ids!"),
None => bug!("input too large; ran out of node-IDs!"),
}

id
@ -440,11 +440,11 @@ impl Session {
diag_builder.note(message);
}
DiagnosticBuilderMethod::SpanNote => {
let span = span_maybe.expect("span_note needs a span");
let span = span_maybe.expect("`span_note` needs a span");
diag_builder.span_note(span, message);
}
DiagnosticBuilderMethod::SpanSuggestion(suggestion) => {
let span = span_maybe.expect("span_suggestion_* needs a span");
let span = span_maybe.expect("`span_suggestion_*` needs a span");
diag_builder.span_suggestion(
span,
message,
@ -688,7 +688,7 @@ impl Session {

pub fn must_not_eliminate_frame_pointers(&self) -> bool {
// "mcount" function relies on stack pointer.
// See https://sourceware.org/binutils/docs/gprof/Implementation.html
// See <https://sourceware.org/binutils/docs/gprof/Implementation.html>.
if self.instrument_mcount() {
true
} else if let Some(x) = self.opts.cg.force_frame_pointers {
@ -699,7 +699,7 @@ impl Session {
}

/// Returns the symbol name for the registrar function,
/// given the crate Svh and the function DefIndex.
/// given the crate `Svh` and the function `DefIndex`.
pub fn generate_plugin_registrar_symbol(&self, disambiguator: CrateDisambiguator) -> String {
format!(
"__rustc_plugin_registrar_{}__",
@ -719,7 +719,7 @@ impl Session {
&self.sysroot,
self.opts.target_triple.triple(),
&self.opts.search_paths,
// target_tlib_path==None means it's the same as host_tlib_path.
// `target_tlib_path == None` means it's the same as `host_tlib_path`.
self.target_tlib_path.as_ref().unwrap_or(&self.host_tlib_path),
kind,
)
@ -779,12 +779,12 @@ impl Session {
if let IncrCompSession::Active { .. } = *incr_comp_session {
} else {
bug!(
"Trying to finalize IncrCompSession `{:?}`",
"trying to finalize `IncrCompSession` `{:?}`",
*incr_comp_session
)
);
}

// Note: This will also drop the lock file, thus unlocking the directory
// Note: this will also drop the lock file, thus unlocking the directory.
*incr_comp_session = IncrCompSession::Finalized {
session_directory: new_directory_path,
};
@ -800,13 +800,15 @@ impl Session {
} => session_directory.clone(),
IncrCompSession::InvalidBecauseOfErrors { .. } => return,
_ => bug!(
"Trying to invalidate IncrCompSession `{:?}`",
"trying to invalidate `IncrCompSession` `{:?}`",
*incr_comp_session
),
};

// Note: This will also drop the lock file, thus unlocking the directory
*incr_comp_session = IncrCompSession::InvalidBecauseOfErrors { session_directory };
// Note: this will also drop the lock file, thus unlocking the directory.
*incr_comp_session = IncrCompSession::InvalidBecauseOfErrors {
session_directory,
};
}

pub fn incr_comp_session_dir(&self) -> cell::Ref<'_, PathBuf> {
@ -815,8 +817,8 @@ impl Session {
incr_comp_session,
|incr_comp_session| match *incr_comp_session {
IncrCompSession::NotInitialized => bug!(
"Trying to get session directory from IncrCompSession `{:?}`",
*incr_comp_session
"trying to get session directory from `IncrCompSession`: {:?}",
*incr_comp_session,
),
IncrCompSession::Active {
ref session_directory,
@ -1185,7 +1187,10 @@ fn build_session_(
);
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);

let p_s = parse::ParseSess::with_span_handler(span_diagnostic, source_map);
let parse_sess = parse::ParseSess::with_span_handler(
span_diagnostic,
source_map,
);
let sysroot = match &sopts.maybe_sysroot {
Some(sysroot) => sysroot.clone(),
None => filesearch::get_or_default_sysroot(),
@ -1214,7 +1219,7 @@ fn build_session_(
let print_fuel = AtomicU64::new(0);

let working_dir = env::current_dir().unwrap_or_else(|e|
p_s.span_diagnostic
parse_sess.span_diagnostic
.fatal(&format!("Current directory is invalid: {}", e))
.raise()
);
@ -1232,7 +1237,7 @@ fn build_session_(
opts: sopts,
host_tlib_path,
target_tlib_path,
parse_sess: p_s,
parse_sess,
sysroot,
local_crate_source_file,
working_dir,
@ -130,13 +130,13 @@ impl<'tcx> TyCtxt<'tcx> {
}

/// We say a method is *vtable safe* if it can be invoked on a trait
/// object. Note that object-safe traits can have some
/// non-vtable-safe methods, so long as they require `Self:Sized` or
/// otherwise ensure that they cannot be used when `Self=Trait`.
/// object. Note that object-safe traits can have some
/// non-vtable-safe methods, so long as they require `Self: Sized` or
/// otherwise ensure that they cannot be used when `Self = Trait`.
pub fn is_vtable_safe_method(self, trait_def_id: DefId, method: &ty::AssocItem) -> bool {
debug_assert!(self.generics_of(trait_def_id).has_self);
debug!("is_vtable_safe_method({:?}, {:?})", trait_def_id, method);
// Any method that has a `Self : Sized` requisite can't be called.
// Any method that has a `Self: Sized` bound cannot be called.
if self.generics_require_sized_self(method.def_id) {
return false;
}
@ -350,15 +350,15 @@ impl<'tcx> TyCtxt<'tcx> {
&sig.map_bound(|sig| sig.inputs()[0]),
);

// until `unsized_locals` is fully implemented, `self: Self` can't be dispatched on.
// Until `unsized_locals` is fully implemented, `self: Self` can't be dispatched on.
// However, this is already considered object-safe. We allow it as a special case here.
// FIXME(mikeyhew) get rid of this `if` statement once `receiver_is_dispatchable` allows
// `Receiver: Unsize<Receiver[Self => dyn Trait]>`
// `Receiver: Unsize<Receiver[Self => dyn Trait]>`.
if receiver_ty != self.types.self_param {
if !self.receiver_is_dispatchable(method, receiver_ty) {
return Some(MethodViolationCode::UndispatchableReceiver);
} else {
// sanity check to make sure the receiver actually has the layout of a pointer
// Do sanity check to make sure the receiver actually has the layout of a pointer.

use crate::ty::layout::Abi;

@ -373,7 +373,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
};

// e.g., Rc<()>
// e.g., `Rc<()>`
let unit_receiver_ty = self.receiver_for_self_ty(
receiver_ty, self.mk_unit(), method.def_id
);
@ -395,7 +395,7 @@ impl<'tcx> TyCtxt<'tcx> {
trait_def_id, self.mk_region(ty::ReStatic)
);

// e.g., Rc<dyn Trait>
// e.g., `Rc<dyn Trait>`
let trait_object_receiver = self.receiver_for_self_ty(
receiver_ty, trait_object_ty, method.def_id
);
@ -419,8 +419,8 @@ impl<'tcx> TyCtxt<'tcx> {
None
}

/// Performs a type substitution to produce the version of receiver_ty when `Self = self_ty`
/// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>`.
/// Performs a type substitution to produce the version of `receiver_ty` when `Self = self_ty`.
/// For example, for `receiver_ty = Rc<Self>` and `self_ty = Foo`, returns `Rc<Foo>`.
fn receiver_for_self_ty(
self,
receiver_ty: Ty<'tcx>,
@ -1,7 +1,8 @@
use crate::infer::InferCtxt;
use crate::infer::canonical::OriginalQueryValues;
use crate::traits::{EvaluationResult, PredicateObligation, SelectionContext,
TraitQueryMode, OverflowError};
use crate::traits::{
EvaluationResult, PredicateObligation, SelectionContext, TraitQueryMode, OverflowError,
};

impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Evaluates whether the predicate can be satisfied (by any means)
@ -6,11 +6,11 @@ use crate::ty::Ty;
pub struct CandidateStep<'tcx> {
pub self_ty: Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>,
pub autoderefs: usize,
// true if the type results from a dereference of a raw pointer.
// when assembling candidates, we include these steps, but not when
// picking methods. This so that if we have `foo: *const Foo` and `Foo` has methods
// `fn by_raw_ptr(self: *const Self)` and `fn by_ref(&self)`, then
// `foo.by_raw_ptr()` will work and `foo.by_ref()` won't.
/// `true` if the type results from a dereference of a raw pointer.
/// when assembling candidates, we include these steps, but not when
/// picking methods. This so that if we have `foo: *const Foo` and `Foo` has methods
/// `fn by_raw_ptr(self: *const Self)` and `fn by_ref(&self)`, then
/// `foo.by_raw_ptr()` will work and `foo.by_ref()` won't.
pub from_unsafe_deref: bool,
pub unsize: bool,
}
@ -512,7 +512,7 @@ pub fn impl_trait_ref_and_oblig<'a, 'tcx>(
(impl_trait_ref, impl_obligations)
}

/// See `super::obligations_for_generics`
/// See [`super::obligations_for_generics`].
pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
recursion_depth: usize,
param_env: ty::ParamEnv<'tcx>,
@ -562,7 +562,7 @@ impl<'tcx> TyCtxt<'tcx> {
predicate_for_trait_ref(cause, param_env, trait_ref, recursion_depth)
}

/// Cast a trait reference into a reference to one of its super
/// Casts a trait reference into a reference to one of its super
/// traits; returns `None` if `target_trait_def_id` is not a
/// supertrait.
pub fn upcast_choices(self,
@ -571,7 +571,7 @@ impl<'tcx> TyCtxt<'tcx> {
-> Vec<ty::PolyTraitRef<'tcx>>
{
if source_trait_ref.def_id() == target_trait_def_id {
return vec![source_trait_ref]; // shorcut the most common case
return vec![source_trait_ref]; // Shortcut the most common case.
}

supertraits(self, source_trait_ref)
@ -284,9 +284,11 @@ where
#[macro_export]
macro_rules! __impl_decoder_methods {
($($name:ident -> $ty:ty;)*) => {
$(fn $name(&mut self) -> Result<$ty, Self::Error> {
self.opaque.$name()
})*
$(
fn $name(&mut self) -> Result<$ty, Self::Error> {
self.opaque.$name()
}
)*
}
}
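The hunk above only re-wraps the body of `__impl_decoder_methods!`, which generates one forwarding method per name, each delegating to an inner `self.opaque` decoder. A self-contained sketch of that forwarding pattern with toy types (this is not the real `rustc_serialize::Decoder` API, just the same shape):

// Toy decoder trait standing in for the real `Decoder`.
trait Decoder {
    type Error;
    fn read_u8(&mut self) -> Result<u8, Self::Error>;
    fn read_u32(&mut self) -> Result<u32, Self::Error>;
}

// Inner "opaque" decoder that does the actual byte reading.
struct Opaque {
    data: Vec<u8>,
    pos: usize,
}

impl Opaque {
    fn read_u8(&mut self) -> Result<u8, String> {
        let b = *self.data.get(self.pos).ok_or("unexpected end of data")?;
        self.pos += 1;
        Ok(b)
    }
    fn read_u32(&mut self) -> Result<u32, String> {
        let bytes = self.data.get(self.pos..self.pos + 4).ok_or("unexpected end of data")?;
        self.pos += 4;
        Ok(u32::from_le_bytes(bytes.try_into().unwrap()))
    }
}

// Outer decoder that wraps `Opaque`, the way the cache decoder wraps `opaque`.
struct Wrapper {
    opaque: Opaque,
}

// Same shape as `__impl_decoder_methods!`: one forwarding method per `name -> type` pair.
macro_rules! impl_forwarding_methods {
    ($($name:ident -> $ty:ty;)*) => {
        $(
            fn $name(&mut self) -> Result<$ty, Self::Error> {
                self.opaque.$name()
            }
        )*
    }
}

impl Decoder for Wrapper {
    type Error = String;
    impl_forwarding_methods! {
        read_u8 -> u8;
        read_u32 -> u32;
    }
}

fn main() {
    let mut d = Wrapper { opaque: Opaque { data: vec![7, 1, 0, 0, 0], pos: 0 } };
    assert_eq!(d.read_u8().unwrap(), 7);
    assert_eq!(d.read_u32().unwrap(), 1);
}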
@ -327,14 +329,17 @@ macro_rules! impl_arena_allocatable_decoders {
|
||||
macro_rules! implement_ty_decoder {
|
||||
($DecoderName:ident <$($typaram:tt),*>) => {
|
||||
mod __ty_decoder_impl {
|
||||
use super::$DecoderName;
|
||||
use std::borrow::Cow;
|
||||
|
||||
use rustc_serialize::{Decoder, SpecializedDecoder};
|
||||
|
||||
use $crate::infer::canonical::CanonicalVarInfos;
|
||||
use $crate::ty;
|
||||
use $crate::ty::codec::*;
|
||||
use $crate::ty::subst::SubstsRef;
|
||||
use $crate::hir::def_id::{CrateNum};
|
||||
use rustc_serialize::{Decoder, SpecializedDecoder};
|
||||
use std::borrow::Cow;
|
||||
|
||||
use super::$DecoderName;
|
||||
|
||||
impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> {
|
||||
type Error = String;
|
||||
@ -368,8 +373,8 @@ macro_rules! implement_ty_decoder {
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME(#36588) These impls are horribly unsound as they allow
|
||||
// the caller to pick any lifetime for 'tcx, including 'static,
|
||||
// FIXME(#36588): These impls are horribly unsound as they allow
|
||||
// the caller to pick any lifetime for `'tcx`, including `'static`,
|
||||
// by using the unspecialized proxies to them.
|
||||
|
||||
arena_types!(impl_arena_allocatable_decoders, [$DecoderName [$($typaram),*]], 'tcx);
|
||||
|
@ -7,7 +7,7 @@ use crate::session::Session;
|
||||
use crate::session::config::{BorrowckMode, OutputFilenames};
|
||||
use crate::session::config::CrateType;
|
||||
use crate::middle;
|
||||
use crate::hir::{TraitCandidate, HirId, ItemKind, ItemLocalId, Node};
|
||||
use crate::hir::{self, TraitCandidate, HirId, ItemKind, ItemLocalId, Node};
|
||||
use crate::hir::def::{Res, DefKind, Export};
|
||||
use crate::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
|
||||
use crate::hir::map as hir_map;
|
||||
@ -45,15 +45,16 @@ use crate::ty::CanonicalPolyFnSig;
|
||||
use crate::util::common::ErrorReported;
|
||||
use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet};
|
||||
use crate::util::nodemap::{FxHashMap, FxHashSet};
|
||||
|
||||
use errors::DiagnosticBuilder;
|
||||
use smallvec::SmallVec;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
|
||||
StableHasher, StableHasherResult,
|
||||
StableVec};
|
||||
use arena::SyncDroplessArena;
|
||||
use smallvec::SmallVec;
|
||||
use rustc_data_structures::stable_hasher::{
|
||||
HashStable, StableHasher, StableHasherResult, StableVec, hash_stable_hashmap,
|
||||
};
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use rustc_data_structures::sync::{Lrc, Lock, WorkerLocal};
|
||||
use rustc_data_structures::sharded::ShardedHashMap;
|
||||
use rustc_data_structures::sync::{Lrc, Lock, WorkerLocal};
|
||||
use std::any::Any;
|
||||
use std::borrow::Borrow;
|
||||
use std::cmp::Ordering;
|
||||
@ -74,8 +75,6 @@ use syntax::feature_gate;
|
||||
use syntax::symbol::{Symbol, InternedString, kw, sym};
|
||||
use syntax_pos::Span;
|
||||
|
||||
use crate::hir;
|
||||
|
||||
pub struct AllArenas {
|
||||
pub interner: SyncDroplessArena,
|
||||
}
|
||||
@ -91,10 +90,10 @@ impl AllArenas {
|
||||
type InternedSet<'tcx, T> = ShardedHashMap<Interned<'tcx, T>, ()>;
|
||||
|
||||
pub struct CtxtInterners<'tcx> {
|
||||
/// The arena that types, regions, etc are allocated from
|
||||
/// The arena that types, regions, etc. are allocated from.
|
||||
arena: &'tcx SyncDroplessArena,
|
||||
|
||||
/// Specifically use a speedy hash algorithm for these hash sets,
|
||||
/// Specifically use a speedy hash algorithm for these hash sets, since
|
||||
/// they're accessed quite often.
|
||||
type_: InternedSet<'tcx, TyS<'tcx>>,
|
||||
type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
|
||||
@ -129,7 +128,7 @@ impl<'tcx> CtxtInterners<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Intern a type
|
||||
/// Interns a type.
|
||||
#[allow(rustc::usage_of_ty_tykind)]
|
||||
#[inline(never)]
|
||||
fn intern_ty(&self,
|
||||
@ -144,7 +143,6 @@ impl<'tcx> CtxtInterners<'tcx> {
|
||||
outer_exclusive_binder: flags.outer_exclusive_binder,
|
||||
};
|
||||
|
||||
|
||||
Interned(self.arena.alloc(ty_struct))
|
||||
}).0
|
||||
}
|
||||
@ -1025,7 +1023,7 @@ pub struct GlobalCtxt<'tcx> {
|
||||
|
||||
hir_map: hir_map::Map<'tcx>,
|
||||
|
||||
/// A map from DefPathHash -> DefId. Includes DefIds from the local crate
|
||||
/// A map from `DefPathHash` -> `DefId`. Includes `DefId`s from the local crate
|
||||
/// as well as all upstream crates. Only populated in incremental mode.
|
||||
pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
|
||||
|
||||
@ -1124,9 +1122,9 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
/// Allocates a byte or string literal for `mir::interpret`, read-only
|
||||
/// Allocates a read-only byte or string literal for `mir::interpret`.
|
||||
pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
|
||||
// create an allocation that just contains these bytes
|
||||
// Create an allocation that just contains these bytes.
|
||||
let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
|
||||
let alloc = self.intern_const_alloc(alloc);
|
||||
self.alloc_map.lock().create_memory_alloc(alloc)
|
||||
@ -1346,7 +1344,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
}
|
||||
|
||||
/// Converts a `DefId` into its fully expanded `DefPath` (every
|
||||
/// `DefId` is really just an interned def-path).
|
||||
/// `DefId` is really just an interned `DefPath`).
|
||||
///
|
||||
/// Note that if `id` is not local to this crate, the result will
|
||||
/// be a non-local `DefPath`.
|
||||
@ -1402,6 +1400,10 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
self.cstore.metadata_encoding_version().to_vec()
|
||||
}
|
||||
|
||||
pub fn encode_metadata(self)-> EncodedMetadata {
|
||||
self.cstore.encode_metadata(self)
|
||||
}
|
||||
|
||||
// Note that this is *untracked* and should only be used within the query
|
||||
// system if the result is otherwise tracked through queries
|
||||
pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
|
||||
@ -1446,25 +1448,25 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
|
||||
}
|
||||
|
||||
/// If true, we should use the AST-based borrowck (we may *also* use
|
||||
/// If `true`, we should use the AST-based borrowck (we may *also* use
|
||||
/// the MIR-based borrowck).
|
||||
pub fn use_ast_borrowck(self) -> bool {
|
||||
self.borrowck_mode().use_ast()
|
||||
}
|
||||
|
||||
/// If true, we should use the MIR-based borrow check, but also
|
||||
/// fall back on the AST borrow check if the MIR-based one errors.
|
||||
/// If `true`, we should use the MIR-based borrowck, but also
|
||||
/// fall back on the AST borrowck if the MIR-based one errors.
|
||||
pub fn migrate_borrowck(self) -> bool {
|
||||
self.borrowck_mode().migrate()
|
||||
}
|
||||
|
||||
/// If true, make MIR codegen for `match` emit a temp that holds a
|
||||
/// If `true`, make MIR codegen for `match` emit a temp that holds a
|
||||
/// borrow of the input to the match expression.
|
||||
pub fn generate_borrow_of_any_match_input(&self) -> bool {
|
||||
self.emit_read_for_match()
|
||||
}
|
||||
|
||||
/// If true, make MIR codegen for `match` emit FakeRead
|
||||
/// If `true`, make MIR codegen for `match` emit FakeRead
|
||||
/// statements (which simulate the maximal effect of executing the
|
||||
/// patterns in a match arm).
|
||||
pub fn emit_read_for_match(&self) -> bool {
|
||||
@ -1517,7 +1519,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
// This method returns the DefId and the BoundRegion corresponding to the given region.
|
||||
// Returns the `DefId` and the `BoundRegion` corresponding to the given region.
|
||||
pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
|
||||
let (suitable_region_binding_scope, bound_region) = match *region {
|
||||
ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
|
||||
@ -1550,18 +1552,18 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
&self,
|
||||
scope_def_id: DefId,
|
||||
) -> Option<Ty<'tcx>> {
|
||||
// HACK: `type_of_def_id()` will fail on these (#55796), so return None
|
||||
// HACK: `type_of_def_id()` will fail on these (#55796), so return `None`.
|
||||
let hir_id = self.hir().as_local_hir_id(scope_def_id).unwrap();
|
||||
match self.hir().get(hir_id) {
|
||||
Node::Item(item) => {
|
||||
match item.node {
|
||||
ItemKind::Fn(..) => { /* type_of_def_id() will work */ }
|
||||
ItemKind::Fn(..) => { /* `type_of_def_id()` will work */ }
|
||||
_ => {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => { /* type_of_def_id() will work or panic */ }
|
||||
_ => { /* `type_of_def_id()` will work or panic */ }
|
||||
}
|
||||
|
||||
let ret_ty = self.type_of(scope_def_id);
|
||||
@ -1579,7 +1581,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
// Here we check if the bound region is in Impl Item.
|
||||
// Checks if the bound region is in Impl Item.
|
||||
pub fn is_bound_region_in_impl_item(
|
||||
&self,
|
||||
suitable_region_binding_scope: DefId,
|
||||
@ -1599,23 +1601,15 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
false
|
||||
}
|
||||
|
||||
/// Determine whether identifiers in the assembly have strict naming rules.
|
||||
/// Determines whether identifiers in the assembly have strict naming rules.
|
||||
/// Currently, only NVPTX* targets need it.
|
||||
pub fn has_strict_asm_symbol_naming(&self) -> bool {
|
||||
self.gcx.sess.target.target.arch.contains("nvptx")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TyCtxt<'tcx> {
|
||||
pub fn encode_metadata(self)
|
||||
-> EncodedMetadata
|
||||
{
|
||||
self.cstore.encode_metadata(self)
|
||||
self.sess.target.target.arch.contains("nvptx")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> GlobalCtxt<'tcx> {
|
||||
/// Call the closure with a local `TyCtxt` using the given arena.
|
||||
/// Calls the closure with a local `TyCtxt` using the given arena.
|
||||
/// `interners` is a slot passed so we can create a CtxtInterners
|
||||
/// with the same lifetime as `arena`.
|
||||
pub fn enter_local<F, R>(&'tcx self, f: F) -> R
|
||||
@ -1651,7 +1645,7 @@ impl<'tcx> GlobalCtxt<'tcx> {
|
||||
/// It would be more efficient if `TypedArena` provided a way to
|
||||
/// determine whether the address is in the allocated range.
|
||||
///
|
||||
/// None is returned if the value or one of the components is not part
|
||||
/// `None` is returned if the value or one of the components is not part
|
||||
/// of the provided context.
|
||||
/// For `Ty`, `None` can be returned if either the type interner doesn't
|
||||
/// contain the `TyKind` key or if the address of the interned
|
||||
@ -1662,7 +1656,6 @@ pub trait Lift<'tcx>: fmt::Debug {
|
||||
fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>;
|
||||
}
|
||||
|
||||
|
||||
macro_rules! nop_lift {
|
||||
($ty:ty => $lifted:ty) => {
|
||||
impl<'a, 'tcx> Lift<'tcx> for $ty {
|
||||
@ -1709,7 +1702,7 @@ nop_list_lift!{Predicate<'a> => Predicate<'tcx>}
|
||||
nop_list_lift!{CanonicalVarInfo => CanonicalVarInfo}
|
||||
nop_list_lift!{ProjectionKind => ProjectionKind}
|
||||
|
||||
// this is the impl for `&'a InternalSubsts<'a>`
|
||||
// This is the impl for `&'a InternalSubsts<'a>`.
|
||||
nop_list_lift!{Kind<'a> => Kind<'tcx>}
|
||||
|
||||
pub mod tls {
|
||||
@ -1732,43 +1725,43 @@ pub mod tls {
|
||||
use rustc_rayon_core as rayon_core;
|
||||
|
||||
/// This is the implicit state of rustc. It contains the current
|
||||
/// TyCtxt and query. It is updated when creating a local interner or
|
||||
/// executing a new query. Whenever there's a TyCtxt value available
|
||||
/// you should also have access to an ImplicitCtxt through the functions
|
||||
/// `TyCtxt` and query. It is updated when creating a local interner or
|
||||
/// executing a new query. Whenever there's a `TyCtxt` value available
|
||||
/// you should also have access to an `ImplicitCtxt` through the functions
|
||||
/// in this module.
|
||||
#[derive(Clone)]
|
||||
pub struct ImplicitCtxt<'a, 'tcx> {
|
||||
/// The current TyCtxt. Initially created by `enter_global` and updated
|
||||
/// by `enter_local` with a new local interner
|
||||
/// The current `TyCtxt`. Initially created by `enter_global` and updated
|
||||
/// by `enter_local` with a new local interner.
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
|
||||
/// The current query job, if any. This is updated by JobOwner::start in
|
||||
/// ty::query::plumbing when executing a query
|
||||
/// The current query job, if any. This is updated by `JobOwner::start` in
|
||||
/// `ty::query::plumbing` when executing a query.
|
||||
pub query: Option<Lrc<query::QueryJob<'tcx>>>,
|
||||
|
||||
/// Where to store diagnostics for the current query job, if any.
|
||||
/// This is updated by JobOwner::start in ty::query::plumbing when executing a query
|
||||
/// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query.
|
||||
pub diagnostics: Option<&'a Lock<ThinVec<Diagnostic>>>,
|
||||
|
||||
/// Used to prevent layout from recursing too deeply.
|
||||
pub layout_depth: usize,
|
||||
|
||||
/// The current dep graph task. This is used to add dependencies to queries
|
||||
/// when executing them
|
||||
/// when executing them.
|
||||
pub task_deps: Option<&'a Lock<TaskDeps>>,
|
||||
}
|
||||
|
||||
/// Sets Rayon's thread local variable which is preserved for Rayon jobs
|
||||
/// Sets Rayon's thread-local variable, which is preserved for Rayon jobs
|
||||
/// to `value` during the call to `f`. It is restored to its previous value after.
|
||||
/// This is used to set the pointer to the new ImplicitCtxt.
|
||||
/// This is used to set the pointer to the new `ImplicitCtxt`.
|
||||
#[cfg(parallel_compiler)]
|
||||
#[inline]
|
||||
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
|
||||
rayon_core::tlv::with(value, f)
|
||||
}
|
||||
|
||||
/// Gets Rayon's thread local variable which is preserved for Rayon jobs.
|
||||
/// This is used to get the pointer to the current ImplicitCtxt.
|
||||
/// Gets Rayon's thread-local variable, which is preserved for Rayon jobs.
|
||||
/// This is used to get the pointer to the current `ImplicitCtxt`.
|
||||
#[cfg(parallel_compiler)]
|
||||
#[inline]
|
||||
fn get_tlv() -> usize {
|
||||
@ -1777,13 +1770,13 @@ pub mod tls {
|
||||
|
||||
#[cfg(not(parallel_compiler))]
|
||||
thread_local! {
|
||||
/// A thread local variable which stores a pointer to the current ImplicitCtxt.
|
||||
/// A thread local variable that stores a pointer to the current `ImplicitCtxt`.
|
||||
static TLV: Cell<usize> = Cell::new(0);
|
||||
}
|
||||
|
||||
/// Sets TLV to `value` during the call to `f`.
|
||||
/// It is restored to its previous value after.
|
||||
/// This is used to set the pointer to the new ImplicitCtxt.
|
||||
/// This is used to set the pointer to the new `ImplicitCtxt`.
|
||||
#[cfg(not(parallel_compiler))]
|
||||
#[inline]
|
||||
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
|
||||
@ -1793,14 +1786,14 @@ pub mod tls {
|
||||
f()
|
||||
}
|
||||
|
||||
/// This is used to get the pointer to the current ImplicitCtxt.
|
||||
/// Gets the pointer to the current `ImplicitCtxt`.
|
||||
#[cfg(not(parallel_compiler))]
|
||||
fn get_tlv() -> usize {
|
||||
TLV.with(|tlv| tlv.get())
|
||||
}
|
||||
|
||||
/// This is a callback from libsyntax as it cannot access the implicit state
|
||||
/// in librustc otherwise
|
||||
/// in librustc otherwise.
|
||||
fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
with_opt(|tcx| {
|
||||
if let Some(tcx) = tcx {
|
||||
@ -1825,7 +1818,7 @@ pub mod tls {
|
||||
})
|
||||
}
|
||||
|
||||
/// Sets up the callbacks from libsyntax on the current thread
|
||||
/// Sets up the callbacks from libsyntax on the current thread.
|
||||
pub fn with_thread_locals<F, R>(f: F) -> R
|
||||
where F: FnOnce() -> R
|
||||
{
|
||||
@ -1850,7 +1843,7 @@ pub mod tls {
|
||||
})
|
||||
}
|
||||
|
||||
/// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
|
||||
/// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`.
|
||||
#[inline]
|
||||
pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R
|
||||
where
|
||||
@ -1861,19 +1854,19 @@ pub mod tls {
|
||||
})
|
||||
}
|
||||
|
||||
/// Enters GlobalCtxt by setting up libsyntax callbacks and
|
||||
/// creating a initial TyCtxt and ImplicitCtxt.
|
||||
/// This happens once per rustc session and TyCtxts only exists
|
||||
/// Enters `GlobalCtxt` by setting up libsyntax callbacks and
|
||||
/// creating a initial `TyCtxt` and `ImplicitCtxt`.
|
||||
/// This happens once per rustc session and `TyCtxt`s only exists
|
||||
/// inside the `f` function.
|
||||
pub fn enter_global<'tcx, F, R>(gcx: &'tcx GlobalCtxt<'tcx>, f: F) -> R
|
||||
where
|
||||
F: FnOnce(TyCtxt<'tcx>) -> R,
|
||||
{
|
||||
// Update GCX_PTR to indicate there's a GlobalCtxt available
|
||||
// Update `GCX_PTR` to indicate there's a `GlobalCtxt` available.
|
||||
GCX_PTR.with(|lock| {
|
||||
*lock.lock() = gcx as *const _ as usize;
|
||||
});
|
||||
// Set GCX_PTR back to 0 when we exit
|
||||
// Set `GCX_PTR` back to 0 when we exit.
|
||||
let _on_drop = OnDrop(move || {
|
||||
GCX_PTR.with(|lock| *lock.lock() = 0);
|
||||
});
|
||||
@ -1894,12 +1887,12 @@ pub mod tls {
|
||||
}
|
||||
|
||||
scoped_thread_local! {
|
||||
/// Stores a pointer to the GlobalCtxt if one is available.
|
||||
/// This is used to access the GlobalCtxt in the deadlock handler given to Rayon.
|
||||
/// Stores a pointer to the `GlobalCtxt` if one is available.
|
||||
/// This is used to access the `GlobalCtxt` in the deadlock handler given to Rayon.
|
||||
pub static GCX_PTR: Lock<usize>
|
||||
}
|
||||
|
||||
/// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
|
||||
/// Creates a `TyCtxt` and `ImplicitCtxt` based on the `GCX_PTR` thread local.
|
||||
/// This is used in the deadlock handler.
|
||||
pub unsafe fn with_global<F, R>(f: F) -> R
|
||||
where
|
||||
@ -1921,7 +1914,7 @@ pub mod tls {
|
||||
enter_context(&icx, |_| f(tcx))
|
||||
}
|
||||
|
||||
/// Allows access to the current ImplicitCtxt in a closure if one is available
|
||||
/// Allows access to the current `ImplicitCtxt` in a closure if one is available.
|
||||
#[inline]
|
||||
pub fn with_context_opt<F, R>(f: F) -> R
|
||||
where
|
||||
@ -1931,16 +1924,16 @@ pub mod tls {
|
||||
if context == 0 {
|
||||
f(None)
|
||||
} else {
|
||||
// We could get a ImplicitCtxt pointer from another thread.
|
||||
// Ensure that ImplicitCtxt is Sync
|
||||
// We could get a `ImplicitCtxt` pointer from another thread.
|
||||
// Ensure that `ImplicitCtxt` is `Sync`.
|
||||
sync::assert_sync::<ImplicitCtxt<'_, '_>>();
|
||||
|
||||
unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Allows access to the current ImplicitCtxt.
|
||||
/// Panics if there is no ImplicitCtxt available
|
||||
/// Allows access to the current `ImplicitCtxt`.
|
||||
/// Panics if there is no `ImplicitCtxt` available.
|
||||
#[inline]
|
||||
pub fn with_context<F, R>(f: F) -> R
|
||||
where
|
||||
@ -1949,11 +1942,11 @@ pub mod tls {
|
||||
with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
|
||||
}
|
||||
|
||||
/// Allows access to the current ImplicitCtxt whose tcx field has the same global
|
||||
/// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
|
||||
/// with the same 'tcx lifetime as the TyCtxt passed in.
|
||||
/// This will panic if you pass it a TyCtxt which has a different global interner from
|
||||
/// the current ImplicitCtxt's tcx field.
|
||||
/// Allows access to the current `ImplicitCtxt` whose tcx field has the same global
|
||||
/// interner as the tcx argument passed in. This means the closure is given an `ImplicitCtxt`
|
||||
/// with the same `'tcx` lifetime as the `TyCtxt` passed in.
|
||||
/// This will panic if you pass it a `TyCtxt` which has a different global interner from
|
||||
/// the current `ImplicitCtxt`'s `tcx` field.
|
||||
#[inline]
|
||||
pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R
|
||||
where
|
||||
@ -1968,8 +1961,8 @@ pub mod tls {
|
||||
})
|
||||
}
|
||||
|
||||
/// Allows access to the TyCtxt in the current ImplicitCtxt.
|
||||
/// Panics if there is no ImplicitCtxt available
|
||||
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
|
||||
/// Panics if there is no `ImplicitCtxt` available.
|
||||
#[inline]
|
||||
pub fn with<F, R>(f: F) -> R
|
||||
where
|
||||
@ -1978,8 +1971,8 @@ pub mod tls {
|
||||
with_context(|context| f(context.tcx))
|
||||
}
|
||||
|
||||
/// Allows access to the TyCtxt in the current ImplicitCtxt.
|
||||
/// The closure is passed None if there is no ImplicitCtxt available
|
||||
/// Allows access to the `TyCtxt` in the current `ImplicitCtxt`.
|
||||
/// The closure is passed None if there is no `ImplicitCtxt` available.
|
||||
#[inline]
|
||||
pub fn with_opt<F, R>(f: F) -> R
|
||||
where
|
||||
@ -1991,7 +1984,7 @@ pub mod tls {
|
||||
|
||||
macro_rules! sty_debug_print {
|
||||
($ctxt: expr, $($variant: ident),*) => {{
|
||||
// curious inner module to allow variant names to be used as
|
||||
// Curious inner module to allow variant names to be used as
|
||||
// variable names.
|
||||
#[allow(non_snake_case)]
|
||||
mod inner {
|
||||
@ -2265,9 +2258,9 @@ slice_interners!(
|
||||
projs: _intern_projs(ProjectionKind)
|
||||
);
|
||||
|
||||
// This isn't a perfect fit: CanonicalVarInfo slices are always
|
||||
// This isn't a perfect fit: `CanonicalVarInfo` slices are always
|
||||
// allocated in the global arena, so this `intern_method!` macro is
|
||||
// overly general. But we just return false for the code that checks
|
||||
// overly general. However, we just return `false` for the code that checks
|
||||
// whether they belong in the thread-local arena, so no harm done, and
|
||||
// seems better than open-coding the rest.
|
||||
intern_method! {
|
||||
@ -2366,7 +2359,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
|
||||
#[inline]
|
||||
pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
|
||||
// take a copy of substs so that we own the vectors inside
|
||||
// Take a copy of substs so that we own the vectors inside.
|
||||
self.mk_ty(Adt(def, substs))
|
||||
}
|
||||
|
||||
|
@ -39,8 +39,8 @@ use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
use crate::util::nodemap::FxHashSet;
|
||||
|
||||
/// The TypeFoldable trait is implemented for every type that can be folded.
|
||||
/// Basically, every type that has a corresponding method in TypeFolder.
|
||||
/// This trait is implemented for every type that can be folded.
|
||||
/// Basically, every type that has a corresponding method in `TypeFolder`.
|
||||
///
|
||||
/// To implement this conveniently, use the
|
||||
/// `BraceStructTypeFoldableImpl` etc macros found in `macros.rs`.
|
||||
|
@ -4,20 +4,20 @@ use rustc::hir::CRATE_HIR_ID;
use crate::ty::context::TyCtxt;
use crate::ty::{DefId, DefIdTree};

/// Represents a forest of DefIds closed under the ancestor relation. That is,
/// if a DefId representing a module is contained in the forest then all
/// DefIds defined in that module or submodules are also implicitly contained
/// Represents a forest of `DefId`s closed under the ancestor relation. That is,
/// if a `DefId` representing a module is contained in the forest then all
/// `DefId`s defined in that module or submodules are also implicitly contained
/// in the forest.
///
/// This is used to represent a set of modules in which a type is visibly
/// uninhabited.
#[derive(Clone)]
pub struct DefIdForest {
/// The minimal set of DefIds required to represent the whole set.
/// If A and B are DefIds in the DefIdForest, and A is a descendant
/// of B, then only B will be in root_ids.
/// We use a SmallVec here because (for its use for caching inhabitedness)
/// its rare that this will contain even two ids.
/// The minimal set of `DefId`s required to represent the whole set.
/// If A and B are DefIds in the `DefIdForest`, and A is a descendant
/// of B, then only B will be in `root_ids`.
/// We use a `SmallVec` here because (for its use for caching inhabitedness)
/// its rare that this will contain even two IDs.
root_ids: SmallVec<[DefId; 1]>,
}

@ -37,7 +37,7 @@ impl<'tcx> DefIdForest {
DefIdForest::from_id(crate_id)
}

/// Creates a forest containing a DefId and all its descendants.
/// Creates a forest containing a `DefId` and all its descendants.
pub fn from_id(id: DefId) -> DefIdForest {
let mut root_ids = SmallVec::new();
root_ids.push(id);
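The struct comment above describes keeping only the root IDs of a set that is closed under "descendant of". A small self-contained sketch of that idea, using a plain parent map and `u32` IDs instead of rustc's `DefId`/`DefIdTree` (all names here are illustrative):

use std::collections::HashMap;

/// Toy stand-in for a `DefIdForest`: a descendant-closed set represented only by its roots.
struct IdForest {
    root_ids: Vec<u32>,
}

impl IdForest {
    fn from_id(id: u32) -> IdForest {
        IdForest { root_ids: vec![id] }
    }

    /// An ID is in the forest if it, or any of its ancestors, is a root.
    fn contains(&self, parents: &HashMap<u32, u32>, mut id: u32) -> bool {
        loop {
            if self.root_ids.contains(&id) {
                return true;
            }
            match parents.get(&id) {
                Some(&parent) => id = parent,
                None => return false,
            }
        }
    }
}

fn main() {
    // Module tree: 0 is the crate root; 1 and 2 are children of 0; 3 is a child of 2.
    let parents: HashMap<u32, u32> = [(1, 0), (2, 0), (3, 2)].into_iter().collect();
    let forest = IdForest::from_id(2);
    assert!(forest.contains(&parents, 2));  // the root itself
    assert!(forest.contains(&parents, 3));  // a descendant of the root
    assert!(!forest.contains(&parents, 1)); // a sibling is not contained
}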
@ -1,3 +1,5 @@
pub use self::def_id_forest::DefIdForest;

use crate::ty::context::TyCtxt;
use crate::ty::{AdtDef, VariantDef, FieldDef, Ty, TyS};
use crate::ty::{DefId, SubstsRef};

@ -5,12 +7,10 @@ use crate::ty::{AdtKind, Visibility};
use crate::ty::TyKind::*;
use crate::ty;

pub use self::def_id_forest::DefIdForest;

mod def_id_forest;

// The methods in this module calculate DefIdForests of modules in which a
|
||||
// AdtDef/VariantDef/FieldDef is visibly uninhabited.
|
||||
// The methods in this module calculate `DefIdForest`s of modules in which a
|
||||
// `AdtDef`/`VariantDef`/`FieldDef` is visibly uninhabited.
|
||||
//
|
||||
// # Example
|
||||
// ```rust
|
||||
@ -36,24 +36,25 @@ mod def_id_forest;
// y: c::AlsoSecretlyUninhabited,
// }
// ```
// In this code, the type Foo will only be visibly uninhabited inside the
// modules b, c and d. Calling uninhabited_from on Foo or its AdtDef will
// return the forest of modules {b, c->d} (represented in a DefIdForest by the
// set {b, c})
// In this code, the type `Foo` will only be visibly uninhabited inside the
// modules `b`, `c` and `d`. Calling `uninhabited_from` on `Foo` or its `AdtDef` will
// return the forest of modules {`b`, `c`->`d`} (represented in a `DefIdForest` by the
// set {`b`, `c`}).
//
// We need this information for pattern-matching on Foo or types that contain
// Foo.
// We need this information for pattern-matching on `Foo` or types that contain
// `Foo`.
//
// # Example
// ```rust
// let foo_result: Result<T, Foo> = ... ;
// let Ok(t) = foo_result;
// ```
// This code should only compile in modules where the uninhabitedness of Foo is
// This code should only compile in modules where the uninhabitedness of `Foo` is
// visible.

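As a reminder of what "visibly uninhabited" buys the compiler, here is a tiny self-contained example. It sidesteps the module-privacy aspect discussed above and just uses an empty enum that is uninhabited everywhere (illustrative only):

enum Void {}

// Because `Void` has no values, a `match` with no arms covers every case and
// lets us produce the "never" type.
fn absurd(v: Void) -> ! {
    match v {}
}

fn unwrap_ok<T>(res: Result<T, Void>) -> T {
    match res {
        Ok(t) => t,
        // The error variant can never be constructed, so this arm is unreachable.
        Err(e) => absurd(e),
    }
}

fn main() {
    let res: Result<u32, Void> = Ok(7);
    assert_eq!(unwrap_ok(res), 7);
}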
impl<'tcx> TyCtxt<'tcx> {
|
||||
/// Checks whether a type is visibly uninhabited from a particular module.
|
||||
///
|
||||
/// # Example
|
||||
/// ```rust
|
||||
/// enum Void {}
|
||||
@ -91,7 +92,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
/// visible.
|
||||
pub fn is_ty_uninhabited_from(self, module: DefId, ty: Ty<'tcx>) -> bool {
|
||||
// To check whether this type is uninhabited at all (not just from the
|
||||
// given node) you could check whether the forest is empty.
|
||||
// given node), you could check whether the forest is empty.
|
||||
// ```
|
||||
// forest.is_empty()
|
||||
// ```
|
||||
@ -108,7 +109,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
}
|
||||
|
||||
impl<'tcx> AdtDef {
|
||||
/// Calculate the forest of DefIds from which this adt is visibly uninhabited.
|
||||
/// Calculates the forest of `DefId`s from which this ADT is visibly uninhabited.
|
||||
fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, substs: SubstsRef<'tcx>) -> DefIdForest {
|
||||
// Non-exhaustive ADTs from other crates are always considered inhabited.
|
||||
if self.is_variant_list_non_exhaustive() && !self.did.is_local() {
|
||||
@ -122,7 +123,7 @@ impl<'tcx> AdtDef {
|
||||
}
|
||||
|
||||
impl<'tcx> VariantDef {
|
||||
/// Calculate the forest of DefIds from which this variant is visibly uninhabited.
|
||||
/// Calculates the forest of `DefId`s from which this variant is visibly uninhabited.
|
||||
pub fn uninhabited_from(
|
||||
&self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
@ -148,7 +149,7 @@ impl<'tcx> VariantDef {
|
||||
}
|
||||
|
||||
impl<'tcx> FieldDef {
|
||||
/// Calculate the forest of DefIds from which this field is visibly uninhabited.
|
||||
/// Calculates the forest of `DefId`s from which this field is visibly uninhabited.
|
||||
fn uninhabited_from(
|
||||
&self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
@ -159,7 +160,7 @@ impl<'tcx> FieldDef {
|
||||
self.ty(tcx, substs).uninhabited_from(tcx)
|
||||
};
|
||||
// FIXME(canndrew): Currently enum fields are (incorrectly) stored with
|
||||
// Visibility::Invisible so we need to override self.vis if we're
|
||||
// `Visibility::Invisible` so we need to override `self.vis` if we're
|
||||
// dealing with an enum.
|
||||
if is_enum {
|
||||
data_uninhabitedness()
|
||||
@ -178,7 +179,7 @@ impl<'tcx> FieldDef {
|
||||
}
|
||||
|
||||
impl<'tcx> TyS<'tcx> {
|
||||
/// Calculate the forest of DefIds from which this type is visibly uninhabited.
|
||||
/// Calculates the forest of `DefId`s from which this type is visibly uninhabited.
|
||||
fn uninhabited_from(&self, tcx: TyCtxt<'tcx>) -> DefIdForest {
|
||||
match self.sty {
|
||||
Adt(def, substs) => def.uninhabited_from(tcx, substs),
|
||||
|
@ -590,7 +590,7 @@ impl<'tcx> rustc_serialize::UseSpecializedDecodable for Ty<'tcx> {}
|
||||
pub type CanonicalTy<'tcx> = Canonical<'tcx, Ty<'tcx>>;
|
||||
|
||||
extern {
|
||||
/// A dummy type used to force List to by unsized without requiring fat pointers
|
||||
/// A dummy type used to force `List` to by unsized without requiring fat pointers.
|
||||
type OpaqueListContents;
|
||||
}
|
||||
|
||||
|
@ -27,7 +27,8 @@ pub trait Print<'tcx, P> {
|
||||
/// which the associated types allow passing through the methods.
|
||||
///
|
||||
/// For pretty-printing/formatting in particular, see `PrettyPrinter`.
|
||||
// FIXME(eddyb) find a better name, this is more general than "printing".
|
||||
//
|
||||
// FIXME(eddyb) find a better name; this is more general than "printing".
|
||||
pub trait Printer<'tcx>: Sized {
|
||||
type Error;
|
||||
|
||||
@ -46,6 +47,7 @@ pub trait Printer<'tcx>: Sized {
|
||||
) -> Result<Self::Path, Self::Error> {
|
||||
self.default_print_def_path(def_id, substs)
|
||||
}
|
||||
|
||||
fn print_impl_path(
|
||||
self,
|
||||
impl_def_id: DefId,
|
||||
@ -80,6 +82,7 @@ pub trait Printer<'tcx>: Sized {
|
||||
self,
|
||||
cnum: CrateNum,
|
||||
) -> Result<Self::Path, Self::Error>;
|
||||
|
||||
fn path_qualified(
|
||||
self,
|
||||
self_ty: Ty<'tcx>,
|
||||
@ -93,11 +96,13 @@ pub trait Printer<'tcx>: Sized {
|
||||
self_ty: Ty<'tcx>,
|
||||
trait_ref: Option<ty::TraitRef<'tcx>>,
|
||||
) -> Result<Self::Path, Self::Error>;
|
||||
|
||||
fn path_append(
|
||||
self,
|
||||
print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
|
||||
disambiguated_data: &DisambiguatedDefPathData,
|
||||
) -> Result<Self::Path, Self::Error>;
|
||||
|
||||
fn path_generic_args(
|
||||
self,
|
||||
print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
|
||||
|
@ -8,10 +8,11 @@ use crate::ty::{self, DefIdTree, ParamConst, Ty, TyCtxt, TypeFoldable};
|
||||
use crate::ty::subst::{Kind, Subst, UnpackedKind};
|
||||
use crate::ty::layout::{Integer, IntegerExt, Size};
|
||||
use crate::mir::interpret::{ConstValue, sign_extend, Scalar, truncate};
|
||||
use syntax::ast;
|
||||
|
||||
use rustc_apfloat::ieee::{Double, Single};
|
||||
use rustc_apfloat::Float;
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use syntax::ast;
|
||||
use syntax::attr::{SignedInt, UnsignedInt};
|
||||
use syntax::symbol::{kw, InternedString};
|
||||
|
||||
@ -194,7 +195,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||
value.skip_binder().print(self)
|
||||
}
|
||||
|
||||
/// Print comma-separated elements.
|
||||
/// Prints comma-separated elements.
|
||||
fn comma_sep<T>(mut self, mut elems: impl Iterator<Item = T>) -> Result<Self, Self::Error>
|
||||
where
|
||||
T: Print<'tcx, Self, Output = Self, Error = Self::Error>,
|
||||
@ -209,14 +210,14 @@ pub trait PrettyPrinter<'tcx>:
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Print `<...>` around what `f` prints.
|
||||
/// Prints `<...>` around what `f` prints.
|
||||
fn generic_delimiters(
|
||||
self,
|
||||
f: impl FnOnce(Self) -> Result<Self, Self::Error>,
|
||||
) -> Result<Self, Self::Error>;
|
||||
|
||||
/// Return `true` if the region should be printed in
|
||||
/// optional positions, e.g. `&'a T` or `dyn Tr + 'b`.
|
||||
/// Returns `true` if the region should be printed in
|
||||
/// optional positions, e.g., `&'a T` or `dyn Tr + 'b`.
|
||||
/// This is typically the case for all non-`'_` regions.
|
||||
fn region_should_not_be_omitted(
|
||||
&self,
|
||||
@ -226,7 +227,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||
// Defaults (should not be overriden):
|
||||
|
||||
/// If possible, this returns a global path resolving to `def_id` that is visible
|
||||
/// from at least one local module and returns true. If the crate defining `def_id` is
|
||||
/// from at least one local module, and returns `true`. If the crate defining `def_id` is
|
||||
/// declared with an `extern crate`, the path is guaranteed to use the `extern crate`.
|
||||
fn try_print_visible_def_path(
|
||||
self,
|
||||
@ -267,11 +268,11 @@ pub trait PrettyPrinter<'tcx>:
|
||||
// In local mode, when we encounter a crate other than
|
||||
// LOCAL_CRATE, execution proceeds in one of two ways:
|
||||
//
|
||||
// 1. for a direct dependency, where user added an
|
||||
// 1. For a direct dependency, where user added an
|
||||
// `extern crate` manually, we put the `extern
|
||||
// crate` as the parent. So you wind up with
|
||||
// something relative to the current crate.
|
||||
// 2. for an extern inferred from a path or an indirect crate,
|
||||
// 2. For an extern inferred from a path or an indirect crate,
|
||||
// where there is no explicit `extern crate`, we just prepend
|
||||
// the crate name.
|
||||
match self.tcx().extern_crate(def_id) {
|
||||
@ -304,13 +305,13 @@ pub trait PrettyPrinter<'tcx>:
|
||||
let mut cur_def_key = self.tcx().def_key(def_id);
|
||||
debug!("try_print_visible_def_path: cur_def_key={:?}", cur_def_key);
|
||||
|
||||
// For a constructor we want the name of its parent rather than <unnamed>.
|
||||
// For a constructor, we want the name of its parent rather than <unnamed>.
|
||||
match cur_def_key.disambiguated_data.data {
|
||||
DefPathData::Ctor => {
|
||||
let parent = DefId {
|
||||
krate: def_id.krate,
|
||||
index: cur_def_key.parent
|
||||
.expect("DefPathData::Ctor/VariantData missing a parent"),
|
||||
.expect("`DefPathData::Ctor` / `VariantData` missing a parent"),
|
||||
};
|
||||
|
||||
cur_def_key = self.tcx().def_key(parent);
|
||||
@ -630,7 +631,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||
sep = ", ";
|
||||
}
|
||||
} else {
|
||||
// cross-crate closure types should only be
|
||||
// Cross-crate closure types should only be
|
||||
// visible in codegen bug reports, I imagine.
|
||||
p!(write("@{:?}", did));
|
||||
let mut sep = " ";
|
||||
@ -673,7 +674,7 @@ pub trait PrettyPrinter<'tcx>:
|
||||
sep = ", ";
|
||||
}
|
||||
} else {
|
||||
// cross-crate closure types should only be
|
||||
// Cross-crate closure types should only be
|
||||
// visible in codegen bug reports, I imagine.
|
||||
p!(write("@{:?}", did));
|
||||
let mut sep = " ";
|
||||
@ -1173,6 +1174,7 @@ impl<F: fmt::Write> Printer<'tcx> for FmtPrinter<'_, 'tcx, F> {
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn path_qualified(
|
||||
mut self,
|
||||
self_ty: Ty<'tcx>,
|
||||
@ -1201,6 +1203,7 @@ impl<F: fmt::Write> Printer<'tcx> for FmtPrinter<'_, 'tcx, F> {
|
||||
self.empty_path = false;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn path_append(
|
||||
mut self,
|
||||
print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
|
||||
@ -1238,6 +1241,7 @@ impl<F: fmt::Write> Printer<'tcx> for FmtPrinter<'_, 'tcx, F> {
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn path_generic_args(
|
||||
mut self,
|
||||
print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>,
|
||||
|
@ -5,9 +5,6 @@ use crate::hir::map::definitions::DefPathHash;
|
||||
use crate::ich::{CachingSourceMapView, Fingerprint};
|
||||
use crate::mir::{self, interpret};
|
||||
use crate::mir::interpret::{AllocDecodingSession, AllocDecodingState};
|
||||
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,
|
||||
SpecializedDecoder, SpecializedEncoder,
|
||||
UseSpecializedDecodable, UseSpecializedEncodable};
|
||||
use crate::session::{CrateDisambiguator, Session};
|
||||
use crate::ty::{self, Ty};
|
||||
use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
|
||||
@ -19,6 +16,10 @@ use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::thin_vec::ThinVec;
|
||||
use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once};
|
||||
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
|
||||
use rustc_serialize::{
|
||||
Decodable, Decoder, Encodable, Encoder, SpecializedDecoder, SpecializedEncoder,
|
||||
UseSpecializedDecodable, UseSpecializedEncodable, opaque,
|
||||
};
|
||||
use std::mem;
|
||||
use syntax::ast::{Ident, NodeId};
|
||||
use syntax::source_map::{SourceMap, StableSourceFileId};
|
||||
@ -37,17 +38,16 @@ const TAG_EXPN_DATA_INLINE: u8 = 2;
|
||||
const TAG_VALID_SPAN: u8 = 0;
|
||||
const TAG_INVALID_SPAN: u8 = 1;
|
||||
|
||||
/// `OnDiskCache` provides an interface to incr. comp. data cached from the
|
||||
/// Provides an interface to incremental compilation data cached from the
|
||||
/// previous compilation session. This data will eventually include the results
|
||||
/// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and
|
||||
/// any diagnostics that have been emitted during a query.
|
||||
pub struct OnDiskCache<'sess> {
|
||||
|
||||
// The complete cache data in serialized form.
|
||||
serialized_data: Vec<u8>,
|
||||
|
||||
// This field collects all Diagnostics emitted during the current
|
||||
// compilation session.
|
||||
// Collects all `Diagnostic`s emitted during the current compilation
|
||||
// session.
|
||||
current_diagnostics: Lock<FxHashMap<DepNodeIndex, Vec<Diagnostic>>>,
|
||||
|
||||
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
|
||||
@ -56,7 +56,7 @@ pub struct OnDiskCache<'sess> {
|
||||
source_map: &'sess SourceMap,
|
||||
file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
|
||||
|
||||
// These two fields caches that are populated lazily during decoding.
|
||||
// Caches that are populated lazily during decoding.
|
||||
file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
|
||||
synthetic_syntax_contexts: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
|
||||
|
||||
@ -78,7 +78,7 @@ struct Footer {
|
||||
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
|
||||
query_result_index: EncodedQueryResultIndex,
|
||||
diagnostics_index: EncodedQueryResultIndex,
|
||||
// the location of all allocations
|
||||
// The location of all allocations.
|
||||
interpret_alloc_index: Vec<u32>,
|
||||
}
|
||||
|
||||
@ -104,28 +104,28 @@ impl AbsoluteBytePos {
}

impl<'sess> OnDiskCache<'sess> {
/// Creates a new OnDiskCache instance from the serialized data in `data`.
pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache<'sess> {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> Self {
debug_assert!(sess.opts.incremental.is_some());

// Wrapping in a scope so we can borrow `data`
// Wrap in a scope so we can borrow `data`.
let footer: Footer = {
let mut decoder = opaque::Decoder::new(&data[..], start_pos);

// Decode the *position* of the footer which can be found in the
// Decode the *position* of the footer, which can be found in the
// last 8 bytes of the file.
decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE);
let query_result_index_pos = IntEncodedWithFixedSize::decode(&mut decoder)
.expect("Error while trying to decode query result index position.")
let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder)
.expect("error while trying to decode footer position")
.0 as usize;

// Decoder the file footer which contains all the lookup tables, etc.
decoder.set_position(query_result_index_pos);
// Decode the file footer, which contains all the lookup tables, etc.
decoder.set_position(footer_pos);
decode_tagged(&mut decoder, TAG_FILE_FOOTER)
.expect("Error while trying to decode query result index position.")
.expect("error while trying to decode footer position")
};

OnDiskCache {
Self {
serialized_data: data,
file_index_to_stable_id: footer.file_index_to_stable_id,
file_index_to_file: Default::default(),
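The footer handling a few lines above reads the footer's *position* from a fixed-size integer stored in the last 8 bytes of the cache data, seeks back to that position, and only then decodes the footer itself. A self-contained sketch of that layout using plain `u64`s and byte slices (the real code goes through `IntEncodedWithFixedSize` and `opaque::Decoder`; nothing here is that API):

/// Encode: [payload][footer][footer_pos as u64 LE], mirroring the on-disk cache layout.
fn write_blob(payload: &[u8], footer: &[u8]) -> Vec<u8> {
    let mut data = Vec::new();
    data.extend_from_slice(payload);
    let footer_pos = data.len() as u64;
    data.extend_from_slice(footer);
    data.extend_from_slice(&footer_pos.to_le_bytes()); // last 8 bytes: footer position
    data
}

/// Decode: read the last 8 bytes to find the footer position, then slice the footer out.
fn read_footer(data: &[u8]) -> Option<&[u8]> {
    const POS_SIZE: usize = std::mem::size_of::<u64>();
    if data.len() < POS_SIZE {
        return None;
    }
    let pos_bytes: [u8; POS_SIZE] = data[data.len() - POS_SIZE..].try_into().ok()?;
    let footer_pos = u64::from_le_bytes(pos_bytes) as usize;
    data.get(footer_pos..data.len() - POS_SIZE)
}

fn main() {
    let blob = write_blob(b"query results...", b"lookup tables");
    assert_eq!(read_footer(&blob), Some(&b"lookup tables"[..]));
}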
|
||||
@ -140,8 +140,8 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_empty(source_map: &'sess SourceMap) -> OnDiskCache<'sess> {
|
||||
OnDiskCache {
|
||||
pub fn new_empty(source_map: &'sess SourceMap) -> Self {
|
||||
Self {
|
||||
serialized_data: Vec::new(),
|
||||
file_index_to_stable_id: Default::default(),
|
||||
file_index_to_file: Default::default(),
|
||||
@ -158,11 +158,11 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
|
||||
pub fn serialize<'tcx, E>(&self, tcx: TyCtxt<'tcx>, encoder: &mut E) -> Result<(), E::Error>
|
||||
where
|
||||
E: ty_codec::TyEncoder,
|
||||
E: TyEncoder,
|
||||
{
|
||||
// Serializing the DepGraph should not modify it:
|
||||
// Serializing the `DepGraph` should not modify it.
|
||||
tcx.dep_graph.with_ignore(|| {
|
||||
// Allocate SourceFileIndices
|
||||
// Allocate `SourceFileIndex`es.
|
||||
let (file_to_file_index, file_index_to_stable_id) = {
|
||||
let files = tcx.sess.source_map().files();
|
||||
let mut file_to_file_index = FxHashMap::with_capacity_and_hasher(
|
||||
@ -197,7 +197,7 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
// be in memory, so this should be a cheap operation.
|
||||
tcx.dep_graph.exec_cache_promotions(tcx);
|
||||
|
||||
// Encode query results
|
||||
// Encode query results.
|
||||
let mut query_result_index = EncodedQueryResultIndex::new();
|
||||
|
||||
time(tcx.sess, "encode query results", || {
|
||||
@ -221,29 +221,28 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Encode diagnostics
|
||||
// Encode diagnostics.
|
||||
let diagnostics_index: EncodedDiagnosticsIndex = self.current_diagnostics.borrow()
|
||||
.iter()
|
||||
.map(|(dep_node_index, diagnostics)|
|
||||
{
|
||||
let pos = AbsoluteBytePos::new(encoder.position());
|
||||
// Let's make sure we get the expected type here:
|
||||
let diagnostics: &EncodedDiagnostics = diagnostics;
|
||||
let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index());
|
||||
encoder.encode_tagged(dep_node_index, diagnostics)?;
|
||||
.map(|(dep_node_index, diagnostics)| {
|
||||
let pos = AbsoluteBytePos::new(encoder.position());
|
||||
// Let's make sure we get the expected type here.
|
||||
let diagnostics: &EncodedDiagnostics = diagnostics;
|
||||
let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index());
|
||||
encoder.encode_tagged(dep_node_index, diagnostics)?;
|
||||
|
||||
Ok((dep_node_index, pos))
|
||||
})
|
||||
.collect::<Result<_, _>>()?;
|
||||
Ok((dep_node_index, pos))
|
||||
})
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
let interpret_alloc_index = {
|
||||
let mut interpret_alloc_index = Vec::new();
|
||||
let mut n = 0;
|
||||
loop {
|
||||
let new_n = encoder.interpret_allocs_inverse.len();
|
||||
// if we have found new ids, serialize those, too
|
||||
// If we have found new IDs, serialize those too.
|
||||
if n == new_n {
|
||||
// otherwise, abort
|
||||
// Otherwise, abort.
|
||||
break;
|
||||
}
|
||||
interpret_alloc_index.reserve(new_n - n);
|
||||
@ -263,13 +262,15 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
};
|
||||
|
||||
let sorted_cnums = sorted_cnums_including_local_crate(tcx);
|
||||
let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
|
||||
let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
|
||||
let crate_disambiguator = tcx.crate_disambiguator(cnum);
|
||||
(cnum.as_u32(), crate_name, crate_disambiguator)
|
||||
}).collect();
|
||||
let prev_cnums: Vec<_> = sorted_cnums.iter()
|
||||
.map(|&cnum| {
|
||||
let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
|
||||
let crate_disambiguator = tcx.crate_disambiguator(cnum);
|
||||
(cnum.as_u32(), crate_name, crate_disambiguator)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Encode the file footer
|
||||
// Encode the file footer.
|
||||
let footer_pos = encoder.position() as u64;
|
||||
encoder.encode_tagged(TAG_FILE_FOOTER, &Footer {
|
||||
file_index_to_stable_id,
|
||||
@ -371,7 +372,7 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
{
|
||||
let pos = index.get(&dep_node_index).cloned()?;
|
||||
|
||||
// Initialize the cnum_map using the value from the thread which finishes the closure first
|
||||
// Initialize `cnum_map` using the value from the thread that finishes the closure first.
|
||||
self.cnum_map.init_nonlocking_same(|| {
|
||||
Self::compute_cnum_map(tcx, &self.prev_cnums[..])
|
||||
});
|
||||
@ -381,25 +382,21 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()),
|
||||
source_map: self.source_map,
|
||||
cnum_map: self.cnum_map.get(),
|
||||
synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
|
||||
file_index_to_file: &self.file_index_to_file,
|
||||
file_index_to_stable_id: &self.file_index_to_stable_id,
|
||||
synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
|
||||
alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
|
||||
};
|
||||
|
||||
match decode_tagged(&mut decoder, dep_node_index) {
|
||||
Ok(value) => {
|
||||
Some(value)
|
||||
}
|
||||
Err(e) => {
|
||||
bug!("Could not decode cached {}: {}", debug_tag, e)
|
||||
}
|
||||
Ok(v) => Some(v),
|
||||
Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
|
||||
}
|
||||
}
|
||||
|
||||
// This function builds mapping from previous-session-CrateNum to
|
||||
// current-session-CrateNum. There might be CrateNums from the previous
|
||||
// Session that don't occur in the current one. For these, the mapping
|
||||
// This function builds mapping from previous-session-`CrateNum` to
|
||||
// current-session-`CrateNum`. There might be `CrateNum`s from the previous
|
||||
// `Session` that don't occur in the current one. For these, the mapping
|
||||
// maps to None.
|
||||
fn compute_cnum_map(
|
||||
tcx: TyCtxt<'_>,
|
||||
@ -432,9 +429,9 @@ impl<'sess> OnDiskCache<'sess> {
|
||||
|
||||
//- DECODING -------------------------------------------------------------------
|
||||
|
||||
/// A decoder that can read the incr. comp. cache. It is similar to the one
|
||||
/// we use for crate metadata decoding in that it can rebase spans and
|
||||
/// eventually will also handle things that contain `Ty` instances.
|
||||
/// A decoder that can read from the incr. comp. cache. It is similar to the one
|
||||
/// we use for crate metadata decoding in that it can rebase spans and eventually
|
||||
/// will also handle things that contain `Ty` instances.
|
||||
struct CacheDecoder<'a, 'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
opaque: opaque::Decoder<'a>,
|
||||
@ -458,7 +455,7 @@ impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
|
||||
file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
|
||||
let stable_id = file_index_to_stable_id[&index];
|
||||
source_map.source_file_by_stable_id(stable_id)
|
||||
.expect("Failed to lookup SourceFile in new context.")
|
||||
.expect("failed to lookup `SourceFile` in new context")
|
||||
}).clone()
|
||||
}
|
||||
}
|
||||
@ -479,7 +476,7 @@ impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
// Decode something that was encoded with encode_tagged() and verify that the
|
||||
// Decodes something that was encoded with `encode_tagged()` and verify that the
|
||||
// tag matches and the correct amount of bytes was read.
|
||||
fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> Result<V, D::Error>
|
||||
where
|
||||
@ -500,7 +497,7 @@ where
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> ty_codec::TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
|
||||
impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn tcx(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
@ -534,7 +531,7 @@ impl<'a, 'tcx> ty_codec::TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
|
||||
}
|
||||
|
||||
let ty = or_insert_with(self)?;
|
||||
// This may overwrite the entry, but it should overwrite with the same value
|
||||
// This may overwrite the entry, but it should overwrite with the same value.
|
||||
tcx.rcache.borrow_mut().insert_same(cache_key, ty);
|
||||
Ok(ty)
|
||||
}
|
||||
@ -553,7 +550,7 @@ impl<'a, 'tcx> ty_codec::TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
|
||||
|
||||
fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
|
||||
self.cnum_map[cnum].unwrap_or_else(|| {
|
||||
bug!("Could not find new CrateNum for {:?}", cnum)
|
||||
bug!("could not find new `CrateNum` for {:?}", cnum)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -635,25 +632,25 @@ impl<'a, 'tcx> SpecializedDecoder<Ident> for CacheDecoder<'a, 'tcx> {
|
||||
}
|
||||
|
||||
// This impl makes sure that we get a runtime error when we try decode a
|
||||
// DefIndex that is not contained in a DefId. Such a case would be problematic
|
||||
// because we would not know how to transform the DefIndex to the current
|
||||
// `DefIndex` that is not contained in a `DefId`. Such a case would be problematic
|
||||
// because we would not know how to transform the `DefIndex` to the current
|
||||
// context.
|
||||
impl<'a, 'tcx> SpecializedDecoder<DefIndex> for CacheDecoder<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<DefIndex, Self::Error> {
|
||||
bug!("Trying to decode DefIndex outside the context of a DefId")
|
||||
bug!("trying to decode `DefIndex` outside the context of a `DefId`")
|
||||
}
|
||||
}
|
||||
|
||||
// Both the CrateNum and the DefIndex of a DefId can change in between two
// compilation sessions. We use the DefPathHash, which is stable across
// sessions, to map the old DefId to the new one.
// Both the `CrateNum` and the `DefIndex` of a `DefId` can change in between two
// compilation sessions. We use the `DefPathHash`, which is stable across
// sessions, to map the old `DefId` to the new one.
impl<'a, 'tcx> SpecializedDecoder<DefId> for CacheDecoder<'a, 'tcx> {
#[inline]
fn specialized_decode(&mut self) -> Result<DefId, Self::Error> {
// Load the DefPathHash which is was we encoded the DefId as.
// Load the `DefPathHash`, which is what we encoded the `DefId` as.
let def_path_hash = DefPathHash::decode(self)?;

// Using the DefPathHash, we can lookup the new DefId
// Using the `DefPathHash`, we can look up the new `DefId`.
Ok(self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash])
}
}

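The comment above is the key cross-session invariant: numeric IDs may be renumbered between compilations, but a stable hash of the definition's path is not, so the cache stores the hash and resolves it against the current session's table. A self-contained sketch with toy types (the names mirror the real ones, but nothing here is rustc's API):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DefPathHash(u64); // stable across sessions

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefId(u32); // may be renumbered between sessions

/// What the cache stores for a definition: its stable hash, not its numeric ID.
fn encode_def_id(stable_hashes: &HashMap<DefId, DefPathHash>, id: DefId) -> DefPathHash {
    stable_hashes[&id]
}

/// What decoding does in the *next* session: stable hash -> current `DefId`.
fn decode_def_id(current: &HashMap<DefPathHash, DefId>, hash: DefPathHash) -> DefId {
    current[&hash]
}

fn main() {
    // Session 1 knows `foo::bar` as DefId(7); session 2 renumbers it to DefId(3).
    let hash = DefPathHash(0xDEAD_BEEF);
    let session1: HashMap<DefId, DefPathHash> = [(DefId(7), hash)].into_iter().collect();
    let session2: HashMap<DefPathHash, DefId> = [(hash, DefId(3))].into_iter().collect();

    let encoded = encode_def_id(&session1, DefId(7));
    assert_eq!(decode_def_id(&session2, encoded), DefId(3));
}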
@ -667,10 +664,10 @@ impl<'a, 'tcx> SpecializedDecoder<LocalDefId> for CacheDecoder<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<hir::HirId, Self::Error> {
|
||||
// Load the DefPathHash which is was we encoded the DefIndex as.
|
||||
// Load the `DefPathHash` which is what we encoded the `DefIndex` as.
|
||||
let def_path_hash = DefPathHash::decode(self)?;
|
||||
|
||||
// Use the DefPathHash to map to the current DefId.
|
||||
// Use the `DefPathHash` to map to the current `DefId`.
|
||||
let def_id = self.tcx()
|
||||
.def_path_hash_to_def_id
|
||||
.as_ref()
|
||||
@ -678,10 +675,10 @@ impl<'a, 'tcx> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx> {
|
||||
|
||||
debug_assert!(def_id.is_local());
|
||||
|
||||
// The ItemLocalId needs no remapping.
|
||||
// The `ItemLocalId` needs no remapping.
|
||||
let local_id = hir::ItemLocalId::decode(self)?;
|
||||
|
||||
// Reconstruct the HirId and look up the corresponding NodeId in the
|
||||
// Reconstruct the `HirId` and look up the corresponding `NodeId` in the
|
||||
// context of the current session.
|
||||
Ok(hir::HirId {
|
||||
owner: def_id.index,
|
||||
@ -690,8 +687,8 @@ impl<'a, 'tcx> SpecializedDecoder<hir::HirId> for CacheDecoder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
// NodeIds are not stable across compilation sessions, so we store them in their
|
||||
// HirId representation. This allows use to map them to the current NodeId.
|
||||
// `NodeId`s are not stable across compilation sessions, so we store them in their
|
||||
// `HirId` representation. This allows use to map them to the current `NodeId`.
|
||||
impl<'a, 'tcx> SpecializedDecoder<NodeId> for CacheDecoder<'a, 'tcx> {
|
||||
#[inline]
|
||||
fn specialized_decode(&mut self) -> Result<NodeId, Self::Error> {
|
||||
@ -728,6 +725,7 @@ impl<'a, 'tcx, T: Decodable> SpecializedDecoder<mir::ClearCrossCrate<T>>
|
||||
|
||||
//- ENCODING -------------------------------------------------------------------
|
||||
|
||||
/// An encoder that can write the incr. comp. cache.
|
||||
struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
encoder: &'a mut E,
|
||||
@ -742,7 +740,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
|
||||
|
||||
impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
|
||||
self.file_to_file_index[&(&*source_file as *const SourceFile)]
|
||||
@ -753,11 +751,11 @@ where
/// encode the specified tag, then the given value, then the number of
/// bytes taken up by tag and value. On decoding, we can then verify that
/// we get the expected tag and read the expected number of bytes.
fn encode_tagged<T: Encodable, V: Encodable>(&mut self,
tag: T,
value: &V)
-> Result<(), E::Error>
{
fn encode_tagged<T: Encodable, V: Encodable>(
&mut self,
tag: T,
value: &V
) -> Result<(), E::Error> {
let start_pos = self.position();

tag.encode(self)?;
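The doc comment above describes the tagging scheme: write the tag, then the value, then the number of bytes the pair took, so the reader can verify both. A self-contained sketch of that scheme over plain byte buffers, with a fixed-width `u64` value to keep it short (a toy format, not the real `opaque` encoder):

/// Writes: [tag][value: u64 LE][number of bytes used by tag + value, as u64 LE].
fn encode_tagged(out: &mut Vec<u8>, tag: u8, value: u64) {
    let start = out.len();
    out.push(tag);
    out.extend_from_slice(&value.to_le_bytes());
    let used = (out.len() - start) as u64;
    out.extend_from_slice(&used.to_le_bytes());
}

/// Reads a record back, verifying both the tag and the recorded byte count.
fn decode_tagged(data: &[u8], pos: usize, expected_tag: u8) -> Option<u64> {
    let tag = *data.get(pos)?;
    if tag != expected_tag {
        return None; // wrong tag
    }
    let value = u64::from_le_bytes(data.get(pos + 1..pos + 9)?.try_into().ok()?);
    let used = u64::from_le_bytes(data.get(pos + 9..pos + 17)?.try_into().ok()?);
    if used != 9 {
        return None; // encoder wrote a different number of bytes than we just read
    }
    Some(value)
}

fn main() {
    const TAG_FILE_FOOTER: u8 = 0xF0; // hypothetical tag value
    let mut buf = Vec::new();
    encode_tagged(&mut buf, TAG_FILE_FOOTER, 1234);
    assert_eq!(decode_tagged(&buf, 0, TAG_FILE_FOOTER), Some(1234));
    assert_eq!(decode_tagged(&buf, 0, 0x00), None);
}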
|
||||
@ -770,7 +768,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
|
||||
use std::collections::hash_map::Entry;
|
||||
@ -790,10 +788,9 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
|
||||
|
||||
if *span == DUMMY_SP {
|
||||
return TAG_INVALID_SPAN.encode(self);
|
||||
}
|
||||
@ -849,14 +846,14 @@ where
|
||||
// We don't currently encode enough information to ensure hygiene works
|
||||
// with incremental, so panic rather than risk incremental bugs.
|
||||
|
||||
// FIXME: Handle hygiene in incremental
|
||||
bug!("Trying to encode Ident for incremental")
|
||||
// FIXME: handle hygiene in incremental.
|
||||
bug!("trying to encode `Ident` for incremental");
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn position(&self) -> usize {
|
||||
@ -866,7 +863,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<CrateNum> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
|
||||
@ -876,7 +873,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<Ty<'tcx>> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
|
||||
@ -887,7 +884,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<ty::GenericPredicates<'tcx>> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self,
|
||||
@ -900,7 +897,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<hir::HirId> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> {
|
||||
@ -918,7 +915,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<DefId> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> {
|
||||
@ -929,7 +926,7 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<LocalDefId> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> {
|
||||
@ -939,18 +936,18 @@ where
|
||||
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<DefIndex> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> {
|
||||
bug!("Encoding DefIndex without context.")
|
||||
bug!("encoding `DefIndex` without context");
|
||||
}
|
||||
}
|
||||
|
||||
// NodeIds are not stable across compilation sessions, so we store them in their
|
||||
// HirId representation. This allows use to map them to the current NodeId.
|
||||
// `NodeId`s are not stable across compilation sessions, so we store them in their
|
||||
// `HirId` representation. This allows use to map them to the current `NodeId`.
|
||||
impl<'a, 'tcx, E> SpecializedEncoder<NodeId> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
#[inline]
|
||||
fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> {
|
||||
@ -967,7 +964,7 @@ impl<'a, 'tcx> SpecializedEncoder<Fingerprint> for CacheEncoder<'a, 'tcx, opaque
|
||||
|
||||
impl<'a, 'tcx, E, T> SpecializedEncoder<mir::ClearCrossCrate<T>> for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
T: Encodable,
|
||||
{
|
||||
#[inline]
|
||||
@ -996,7 +993,7 @@ macro_rules! encoder_methods {
|
||||
|
||||
impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
|
||||
where
|
||||
E: 'a + ty_codec::TyEncoder,
|
||||
E: 'a + TyEncoder,
|
||||
{
|
||||
type Error = E::Error;
|
||||
|
||||
@ -1040,7 +1037,7 @@ impl UseSpecializedDecodable for IntEncodedWithFixedSize {}
impl SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder {
fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> {
let start_pos = self.position();
for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE {
for i in 0..IntEncodedWithFixedSize::ENCODED_SIZE {
((x.0 >> i * 8) as u8).encode(self)?;
}
let end_pos = self.position();
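The loop above emits the integer one byte at a time, least-significant byte first, always writing `ENCODED_SIZE` bytes so the value occupies a fixed width in the stream. A minimal standalone sketch of the same idea (the constant and byte buffer are stand-ins, not rustc's `opaque::Encoder`):

```rust
const ENCODED_SIZE: usize = 8; // stand-in for IntEncodedWithFixedSize::ENCODED_SIZE

// Emit `x` as ENCODED_SIZE little-endian bytes, so every value takes the
// same number of bytes regardless of magnitude.
fn encode_fixed(buf: &mut Vec<u8>, x: u64) {
    for i in 0..ENCODED_SIZE {
        buf.push((x >> (i * 8)) as u8);
    }
}

// Reassemble the value by shifting each byte back into place.
fn decode_fixed(bytes: &[u8]) -> u64 {
    let mut x = 0u64;
    for i in 0..ENCODED_SIZE {
        x |= (bytes[i] as u64) << (i * 8);
    }
    x
}

fn main() {
    let mut buf = Vec::new();
    encode_fixed(&mut buf, 0xDEAD_BEEF);
    assert_eq!(buf.len(), ENCODED_SIZE);
    assert_eq!(decode_fixed(&buf), 0xDEAD_BEEF);
}
```

A fixed width is what makes values such as recorded byte positions easy to read back from a known offset in the stream.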
@ -1085,10 +1082,10 @@ where
if Q::cache_on_disk(tcx, key.clone(), Some(&entry.value)) {
let dep_node = SerializedDepNodeIndex::new(entry.index.index());

// Record position of the cache entry
// Record position of the cache entry.
query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.position())));

// Encode the type check tables with the SerializedDepNodeIndex
// Encode the type check tables with the `SerializedDepNodeIndex`
// as tag.
encoder.encode_tagged(dep_node, &entry.value)?;
}

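The pattern in this hunk is worth spelling out: before each entry is written, its absolute byte position is pushed into an index keyed by the serialized dep-node, so a later session can seek straight to the entry it needs. A simplified sketch with plain types (the index and entry layout here are illustrative, not the real cache format):

```rust
use std::convert::TryInto;

// Append each (key, payload) pair and remember where it starts.
fn write_entries(buf: &mut Vec<u8>, entries: &[(u64, Vec<u8>)]) -> Vec<(u64, usize)> {
    let mut index = Vec::new();
    for (key, payload) in entries {
        index.push((*key, buf.len())); // absolute byte position of this entry
        buf.extend_from_slice(&(payload.len() as u64).to_le_bytes());
        buf.extend_from_slice(payload);
    }
    index
}

// Random access: look the key up in the index and read only that entry.
fn read_entry<'a>(buf: &'a [u8], index: &[(u64, usize)], key: u64) -> Option<&'a [u8]> {
    let &(_, pos) = index.iter().find(|(k, _)| *k == key)?;
    let len = u64::from_le_bytes(buf[pos..pos + 8].try_into().ok()?) as usize;
    Some(&buf[pos + 8..pos + 8 + len])
}

fn main() {
    let mut buf = Vec::new();
    let entries = vec![(1, b"first".to_vec()), (2, b"second".to_vec())];
    let index = write_entries(&mut buf, &entries);
    assert_eq!(read_entry(&buf, &index, 2), Some(&b"second"[..]));
}
```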
@ -61,7 +61,7 @@ impl<'tcx, M: QueryConfig<'tcx>> Default for QueryCache<'tcx, M> {
|
||||
}
|
||||
}
|
||||
|
||||
// If enabled, send a message to the profile-queries thread
|
||||
// If enabled, sends a message to the profile-queries thread.
|
||||
macro_rules! profq_msg {
|
||||
($tcx:expr, $msg:expr) => {
|
||||
if cfg!(debug_assertions) {
|
||||
@ -72,7 +72,7 @@ macro_rules! profq_msg {
|
||||
}
|
||||
}
|
||||
|
||||
// If enabled, format a key using its debug string, which can be
|
||||
// If enabled, formats a key using its debug string, which can be
|
||||
// expensive to compute (in terms of time).
|
||||
macro_rules! profq_query_msg {
|
||||
($query:expr, $tcx:expr, $key:expr) => {{
|
||||
@ -98,7 +98,7 @@ pub(super) struct JobOwner<'a, 'tcx, Q: QueryDescription<'tcx>> {
|
||||
|
||||
impl<'a, 'tcx, Q: QueryDescription<'tcx>> JobOwner<'a, 'tcx, Q> {
|
||||
/// Either gets a `JobOwner` corresponding the query, allowing us to
|
||||
/// start executing the query, or it returns with the result of the query.
|
||||
/// start executing the query, or returns with the result of the query.
|
||||
/// If the query is executing elsewhere, this will wait for it.
|
||||
/// If the query panicked, this will silently panic.
|
||||
///
|
||||
@ -215,30 +215,30 @@ impl<'a, 'tcx, Q: QueryDescription<'tcx>> Drop for JobOwner<'a, 'tcx, Q> {
#[inline(never)]
#[cold]
fn drop(&mut self) {
// Poison the query so jobs waiting on it panic
// Poison the query so jobs waiting on it panic.
let shard = self.cache.get_shard_by_value(&self.key);
shard.lock().active.insert(self.key.clone(), QueryResult::Poisoned);
// Also signal the completion of the job, so waiters
// will continue execution
// will continue execution.
self.job.signal_complete();
}
}

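This `Drop` impl is the classic poisoning pattern: if the thread computing a query unwinds, the entry is flipped to a poisoned state and completion is signalled, so waiters continue instead of blocking forever and then panic themselves. A self-contained sketch of the idea with a plain mutex-protected map (these types are simplified stand-ins, not the real `JobOwner`):

```rust
use std::collections::HashMap;
use std::sync::{Arc, Condvar, Mutex};

#[derive(Clone, PartialEq, Debug)]
enum QueryState {
    Started,
    Poisoned,
}

struct ActiveQueries {
    map: Mutex<HashMap<&'static str, QueryState>>,
    cond: Condvar,
}

// Guard held while a query runs; on drop (including during a panic) it
// poisons the entry and wakes any waiters.
struct JobGuard {
    key: &'static str,
    queries: Arc<ActiveQueries>,
    finished: bool,
}

impl Drop for JobGuard {
    fn drop(&mut self) {
        if !self.finished {
            self.queries.map.lock().unwrap().insert(self.key, QueryState::Poisoned);
        }
        self.queries.cond.notify_all(); // let waiters continue
    }
}

fn main() {
    let queries = Arc::new(ActiveQueries {
        map: Mutex::new(HashMap::new()),
        cond: Condvar::new(),
    });
    queries.map.lock().unwrap().insert("layout_of", QueryState::Started);

    // Simulate a panicking query: the guard's Drop runs during unwinding.
    let q = Arc::clone(&queries);
    let _ = std::panic::catch_unwind(move || {
        let _guard = JobGuard { key: "layout_of", queries: q, finished: false };
        panic!("query computation failed");
    });

    assert_eq!(
        queries.map.lock().unwrap().get("layout_of"),
        Some(&QueryState::Poisoned)
    );
}
```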
#[derive(Clone)]
|
||||
pub struct CycleError<'tcx> {
|
||||
/// The query and related span which uses the cycle
|
||||
/// The query and related span that uses the cycle.
|
||||
pub(super) usage: Option<(Span, Query<'tcx>)>,
|
||||
pub(super) cycle: Vec<QueryInfo<'tcx>>,
|
||||
}
|
||||
|
||||
/// The result of `try_get_lock`
|
||||
/// The result of `try_get_lock`.
|
||||
pub(super) enum TryGetJob<'a, 'tcx, D: QueryDescription<'tcx>> {
|
||||
/// The query is not yet started. Contains a guard to the cache eventually used to start it.
|
||||
NotYetStarted(JobOwner<'a, 'tcx, D>),
|
||||
|
||||
/// The query was already completed.
|
||||
/// Returns the result of the query and its dep node index
|
||||
/// if it succeeded or a cycle error if it failed
|
||||
/// Returns the result of the query and its dep-node index
|
||||
/// if it succeeded or a cycle error if it failed.
|
||||
JobCompleted((D::Value, DepNodeIndex)),
|
||||
|
||||
/// Trying to execute the query resulted in a cycle.
|
||||
@ -246,7 +246,7 @@ pub(super) enum TryGetJob<'a, 'tcx, D: QueryDescription<'tcx>> {
}

impl<'tcx> TyCtxt<'tcx> {
/// Executes a job by changing the ImplicitCtxt to point to the
/// Executes a job by changing the `ImplicitCtxt` to point to the
/// new query job while it executes. It returns the diagnostics
/// captured during execution and the actual result.
#[inline(always)]
@ -259,11 +259,11 @@ impl<'tcx> TyCtxt<'tcx> {
where
F: FnOnce(TyCtxt<'tcx>) -> R,
{
// The TyCtxt stored in TLS has the same global interner lifetime
// The `TyCtxt` stored in TLS has the same global interner lifetime
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the ImplicitCtxt
// when accessing the `ImplicitCtxt`.
tls::with_related_context(self, move |current_icx| {
// Update the ImplicitCtxt to point to our new query job
// Update the `ImplicitCtxt` to point to our new query job.
let new_icx = tls::ImplicitCtxt {
tcx: self.global_tcx(),
query: Some(job),
@ -272,7 +272,7 @@ impl<'tcx> TyCtxt<'tcx> {
task_deps: current_icx.task_deps,
};

// Use the ImplicitCtxt while we execute the query
// Use the `ImplicitCtxt` while we execute the query.
tls::enter_context(&new_icx, |_| {
compute(self.global_tcx())
})
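The pattern here is a scoped swap of thread-local context: build a new `ImplicitCtxt` that points at the current query job, install it for the duration of the closure, and let the old one come back afterwards. A minimal standalone sketch of that shape using `thread_local!` and `RefCell` (the context type and field names are invented for illustration):

```rust
use std::cell::RefCell;

// A stand-in for the implicit context: just the name of the query being run.
#[derive(Clone)]
struct ImplicitCtxt {
    current_query: Option<&'static str>,
}

thread_local! {
    static TLS_CTXT: RefCell<Option<ImplicitCtxt>> = RefCell::new(None);
}

// Install `ctxt` for the duration of `f`, restoring the previous value after.
fn enter_context<R>(ctxt: &ImplicitCtxt, f: impl FnOnce() -> R) -> R {
    let prev = TLS_CTXT.with(|tls| tls.replace(Some(ctxt.clone())));
    let result = f();
    TLS_CTXT.with(|tls| *tls.borrow_mut() = prev);
    result
}

fn with_context<R>(f: impl FnOnce(Option<&ImplicitCtxt>) -> R) -> R {
    TLS_CTXT.with(|tls| f(tls.borrow().as_ref()))
}

fn main() {
    let icx = ImplicitCtxt { current_query: Some("type_of") };
    enter_context(&icx, || {
        with_context(|ctx| {
            assert_eq!(ctx.and_then(|c| c.current_query), Some("type_of"));
        });
    });
    // Outside the closure the context is gone again.
    with_context(|ctx| assert!(ctx.is_none()));
}
```

A real implementation would restore the previous context through a drop guard so a panicking query cannot leave the thread-local pointing at a dead job; the sketch skips that for brevity.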
@ -372,7 +372,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
};
|
||||
|
||||
// Fast path for when incr. comp. is off. `to_dep_node` is
|
||||
// expensive for some DepKinds.
|
||||
// expensive for some `DepKind`s.
|
||||
if !self.dep_graph.is_fully_enabled() {
|
||||
let null_dep_node = DepNode::new_no_params(crate::dep_graph::DepKind::Null);
|
||||
return self.force_query_with_job::<Q>(key, job, null_dep_node).0;
|
||||
@ -410,7 +410,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
if !Q::EVAL_ALWAYS {
|
||||
// The diagnostics for this query will be
|
||||
// promoted to the current session during
|
||||
// try_mark_green(), so we can ignore them here.
|
||||
// `try_mark_green()`, so we can ignore them here.
|
||||
let loaded = self.start_query(job.job.clone(), None, |tcx| {
|
||||
let marked = tcx.dep_graph.try_mark_green_and_read(tcx, &dep_node);
|
||||
marked.map(|(prev_dep_node_index, dep_node_index)| {
|
||||
@ -441,11 +441,11 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
dep_node: &DepNode,
|
||||
) -> Q::Value {
|
||||
// Note this function can be called concurrently from the same query
|
||||
// We must ensure that this is handled correctly
|
||||
// We must ensure that this is handled correctly.
|
||||
|
||||
debug_assert!(self.dep_graph.is_green(dep_node));
|
||||
|
||||
// First we try to load the result from the on-disk cache
|
||||
// First we try to load the result from the on-disk cache.
|
||||
let result = if Q::cache_on_disk(self.global_tcx(), key.clone(), None) &&
|
||||
self.sess.opts.debugging_opts.incremental_queries {
|
||||
self.sess.profiler(|p| p.incremental_load_result_start(Q::NAME));
|
||||
@ -453,10 +453,10 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
self.sess.profiler(|p| p.incremental_load_result_end(Q::NAME));
|
||||
|
||||
// We always expect to find a cached result for things that
|
||||
// can be forced from DepNode.
|
||||
// can be forced from `DepNode`.
|
||||
debug_assert!(!dep_node.kind.can_reconstruct_query_key() ||
|
||||
result.is_some(),
|
||||
"Missing on-disk cache entry for {:?}",
|
||||
"missing on-disk cache entry for {:?}",
|
||||
dep_node);
|
||||
result
|
||||
} else {
|
||||
@ -475,8 +475,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
|
||||
self.sess.profiler(|p| p.start_query(Q::NAME));
|
||||
|
||||
// The dep-graph for this computation is already in
|
||||
// place
|
||||
// The dep-graph for this computation is already in-place.
|
||||
let result = self.dep_graph.with_ignore(|| {
|
||||
Q::compute(self, key)
|
||||
});
|
||||
@ -485,7 +484,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
result
|
||||
};
|
||||
|
||||
// If -Zincremental-verify-ich is specified, re-hash results from
// If `-Zincremental-verify-ich` is specified, re-hash results from
// the cache and make sure that they have the expected fingerprint.
if unlikely!(self.sess.opts.debugging_opts.incremental_verify_ich) {
self.incremental_verify_ich::<Q>(&result, dep_node, dep_node_index);
@ -508,10 +507,12 @@ impl<'tcx> TyCtxt<'tcx> {
) {
use crate::ich::Fingerprint;

assert!(Some(self.dep_graph.fingerprint_of(dep_node_index)) ==
assert!(
Some(self.dep_graph.fingerprint_of(dep_node_index)) ==
self.dep_graph.prev_fingerprint_of(dep_node),
"Fingerprint for green query instance not loaded \
from cache: {:?}", dep_node);
"fingerprint for green query instance not loaded from cache: {:?}",
dep_node,
);

debug!("BEGIN verify_ich({:?})", dep_node);
let mut hcx = self.create_stable_hashing_context();
@ -521,8 +522,11 @@ impl<'tcx> TyCtxt<'tcx> {

let old_hash = self.dep_graph.fingerprint_of(dep_node_index);

assert!(new_hash == old_hash, "Found unstable fingerprints \
for {:?}", dep_node);
assert!(
new_hash == old_hash,
"found unstable fingerprints for {:?}",
dep_node,
);
}

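The verification step recomputes a stable hash of the value pulled from the cache and insists it equals the fingerprint the dep-graph recorded for that node; any divergence means hashing is unstable across sessions. A toy sketch of the check, with an ordinary hasher standing in for rustc's stable hashing context (a real stable hash must not depend on pointer addresses or randomized hasher state):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for a stable fingerprint: hash the value with a fixed hasher.
fn fingerprint_of<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

// Re-hash a result loaded from the cache and compare it against the
// fingerprint stored when the result was first computed.
fn verify_ich<T: Hash + std::fmt::Debug>(result: &T, recorded_fingerprint: u64) {
    let new_hash = fingerprint_of(result);
    assert!(
        new_hash == recorded_fingerprint,
        "found unstable fingerprints for {:?}",
        result,
    );
}

fn main() {
    let result = vec!["fn", "main"];
    let recorded = fingerprint_of(&result); // what a previous session would have stored
    verify_ich(&result, recorded);          // passes: hashes agree
}
```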
#[inline(always)]
|
||||
@ -534,11 +538,11 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
) -> (Q::Value, DepNodeIndex) {
|
||||
// If the following assertion triggers, it can have two reasons:
|
||||
// 1. Something is wrong with DepNode creation, either here or
|
||||
// in DepGraph::try_mark_green()
|
||||
// 2. Two distinct query keys get mapped to the same DepNode
|
||||
// (see for example #48923)
|
||||
// in `DepGraph::try_mark_green()`.
|
||||
// 2. Two distinct query keys get mapped to the same `DepNode`
|
||||
// (see for example #48923).
|
||||
assert!(!self.dep_graph.dep_node_exists(&dep_node),
|
||||
"Forcing query with already existing DepNode.\n\
|
||||
"forcing query with already existing `DepNode`\n\
|
||||
- query-key: {:?}\n\
|
||||
- dep-node: {:?}",
|
||||
key, dep_node);
|
||||
@ -584,7 +588,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
}
|
||||
|
||||
/// Ensure that either this query has all green inputs or been executed.
|
||||
/// Executing query::ensure(D) is considered a read of the dep-node D.
|
||||
/// Executing `query::ensure(D)` is considered a read of the dep-node `D`.
|
||||
///
|
||||
/// This function is particularly useful when executing passes for their
|
||||
/// side-effects -- e.g., in order to report errors for erroneous programs.
|
||||
@ -899,13 +903,13 @@ macro_rules! define_queries_inner {
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME(eddyb) Get more valid Span's on queries.
|
||||
// FIXME(eddyb) Get more valid `Span`s on queries.
|
||||
pub fn default_span(&self, tcx: TyCtxt<$tcx>, span: Span) -> Span {
|
||||
if !span.is_dummy() {
|
||||
return span;
|
||||
}
|
||||
// The def_span query is used to calculate default_span,
|
||||
// so exit to avoid infinite recursion
|
||||
// The `def_span` query is used to calculate `default_span`,
|
||||
// so exit to avoid infinite recursion.
|
||||
if let Query::def_span(..) = *self {
|
||||
return span
|
||||
}
|
||||
@ -1116,7 +1120,7 @@ macro_rules! define_provider_struct {
|
||||
impl<$tcx> Default for Providers<$tcx> {
|
||||
fn default() -> Self {
|
||||
$(fn $name<$tcx>(_: TyCtxt<$tcx>, key: $K) -> $R {
|
||||
bug!("tcx.{}({:?}) unsupported by its crate",
|
||||
bug!("`tcx.{}({:?})` unsupported by its crate",
|
||||
stringify!($name), key);
|
||||
})*
|
||||
Providers { $($name),* }
|
||||
@ -1128,26 +1132,26 @@ macro_rules! define_provider_struct {
|
||||
|
||||
/// The red/green evaluation system will try to mark a specific DepNode in the
|
||||
/// dependency graph as green by recursively trying to mark the dependencies of
|
||||
/// that DepNode as green. While doing so, it will sometimes encounter a DepNode
|
||||
/// that `DepNode` as green. While doing so, it will sometimes encounter a `DepNode`
|
||||
/// where we don't know if it is red or green and we therefore actually have
|
||||
/// to recompute its value in order to find out. Since the only piece of
|
||||
/// information that we have at that point is the DepNode we are trying to
|
||||
/// information that we have at that point is the `DepNode` we are trying to
|
||||
/// re-evaluate, we need some way to re-run a query from just that. This is what
|
||||
/// `force_from_dep_node()` implements.
|
||||
///
|
||||
/// In the general case, a DepNode consists of a DepKind and an opaque
|
||||
/// In the general case, a `DepNode` consists of a `DepKind` and an opaque
|
||||
/// GUID/fingerprint that will uniquely identify the node. This GUID/fingerprint
|
||||
/// is usually constructed by computing a stable hash of the query-key that the
|
||||
/// DepNode corresponds to. Consequently, it is not in general possible to go
|
||||
/// `DepNode` corresponds to. Consequently, it is not in general possible to go
|
||||
/// back from hash to query-key (since hash functions are not reversible). For
|
||||
/// this reason `force_from_dep_node()` is expected to fail from time to time
|
||||
/// because we just cannot find out, from the DepNode alone, what the
|
||||
/// because we just cannot find out, from the `DepNode` alone, what the
|
||||
/// corresponding query-key is and therefore cannot re-run the query.
|
||||
///
|
||||
/// The system deals with this case letting `try_mark_green` fail which forces
|
||||
/// the root query to be re-evaluated.
|
||||
///
|
||||
/// Now, if force_from_dep_node() would always fail, it would be pretty useless.
|
||||
/// Now, if `force_from_dep_node()` would always fail, it would be pretty useless.
|
||||
/// Fortunately, we can use some contextual information that will allow us to
|
||||
/// reconstruct query-keys for certain kinds of `DepNode`s. In particular, we
|
||||
/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a
|
||||
@ -1171,9 +1175,9 @@ macro_rules! define_provider_struct {
|
||||
pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool {
|
||||
use crate::dep_graph::RecoverKey;
|
||||
|
||||
// We must avoid ever having to call force_from_dep_node() for a
|
||||
// DepNode::codegen_unit:
|
||||
// Since we cannot reconstruct the query key of a DepNode::codegen_unit, we
|
||||
// We must avoid ever having to call `force_from_dep_node()` for a
|
||||
// `DepNode::codegen_unit`:
|
||||
// Since we cannot reconstruct the query key of a `DepNode::codegen_unit`, we
|
||||
// would always end up having to evaluate the first caller of the
|
||||
// `codegen_unit` query that *is* reconstructible. This might very well be
|
||||
// the `compile_codegen_unit` query, thus re-codegenning the whole CGU just
|
||||
@ -1196,7 +1200,7 @@ pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool {
|
||||
if let Some(def_id) = dep_node.extract_def_id(tcx) {
|
||||
def_id
|
||||
} else {
|
||||
// return from the whole function
|
||||
// Return from the whole function.
|
||||
return false
|
||||
}
|
||||
}
|
||||
@ -1224,20 +1228,20 @@ pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool {

rustc_dep_node_force!([dep_node, tcx]
// These are inputs that are expected to be pre-allocated and that
// should therefore always be red or green already
// should therefore always be red or green already.
DepKind::AllLocalTraitImpls |
DepKind::Krate |
DepKind::CrateMetadata |
DepKind::HirBody |
DepKind::Hir |

// This are anonymous nodes
// These are anonymous nodes.
DepKind::TraitSelect |

// We don't have enough information to reconstruct the query key of
// these
// these.
DepKind::CompileCodegenUnit => {
bug!("force_from_dep_node() - Encountered {:?}", dep_node)
bug!("force_from_dep_node: encountered {:?}", dep_node)
}

DepKind::Analysis => { force!(analysis, krate!()); }

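`force_from_dep_node` only succeeds when the query key can be reconstructed from the dep-node, as the doc comment earlier in this file explains; kinds whose fingerprints are opaque hashes simply report failure so the root query gets re-evaluated. A schematic sketch of that dispatch, with made-up kinds and a string key standing in for the real fingerprint machinery:

```rust
enum DepKind {
    // Key is just the crate: always reconstructible.
    CrateMetadata { crate_name: String },
    // Key was hashed into an opaque fingerprint: not reconstructible.
    TraitSelect,
}

// Try to re-run the query behind `dep_node`; return false if we cannot
// even recover its key (the caller then re-evaluates the root query).
fn force_from_dep_node(dep_node: &DepKind) -> bool {
    match dep_node {
        DepKind::CrateMetadata { crate_name } => {
            // In rustc this would invoke the corresponding query provider.
            println!("forcing crate_metadata({})", crate_name);
            true
        }
        DepKind::TraitSelect => false, // anonymous node, key is unrecoverable
    }
}

fn main() {
    assert!(force_from_dep_node(&DepKind::CrateMetadata {
        crate_name: "core".to_string(),
    }));
    assert!(!force_from_dep_node(&DepKind::TraitSelect));
}
```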
@ -644,7 +644,7 @@ impl<'tcx> Binder<ExistentialPredicate<'tcx>> {
|
||||
impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx List<ExistentialPredicate<'tcx>> {}
|
||||
|
||||
impl<'tcx> List<ExistentialPredicate<'tcx>> {
|
||||
/// Returns the "principal def id" of this set of existential predicates.
|
||||
/// Returns the "principal `DefId`" of this set of existential predicates.
|
||||
///
|
||||
/// A Rust trait object type consists (in addition to a lifetime bound)
|
||||
/// of a set of trait bounds, which are separated into any number
|
||||
@ -1052,7 +1052,7 @@ impl<'tcx> PolyGenSig<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Signature of a function type, which I have arbitrarily
|
||||
/// Signature of a function type, which we have arbitrarily
|
||||
/// decided to use to refer to the input/output types.
|
||||
///
|
||||
/// - `inputs`: is the list of arguments and their modes.
|
||||
@ -1076,7 +1076,8 @@ impl<'tcx> FnSig<'tcx> {
|
||||
self.inputs_and_output[self.inputs_and_output.len() - 1]
|
||||
}
|
||||
|
||||
// Create a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible method
|
||||
// Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible
|
||||
// method.
|
||||
fn fake() -> FnSig<'tcx> {
|
||||
FnSig {
|
||||
inputs_and_output: List::empty(),
|
||||
@ -1118,7 +1119,6 @@ impl<'tcx> PolyFnSig<'tcx> {
|
||||
|
||||
pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<FnSig<'tcx>>>;
|
||||
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord,
|
||||
Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct ParamTy {
|
||||
|
@ -1,11 +1,12 @@
|
||||
use crate::cfg::*;
|
||||
use rustc_data_structures::graph::implementation as graph;
|
||||
use rustc::middle::region;
|
||||
use rustc::ty::{self, TyCtxt};
|
||||
|
||||
use rustc::hir::{self, PatKind};
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::hir::ptr::P;
|
||||
use rustc::middle::region;
|
||||
use rustc::ty::{self, TyCtxt};
|
||||
|
||||
use rustc_data_structures::graph::implementation as graph;
|
||||
|
||||
struct CFGBuilder<'a, 'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
@ -19,15 +20,15 @@ struct CFGBuilder<'a, 'tcx> {
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
struct BlockScope {
|
||||
block_expr_id: hir::ItemLocalId, // id of breakable block expr node
|
||||
block_expr_id: hir::ItemLocalId, // ID of breakable block expr node
|
||||
break_index: CFGIndex, // where to go on `break`
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
struct LoopScope {
|
||||
loop_id: hir::ItemLocalId, // id of loop/while node
|
||||
loop_id: hir::ItemLocalId, // ID of `loop`/`while` node
|
||||
continue_index: CFGIndex, // where to go on a `loop`
|
||||
break_index: CFGIndex, // where to go on a `break`
|
||||
break_index: CFGIndex, // where to go on a `break`
|
||||
}
|
||||
|
||||
pub(super) fn construct(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG {
|
||||
@ -103,9 +104,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
let init_exit = self.opt_expr(&local.init, pred);
|
||||
self.pat(&local.pat, init_exit)
|
||||
}
|
||||
hir::StmtKind::Item(_) => {
|
||||
pred
|
||||
}
|
||||
hir::StmtKind::Item(_) => pred,
|
||||
hir::StmtKind::Expr(ref expr) |
|
||||
hir::StmtKind::Semi(ref expr) => {
|
||||
self.expr(&expr, pred)
|
||||
@ -154,12 +153,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn pats_all<'b, I: Iterator<Item=&'b P<hir::Pat>>>(
|
||||
/// Handles case where all of the patterns must match.
|
||||
fn pats_all<'b, I: Iterator<Item = &'b P<hir::Pat>>>(
|
||||
&mut self,
|
||||
pats: I,
|
||||
pred: CFGIndex
|
||||
pred: CFGIndex,
|
||||
) -> CFGIndex {
|
||||
//! Handles case where all of the patterns must match.
|
||||
pats.fold(pred, |pred, pat| self.pat(&pat, pred))
|
||||
}
|
||||
|
||||
@ -185,15 +184,15 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
// Note that `break` and `loop` statements
// may cause additional edges.

let loopback = self.add_dummy_node(&[pred]); // 1
let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2
let loopback = self.add_dummy_node(&[pred]); // 1
let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2
self.loop_scopes.push(LoopScope {
loop_id: expr.hir_id.local_id,
continue_index: loopback,
break_index: expr_exit,
});
let body_exit = self.block(&body, loopback); // 3
self.add_contained_edge(body_exit, loopback); // 4
let body_exit = self.block(&body, loopback); // 3
self.add_contained_edge(body_exit, loopback); // 4
self.loop_scopes.pop();
expr_exit
}
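The wiring for a `loop` is: a dummy loop-back node fed by the predecessor, an initially edgeless exit node that `break` targets, the body built with the loop-back node as its predecessor, and finally a back edge from the body's exit to the loop-back node. A tiny standalone graph sketch of those four steps (the graph type is invented for the example):

```rust
// Minimal directed graph: nodes are indices, edges are (from, to) pairs.
struct Cfg {
    node_count: usize,
    edges: Vec<(usize, usize)>,
}

impl Cfg {
    fn add_node(&mut self, preds: &[usize]) -> usize {
        let idx = self.node_count;
        self.node_count += 1;
        for &p in preds {
            self.edges.push((p, idx));
        }
        idx
    }
}

fn main() {
    let mut cfg = Cfg { node_count: 0, edges: Vec::new() };
    let pred = cfg.add_node(&[]);               // whatever came before the loop
    let loopback = cfg.add_node(&[pred]);       // 1: dummy loop-back node
    let expr_exit = cfg.add_node(&[]);          // 2: `break` jumps here
    let body_exit = cfg.add_node(&[loopback]);  // 3: loop body (collapsed to one node)
    cfg.edges.push((body_exit, loopback));      // 4: back edge, closing the cycle
    assert!(cfg.edges.contains(&(pred, loopback)));
    assert!(cfg.edges.contains(&(body_exit, loopback)));
    let _ = expr_exit; // would receive edges from any `break` in the body
}
```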
@ -217,9 +216,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
// v 3 v 4
|
||||
// [..exit..]
|
||||
//
|
||||
let l_exit = self.expr(&l, pred); // 1
|
||||
let r_exit = self.expr(&r, l_exit); // 2
|
||||
self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4
|
||||
let l_exit = self.expr(&l, pred); // 1
|
||||
let r_exit = self.expr(&r, l_exit); // 2
|
||||
self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4
|
||||
}
|
||||
|
||||
hir::ExprKind::Ret(ref v) => {
|
||||
@ -313,11 +312,13 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
|
||||
call_expr: &hir::Expr,
|
||||
pred: CFGIndex,
|
||||
func_or_rcvr: &hir::Expr,
|
||||
args: I) -> CFGIndex {
|
||||
fn call<'b, I: Iterator<Item = &'b hir::Expr>>(
|
||||
&mut self,
|
||||
call_expr: &hir::Expr,
|
||||
pred: CFGIndex,
|
||||
func_or_rcvr: &hir::Expr,
|
||||
args: I,
|
||||
) -> CFGIndex {
|
||||
let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
|
||||
let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
|
||||
let m = self.tcx.hir().get_module_parent(call_expr.hir_id);
|
||||
@ -328,33 +329,38 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn exprs<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
|
||||
exprs: I,
|
||||
pred: CFGIndex) -> CFGIndex {
|
||||
//! Constructs graph for `exprs` evaluated in order
|
||||
/// Constructs graph for `exprs` evaluated in order.
|
||||
fn exprs<'b, I: Iterator<Item = &'b hir::Expr>>(
|
||||
&mut self,
|
||||
exprs: I,
|
||||
pred: CFGIndex,
|
||||
) -> CFGIndex {
|
||||
exprs.fold(pred, |p, e| self.expr(e, p))
|
||||
}
|
||||
|
||||
fn opt_expr(&mut self,
|
||||
opt_expr: &Option<P<hir::Expr>>,
|
||||
pred: CFGIndex) -> CFGIndex {
|
||||
//! Constructs graph for `opt_expr` evaluated, if Some
|
||||
/// Constructs graph for `opt_expr` evaluated, if `Some`.
|
||||
fn opt_expr(
|
||||
&mut self,
|
||||
opt_expr: &Option<P<hir::Expr>>,
|
||||
pred: CFGIndex,
|
||||
) -> CFGIndex {
|
||||
opt_expr.iter().fold(pred, |p, e| self.expr(&e, p))
|
||||
}
|
||||
|
||||
fn straightline<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
|
||||
expr: &hir::Expr,
|
||||
pred: CFGIndex,
|
||||
subexprs: I) -> CFGIndex {
|
||||
//! Handles case of an expression that evaluates `subexprs` in order
|
||||
|
||||
/// Handles case of an expression that evaluates `subexprs` in order.
|
||||
fn straightline<'b, I: Iterator<Item = &'b hir::Expr>>(
|
||||
&mut self,
|
||||
expr: &hir::Expr,
|
||||
pred: CFGIndex,
|
||||
subexprs: I,
|
||||
) -> CFGIndex {
|
||||
let subexprs_exit = self.exprs(subexprs, pred);
|
||||
self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit])
|
||||
}
|
||||
|
||||
fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr,
|
||||
arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex {
|
||||
// The CFG for match expression is quite complex, so no ASCII
|
||||
// The CFG for match expressions is quite complex, so no ASCII
|
||||
// art for it (yet).
|
||||
//
|
||||
// The CFG generated below matches roughly what MIR contains.
|
||||
@ -369,13 +375,13 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
//
|
||||
// What is going on is explained in further comments.
|
||||
|
||||
// Visit the discriminant expression
|
||||
// Visit the discriminant expression.
|
||||
let discr_exit = self.expr(discr, pred);
|
||||
|
||||
// Add a node for the exit of the match expression as a whole.
|
||||
let expr_exit = self.add_ast_node(id, &[]);
|
||||
|
||||
// Keep track of the previous guard expressions
|
||||
// Keep track of the previous guard expressions.
|
||||
let mut prev_guard = None;
|
||||
let match_scope = region::Scope { id, data: region::ScopeData::Node };
|
||||
|
||||
@ -388,12 +394,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
// Visit the pattern, coming from the discriminant exit
|
||||
let mut pat_exit = self.pat(&pat, discr_exit);
|
||||
|
||||
// If there is a guard expression, handle it here
|
||||
// If there is a guard expression, handle it here.
|
||||
if let Some(ref guard) = arm.guard {
|
||||
// Add a dummy node for the previous guard
|
||||
// expression to target
|
||||
// expression to target.
|
||||
let guard_start = self.add_dummy_node(&[pat_exit]);
|
||||
// Visit the guard expression
|
||||
// Visit the guard expression.
|
||||
let guard_exit = match guard {
|
||||
hir::Guard::If(ref e) => (&**e, self.expr(e, guard_start)),
|
||||
};
|
||||
@ -407,24 +413,23 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
self.add_exiting_edge(prev_guard, prev_index, match_scope, guard_start);
|
||||
}
|
||||
|
||||
// Push the guard onto the list of previous guards
|
||||
// Push the guard onto the list of previous guards.
|
||||
prev_guard = Some(guard_exit);
|
||||
|
||||
// Update the exit node for the pattern
|
||||
// Update the exit node for the pattern.
|
||||
pat_exit = guard_exit.1;
|
||||
}
|
||||
|
||||
// Add an edge from the exit of this pattern to the
|
||||
// exit of the arm
|
||||
// Add an edge from the exit of this pattern to the exit of the arm.
|
||||
self.add_contained_edge(pat_exit, bindings_exit);
|
||||
}
|
||||
|
||||
// Visit the body of this arm
|
||||
// Visit the body of this arm.
|
||||
let body_exit = self.expr(&arm.body, bindings_exit);
|
||||
|
||||
let arm_exit = self.add_ast_node(arm.hir_id.local_id, &[body_exit]);
|
||||
|
||||
// Link the body to the exit of the expression
|
||||
// Link the body to the exit of the expression.
|
||||
self.add_contained_edge(arm_exit, expr_exit);
|
||||
}
|
||||
|
||||
@ -451,18 +456,22 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
node
|
||||
}
|
||||
|
||||
fn add_contained_edge(&mut self,
|
||||
source: CFGIndex,
|
||||
target: CFGIndex) {
|
||||
fn add_contained_edge(
|
||||
&mut self,
|
||||
source: CFGIndex,
|
||||
target: CFGIndex,
|
||||
) {
|
||||
let data = CFGEdgeData {exiting_scopes: vec![] };
|
||||
self.graph.add_edge(source, target, data);
|
||||
}
|
||||
|
||||
fn add_exiting_edge(&mut self,
|
||||
from_expr: &hir::Expr,
|
||||
from_index: CFGIndex,
|
||||
target_scope: region::Scope,
|
||||
to_index: CFGIndex) {
|
||||
fn add_exiting_edge(
|
||||
&mut self,
|
||||
from_expr: &hir::Expr,
|
||||
from_index: CFGIndex,
|
||||
target_scope: region::Scope,
|
||||
to_index: CFGIndex,
|
||||
) {
|
||||
let mut data = CFGEdgeData { exiting_scopes: vec![] };
|
||||
let mut scope = region::Scope {
|
||||
id: from_expr.hir_id.local_id,
|
||||
@ -476,9 +485,11 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
self.graph.add_edge(from_index, to_index, data);
|
||||
}
|
||||
|
||||
fn add_returning_edge(&mut self,
|
||||
_from_expr: &hir::Expr,
|
||||
from_index: CFGIndex) {
|
||||
fn add_returning_edge(
|
||||
&mut self,
|
||||
_from_expr: &hir::Expr,
|
||||
from_index: CFGIndex,
|
||||
) {
|
||||
let data = CFGEdgeData {
|
||||
exiting_scopes: self.loop_scopes.iter()
|
||||
.rev()
|
||||
@ -488,11 +499,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
self.graph.add_edge(from_index, self.fn_exit, data);
|
||||
}
|
||||
|
||||
fn find_scope_edge(&self,
|
||||
expr: &hir::Expr,
|
||||
destination: hir::Destination,
|
||||
scope_cf_kind: ScopeCfKind) -> (region::Scope, CFGIndex) {
|
||||
|
||||
fn find_scope_edge(
|
||||
&self,
|
||||
expr: &hir::Expr,
|
||||
destination: hir::Destination,
|
||||
scope_cf_kind: ScopeCfKind,
|
||||
) -> (region::Scope, CFGIndex) {
|
||||
match destination.target_id {
|
||||
Ok(loop_id) => {
|
||||
for b in &self.breakable_block_scopes {
|
||||
@ -519,7 +531,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
});
|
||||
}
|
||||
}
|
||||
span_bug!(expr.span, "no scope for id {}", loop_id);
|
||||
span_bug!(expr.span, "no scope for ID {}", loop_id);
|
||||
}
|
||||
Err(err) => span_bug!(expr.span, "scope error: {}", err),
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
/// This module provides linkage between rustc::middle::graph and
|
||||
/// This module provides linkage between `rustc::middle::graph` and
|
||||
/// libgraphviz traits.
|
||||
|
||||
use crate::cfg;
|
||||
@ -12,7 +12,7 @@ pub struct LabelledCFG<'a, 'tcx> {
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
pub cfg: &'a cfg::CFG,
|
||||
pub name: String,
|
||||
/// `labelled_edges` controls whether we emit labels on the edges
|
||||
/// `labelled_edges` controls whether we emit labels on the edges.
|
||||
pub labelled_edges: bool,
|
||||
}
|
||||
|
||||
@ -25,12 +25,12 @@ impl<'a, 'tcx> LabelledCFG<'a, 'tcx> {
|
||||
};
|
||||
let s = self.tcx.hir().node_to_string(hir_id);
|
||||
|
||||
// Replacing newlines with \\l causes each line to be left-aligned,
|
||||
// Replacing newlines with `\\l` causes each line to be left-aligned,
|
||||
// improving presentation of (long) pretty-printed expressions.
|
||||
if s.contains("\n") {
|
||||
let mut s = s.replace("\n", "\\l");
|
||||
// Apparently left-alignment applies to the line that precedes
|
||||
// \l, not the line that follows; so, add \l at end of string
|
||||
// `\l`, not the line that follows; so, add `\l` at end of string
|
||||
// if not already present, ensuring last line gets left-aligned
|
||||
// as well.
|
||||
let mut last_two: Vec<_> =
|
||||
@ -109,8 +109,7 @@ impl<'a> dot::GraphWalk<'a> for &'a cfg::CFG {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir>
|
||||
{
|
||||
impl<'a, 'hir> dot::GraphWalk<'a> for LabelledCFG<'a, 'hir> {
|
||||
type Node = Node<'a>;
|
||||
type Edge = Edge<'a>;
|
||||
fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.cfg.nodes() }
|
||||
|
@ -11,11 +11,7 @@ crate-type = ["dylib"]
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
cc = "1.0.1" # Used to locate MSVC
|
||||
num_cpus = "1.0"
|
||||
tempfile = "3.0"
|
||||
rustc_llvm = { path = "../librustc_llvm" }
|
||||
memmap = "0.6"
|
||||
|
||||
[features]
|
||||
# This is used to convince Cargo to separately cache builds of `rustc_codegen_llvm`
|
||||
|
@ -1,4 +1,4 @@
|
||||
register_long_diagnostics! {
|
||||
register_diagnostics! {
|
||||
|
||||
E0511: r##"
|
||||
Invalid monomorphization of an intrinsic function was used. Erroneous code
|
||||
|
@ -14,7 +14,6 @@
|
||||
#![feature(in_band_lifetimes)]
|
||||
#![feature(libc)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(optin_builtin_traits)]
|
||||
#![feature(concat_idents)]
|
||||
#![feature(link_args)]
|
||||
@ -256,7 +255,7 @@ impl CodegenBackend for LlvmCodegenBackend {
|
||||
}
|
||||
|
||||
fn diagnostics(&self) -> &[(&'static str, &'static str)] {
|
||||
&DIAGNOSTICS
|
||||
&error_codes::DIAGNOSTICS
|
||||
}
|
||||
|
||||
fn target_features(&self, sess: &Session) -> Vec<Symbol> {
|
||||
@ -425,5 +424,3 @@ impl Drop for ModuleLlvm {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_codegen_llvm, DIAGNOSTICS }
|
||||
|
@ -1,4 +1,4 @@
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
E0668: r##"
|
||||
Malformed inline assembly rejected by LLVM.
|
||||
|
@ -4,7 +4,6 @@
|
||||
#![feature(box_syntax)]
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(libc)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(stmt_expr_attributes)]
|
||||
#![feature(try_blocks)]
|
||||
#![feature(in_band_lifetimes)]
|
||||
@ -35,8 +34,6 @@ use rustc_data_structures::svh::Svh;
|
||||
use rustc::middle::cstore::{LibSource, CrateSource, NativeLibrary};
|
||||
use syntax_pos::symbol::Symbol;
|
||||
|
||||
// N.B., this module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
mod error_codes;
|
||||
|
||||
pub mod common;
|
||||
@ -158,5 +155,3 @@ pub struct CodegenResults {
|
||||
pub linker_info: back::linker::LinkerInfo,
|
||||
pub crate_info: CrateInfo,
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_codegen_ssa, DIAGNOSTICS }
|
||||
|
@ -10,7 +10,6 @@
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(never_type)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(in_band_lifetimes)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
@ -12,7 +12,7 @@ crate-type = ["dylib"]
|
||||
[dependencies]
|
||||
graphviz = { path = "../libgraphviz" }
|
||||
log = "0.4"
|
||||
env_logger = { version = "0.5", default-features = false }
|
||||
env_logger = { version = "0.6", default-features = false }
|
||||
rustc = { path = "../librustc" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
rustc_ast_borrowck = { path = "../librustc_ast_borrowck" }
|
||||
|
@ -9,7 +9,6 @@
|
||||
#![feature(box_syntax)]
|
||||
#![cfg_attr(unix, feature(libc))]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(set_stdio)]
|
||||
#![feature(no_debug)]
|
||||
#![feature(integer_atomics)]
|
||||
|
@ -34,7 +34,7 @@ use rustc_privacy;
|
||||
use rustc_resolve::{Resolver, ResolverArenas};
|
||||
use rustc_traits;
|
||||
use rustc_typeck as typeck;
|
||||
use syntax::{self, ast, diagnostics, visit};
|
||||
use syntax::{self, ast, visit};
|
||||
use syntax::early_buffered_lints::BufferedEarlyLint;
|
||||
use syntax::ext::base::{NamedSyntaxExtension, ExtCtxt};
|
||||
use syntax::mut_visit::MutVisitor;
|
||||
@ -283,21 +283,6 @@ pub fn register_plugins<'a>(
|
||||
let mut registry = Registry::new(sess, krate.span);
|
||||
|
||||
time(sess, "plugin registration", || {
|
||||
if sess.features_untracked().rustc_diagnostic_macros {
|
||||
registry.register_macro(
|
||||
"__diagnostic_used",
|
||||
diagnostics::plugin::expand_diagnostic_used,
|
||||
);
|
||||
registry.register_macro(
|
||||
"__register_diagnostic",
|
||||
diagnostics::plugin::expand_register_diagnostic,
|
||||
);
|
||||
registry.register_macro(
|
||||
"__build_diagnostic_array",
|
||||
diagnostics::plugin::expand_build_diagnostic_array,
|
||||
);
|
||||
}
|
||||
|
||||
for registrar in registrars {
|
||||
registry.args_hidden = Some(registrar.args);
|
||||
(registrar.fun)(&mut registry);
|
||||
|
@ -43,17 +43,17 @@ use std::{thread, panic};
|
||||
|
||||
pub fn diagnostics_registry() -> Registry {
|
||||
let mut all_errors = Vec::new();
|
||||
all_errors.extend_from_slice(&rustc::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_typeck::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_resolve::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_privacy::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_typeck::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_resolve::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_privacy::error_codes::DIAGNOSTICS);
|
||||
// FIXME: need to figure out a way to get these back in here
|
||||
// all_errors.extend_from_slice(get_codegen_backend(sess).diagnostics());
|
||||
all_errors.extend_from_slice(&rustc_metadata::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_passes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_plugin::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_mir::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&syntax::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_metadata::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_passes::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_plugin::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_mir::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&syntax::error_codes::DIAGNOSTICS);
|
||||
|
||||
Registry::new(&all_errors)
|
||||
}
|
||||
|
@ -1,5 +1,4 @@
|
||||
use syntax::register_diagnostics;
|
||||
|
||||
register_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
;
|
||||
E0721, // `await` keyword
|
||||
}
|
||||
|
@ -15,7 +15,6 @@
|
||||
#![feature(box_patterns)]
|
||||
#![feature(box_syntax)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
||||
|
@ -1,6 +1,4 @@
|
||||
use syntax::{register_diagnostics, register_long_diagnostics};
|
||||
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
E0454: r##"
|
||||
A link name was given with an empty name. Erroneous code example:
|
||||
|
||||
@ -84,10 +82,7 @@ You need to link your code to the relevant crate in order to be able to use it
|
||||
(through Cargo or the `-L` option of rustc example). Plugins are crates as
|
||||
well, and you link to them the same way.
|
||||
"##,
|
||||
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
;
|
||||
E0456, // plugin `..` is not available for triple `..`
|
||||
E0457, // plugin `..` only found in rlib format, but must be available...
|
||||
E0514, // metadata version mismatch
|
||||
@ -97,5 +92,6 @@ register_diagnostics! {
|
||||
E0464, // multiple matching crates for `..`
|
||||
E0465, // multiple .. candidates for `..` found
|
||||
E0519, // local crate and dependency have same (crate-name, disambiguator)
|
||||
E0523, // two dependencies have same (crate-name, disambiguator) but different SVH
|
||||
// two dependencies have same (crate-name, disambiguator) but different SVH
|
||||
E0523,
|
||||
}
|
||||
|
@ -8,7 +8,6 @@
|
||||
#![feature(nll)]
|
||||
#![feature(proc_macro_internals)]
|
||||
#![feature(proc_macro_quote)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(rustc_private)]
|
||||
#![feature(slice_patterns)]
|
||||
#![feature(specialization)]
|
||||
@ -23,7 +22,7 @@ extern crate rustc;
|
||||
#[macro_use]
|
||||
extern crate rustc_data_structures;
|
||||
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
|
||||
mod index;
|
||||
mod encoder;
|
||||
@ -68,5 +67,3 @@ pub fn validate_crate_name(
|
||||
sess.unwrap().abort_if_errors();
|
||||
}
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_metadata, DIAGNOSTICS }
|
||||
|
@ -1,4 +1,4 @@
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
|
||||
E0001: r##"
|
||||
@ -2448,9 +2448,9 @@ information.
|
||||
|
||||
There are some known bugs that trigger this message.
|
||||
"##,
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
;
|
||||
|
||||
// E0298, // cannot compare constants
|
||||
// E0299, // mismatched types between arms
|
||||
// E0471, // constant evaluation error (in pattern)
|
||||
|
@ -14,7 +14,6 @@ Rust MIR: a lowered representation of Rust. Also: an experiment!
|
||||
#![feature(const_fn)]
|
||||
#![feature(decl_macro)]
|
||||
#![feature(exhaustive_patterns)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(never_type)]
|
||||
#![feature(specialization)]
|
||||
#![feature(try_trait)]
|
||||
@ -32,7 +31,7 @@ Rust MIR: a lowered representation of Rust. Also: an experiment!
|
||||
#[macro_use] extern crate rustc_data_structures;
|
||||
#[macro_use] extern crate syntax;
|
||||
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
|
||||
mod borrow_check;
|
||||
mod build;
|
||||
@ -62,5 +61,3 @@ pub fn provide(providers: &mut Providers<'_>) {
|
||||
};
|
||||
providers.type_name = interpret::type_name;
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_mir, DIAGNOSTICS }
|
||||
|
@ -1,6 +1,4 @@
|
||||
use syntax::{register_diagnostics, register_long_diagnostics};
|
||||
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
/*
|
||||
E0014: r##"
|
||||
Constants can only be initialized by a constant value or, in a future
|
||||
@ -320,10 +318,8 @@ async fn foo() {}
|
||||
```
|
||||
|
||||
Switch to the Rust 2018 edition to use `async fn`.
|
||||
"##
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
"##,
|
||||
;
|
||||
E0226, // only a single explicit lifetime bound is permitted
|
||||
E0472, // asm! is unsupported on this target
|
||||
E0561, // patterns aren't allowed in function pointer types
|
||||
|
@ -9,7 +9,6 @@
|
||||
#![feature(in_band_lifetimes)]
|
||||
#![feature(nll)]
|
||||
#![feature(bind_by_move_pattern_guards)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
||||
@ -18,7 +17,7 @@ extern crate rustc;
|
||||
|
||||
use rustc::ty::query::Providers;
|
||||
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
|
||||
pub mod ast_validation;
|
||||
pub mod rvalue_promotion;
|
||||
@ -26,8 +25,6 @@ pub mod hir_stats;
|
||||
pub mod layout_test;
|
||||
pub mod loops;
|
||||
|
||||
__build_diagnostic_array! { librustc_passes, DIAGNOSTICS }
|
||||
|
||||
pub fn provide(providers: &mut Providers<'_>) {
|
||||
rvalue_promotion::provide(providers);
|
||||
loops::provide(providers);
|
||||
|
@ -1,9 +1,4 @@
|
||||
use syntax::{register_diagnostics, register_long_diagnostics};
|
||||
|
||||
register_long_diagnostics! {
|
||||
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
E0498 // malformed plugin attribute
|
||||
syntax::register_diagnostics! {
|
||||
;
|
||||
E0498, // malformed plugin attribute
|
||||
}
|
||||
|
@ -54,15 +54,12 @@
|
||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
|
||||
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
||||
pub use registry::Registry;
|
||||
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
pub mod registry;
|
||||
pub mod load;
|
||||
pub mod build;
|
||||
|
||||
__build_diagnostic_array! { librustc_plugin, DIAGNOSTICS }
|
||||
|
@ -1,4 +1,4 @@
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
E0445: r##"
|
||||
A private trait was used on a public type parameter bound. Erroneous code
|
||||
@ -154,8 +154,5 @@ let f = Bar::Foo::new(); // ok!
|
||||
```
|
||||
"##,
|
||||
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
// E0450, moved into resolve
|
||||
}
|
||||
|
@ -2,7 +2,6 @@
|
||||
|
||||
#![feature(in_band_lifetimes)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
||||
@ -31,7 +30,7 @@ use syntax_pos::Span;
|
||||
use std::{cmp, fmt, mem};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
/// Generic infrastructure used to implement specific visitors below.
|
||||
@ -2035,5 +2034,3 @@ fn check_private_in_public(tcx: TyCtxt<'_>, krate: CrateNum) {
|
||||
};
|
||||
krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor));
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_privacy, DIAGNOSTICS }
|
||||
|
@ -1,9 +1,7 @@
|
||||
use syntax::{register_diagnostics, register_long_diagnostics};
|
||||
|
||||
// Error messages for EXXXX errors. Each message should start and end with a
|
||||
// new line, and be wrapped to 80 characters. In vim you can `:set tw=80` and
|
||||
// use `gq` to wrap paragraphs. Use `:set tw=0` to disable.
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
E0128: r##"
|
||||
Type parameter defaults can only use parameters that occur before them.
|
||||
@ -1662,10 +1660,7 @@ fn const_id<T, const N: T>() -> T { // error: const parameter
|
||||
}
|
||||
```
|
||||
"##,
|
||||
|
||||
}
|
||||
|
||||
register_diagnostics! {
|
||||
;
|
||||
// E0153, unused error code
|
||||
// E0157, unused error code
|
||||
// E0257,
|
||||
|
@ -298,18 +298,18 @@ impl<'a> PathSource<'a> {
|
||||
}
|
||||
|
||||
fn error_code(self, has_unexpected_resolution: bool) -> &'static str {
|
||||
__diagnostic_used!(E0404);
|
||||
__diagnostic_used!(E0405);
|
||||
__diagnostic_used!(E0412);
|
||||
__diagnostic_used!(E0422);
|
||||
__diagnostic_used!(E0423);
|
||||
__diagnostic_used!(E0425);
|
||||
__diagnostic_used!(E0531);
|
||||
__diagnostic_used!(E0532);
|
||||
__diagnostic_used!(E0573);
|
||||
__diagnostic_used!(E0574);
|
||||
__diagnostic_used!(E0575);
|
||||
__diagnostic_used!(E0576);
|
||||
syntax::diagnostic_used!(E0404);
|
||||
syntax::diagnostic_used!(E0405);
|
||||
syntax::diagnostic_used!(E0412);
|
||||
syntax::diagnostic_used!(E0422);
|
||||
syntax::diagnostic_used!(E0423);
|
||||
syntax::diagnostic_used!(E0425);
|
||||
syntax::diagnostic_used!(E0531);
|
||||
syntax::diagnostic_used!(E0532);
|
||||
syntax::diagnostic_used!(E0573);
|
||||
syntax::diagnostic_used!(E0574);
|
||||
syntax::diagnostic_used!(E0575);
|
||||
syntax::diagnostic_used!(E0576);
|
||||
match (self, has_unexpected_resolution) {
|
||||
(PathSource::Trait(_), true) => "E0404",
|
||||
(PathSource::Trait(_), false) => "E0405",
|
||||
|
@ -113,7 +113,7 @@ impl<'a> LateResolutionVisitor<'a, '_> {
|
||||
|
||||
// Emit special messages for unresolved `Self` and `self`.
|
||||
if is_self_type(path, ns) {
|
||||
__diagnostic_used!(E0411);
|
||||
syntax::diagnostic_used!(E0411);
|
||||
err.code(DiagnosticId::Error("E0411".into()));
|
||||
err.span_label(span, format!("`Self` is only available in impls, traits, \
|
||||
and type definitions"));
|
||||
@ -122,7 +122,7 @@ impl<'a> LateResolutionVisitor<'a, '_> {
|
||||
if is_self_value(path, ns) {
|
||||
debug!("smart_resolve_path_fragment: E0424, source={:?}", source);
|
||||
|
||||
__diagnostic_used!(E0424);
|
||||
syntax::diagnostic_used!(E0424);
|
||||
err.code(DiagnosticId::Error("E0424".into()));
|
||||
err.span_label(span, match source {
|
||||
PathSource::Pat => {
|
||||
|
@ -14,7 +14,6 @@
|
||||
#![feature(label_break_value)]
|
||||
#![feature(mem_take)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
|
||||
#![recursion_limit="256"]
|
||||
|
||||
@ -68,9 +67,7 @@ use macros::{LegacyBinding, LegacyScope};
|
||||
|
||||
type Res = def::Res<NodeId>;
|
||||
|
||||
// N.B., this module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
mod diagnostics;
|
||||
mod late;
|
||||
mod macros;
|
||||
@ -2840,5 +2837,3 @@ impl CrateLint {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_resolve, DIAGNOSTICS }
|
||||
|
@ -1,6 +1,6 @@
|
||||
// ignore-tidy-filelength
|
||||
|
||||
register_long_diagnostics! {
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
E0023: r##"
|
||||
A pattern used to match against an enum variant must provide a sub-pattern for
|
||||
@ -4870,10 +4870,7 @@ fn foo_recursive(n: usize) -> Pin<Box<dyn Future<Output = ()>>> {
|
||||
The `Box<...>` ensures that the result is of known size,
|
||||
and the pin is required to keep it in the same place in memory.
|
||||
"##,
|
||||
|
||||
} // (end of detailed error messages)
|
||||
|
||||
register_diagnostics! {
|
||||
;
|
||||
// E0035, merged into E0087/E0089
|
||||
// E0036, merged into E0087/E0089
|
||||
// E0068,
|
||||
@ -4930,7 +4927,8 @@ register_diagnostics! {
|
||||
// E0245, // not a trait
|
||||
// E0246, // invalid recursive type
|
||||
// E0247,
|
||||
// E0248, // value used as a type, now reported earlier during resolution as E0412
|
||||
// E0248, // value used as a type, now reported earlier during resolution
|
||||
// as E0412
|
||||
// E0249,
|
||||
// E0319, // trait impls for defaulted traits allowed just for structs/enums
|
||||
// E0372, // coherence not object safe
|
||||
@ -4938,7 +4936,7 @@ register_diagnostics! {
|
||||
// between structures with the same definition
|
||||
// E0558, // replaced with a generic attribute input check
|
||||
E0533, // `{}` does not name a unit variant, unit struct or a constant
|
||||
// E0563, // cannot determine a type for this `impl Trait`: {} // removed in 6383de15
|
||||
// E0563, // cannot determine a type for this `impl Trait` removed in 6383de15
|
||||
E0564, // only named lifetimes are allowed in `impl Trait`,
|
||||
// but `{}` was found in the type `{}`
|
||||
E0587, // type has conflicting packed and align representation hints
|
||||
@ -4947,8 +4945,8 @@ register_diagnostics! {
|
||||
// E0612, // merged into E0609
|
||||
// E0613, // Removed (merged with E0609)
|
||||
E0627, // yield statement outside of generator literal
|
||||
E0632, // cannot provide explicit type parameters when `impl Trait` is used in
|
||||
// argument position.
|
||||
E0632, // cannot provide explicit type parameters when `impl Trait` is used
|
||||
// in argument position.
|
||||
E0634, // type has conflicting packed representaton hints
|
||||
E0640, // infer outlives requirements
|
||||
E0641, // cannot cast to/from a pointer with an unknown kind
|
||||
|
@ -65,7 +65,6 @@ This API is completely unstable and subject to change.
|
||||
#![feature(exhaustive_patterns)]
|
||||
#![feature(in_band_lifetimes)]
|
||||
#![feature(nll)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
#![feature(slice_patterns)]
|
||||
#![feature(never_type)]
|
||||
#![feature(inner_deref)]
|
||||
@ -78,9 +77,7 @@ This API is completely unstable and subject to change.
|
||||
|
||||
#[macro_use] extern crate rustc;
|
||||
|
||||
// N.B., this module needs to be declared first so diagnostics are
|
||||
// registered before they are used.
|
||||
mod error_codes;
|
||||
pub mod error_codes;
|
||||
|
||||
mod astconv;
|
||||
mod check;
|
||||
@ -389,5 +386,3 @@ pub fn hir_trait_to_predicates<'tcx>(
|
||||
|
||||
bounds
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_typeck, DIAGNOSTICS }
|
||||
|
@ -48,7 +48,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> {
|
||||
fn session(&self) -> &Session { self.sess }
|
||||
|
||||
fn code(&self) -> DiagnosticId {
|
||||
__diagnostic_used!(E0617);
|
||||
syntax::diagnostic_used!(E0617);
|
||||
DiagnosticId::Error("E0617".to_owned())
|
||||
}
|
||||
|
||||
@ -104,7 +104,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCastError<'tcx> {
|
||||
fn session(&self) -> &Session { self.sess }
|
||||
|
||||
fn code(&self) -> DiagnosticId {
|
||||
__diagnostic_used!(E0607);
|
||||
syntax::diagnostic_used!(E0607);
|
||||
DiagnosticId::Error("E0607".to_owned())
|
||||
}
|
||||
|
||||
|
@ -1,19 +1,27 @@
|
||||
use crate::os::unix::prelude::*;
|
||||
|
||||
use crate::ffi::{OsString, OsStr, CString, CStr};
|
||||
use crate::ffi::{OsString, OsStr, CString};
|
||||
use crate::fmt;
|
||||
use crate::io;
|
||||
use crate::ptr;
|
||||
use crate::sys::fd::FileDesc;
|
||||
use crate::sys::fs::{File, OpenOptions};
|
||||
use crate::sys::fs::File;
|
||||
use crate::sys::pipe::{self, AnonPipe};
|
||||
use crate::sys_common::process::CommandEnv;
|
||||
use crate::collections::BTreeMap;
|
||||
|
||||
#[cfg(not(target_os = "fuchsia"))]
|
||||
use {
|
||||
crate::ffi::CStr,
|
||||
crate::sys::fs::OpenOptions,
|
||||
};
|
||||
|
||||
use libc::{c_int, gid_t, uid_t, c_char, EXIT_SUCCESS, EXIT_FAILURE};
|
||||
|
||||
cfg_if::cfg_if! {
if #[cfg(target_os = "redox")] {
if #[cfg(target_os = "fuchsia")] {
// fuchsia doesn't have /dev/null
} else if #[cfg(target_os = "redox")] {
const DEV_NULL: &'static str = "null:\0";
} else {
const DEV_NULL: &'static str = "/dev/null\0";
@ -107,6 +115,11 @@ pub enum ChildStdio {
Inherit,
Explicit(c_int),
Owned(FileDesc),

// On Fuchsia, null stdio is the default, so we simply don't specify
// any actions at the time of spawning.
#[cfg(target_os = "fuchsia")]
Null,
}

pub enum Stdio {
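Since Fuchsia has no `/dev/null` to open, the change models null stdio as its own `ChildStdio::Null` variant and gates it at compile time, rather than trying to fake a device path. A small sketch of the same cfg-gating pattern, with a hypothetical enum so it builds on any target:

```rust
// Hypothetical stand-in for ChildStdio: the `Null` case only exists on
// targets that cannot simply open a null device.
enum ChildIo {
    Inherit,
    Fd(i32),
    #[cfg(target_os = "fuchsia")]
    Null,
}

fn raw_fd(io: &ChildIo) -> Option<i32> {
    match io {
        ChildIo::Inherit => None,
        ChildIo::Fd(fd) => Some(*fd),
        // No file descriptor at all: the spawn call just omits any action.
        #[cfg(target_os = "fuchsia")]
        ChildIo::Null => None,
    }
}

fn main() {
    assert_eq!(raw_fd(&ChildIo::Fd(1)), Some(1));
    assert_eq!(raw_fd(&ChildIo::Inherit), None);
}
```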
@ -325,6 +338,7 @@ impl Stdio {
|
||||
Ok((ChildStdio::Owned(theirs.into_fd()), Some(ours)))
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "fuchsia"))]
|
||||
Stdio::Null => {
|
||||
let mut opts = OpenOptions::new();
|
||||
opts.read(readable);
|
||||
@ -335,6 +349,11 @@ impl Stdio {
|
||||
let fd = File::open_c(&path, &opts)?;
|
||||
Ok((ChildStdio::Owned(fd.into_fd()), None))
|
||||
}
|
||||
|
||||
#[cfg(target_os = "fuchsia")]
|
||||
Stdio::Null => {
|
||||
Ok((ChildStdio::Null, None))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -357,6 +376,9 @@ impl ChildStdio {
|
||||
ChildStdio::Inherit => None,
|
||||
ChildStdio::Explicit(fd) => Some(fd),
|
||||
ChildStdio::Owned(ref fd) => Some(fd.raw()),
|
||||
|
||||
#[cfg(target_os = "fuchsia")]
|
||||
ChildStdio::Null => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -48,30 +48,51 @@ impl Command {
use crate::sys::process::zircon::*;

let envp = match maybe_envp {
Some(envp) => envp.as_ptr(),
// None means to clone the current environment, which is done in the
// flags below.
None => ptr::null(),
Some(envp) => envp.as_ptr(),
};

let transfer_or_clone = |opt_fd, target_fd| if let Some(local_fd) = opt_fd {
fdio_spawn_action_t {
action: FDIO_SPAWN_ACTION_TRANSFER_FD,
local_fd,
target_fd,
..Default::default()
}
} else {
fdio_spawn_action_t {
action: FDIO_SPAWN_ACTION_CLONE_FD,
local_fd: target_fd,
target_fd,
..Default::default()
let make_action = |local_io: &ChildStdio, target_fd| -> io::Result<fdio_spawn_action_t> {
if let Some(local_fd) = local_io.fd() {
Ok(fdio_spawn_action_t {
action: FDIO_SPAWN_ACTION_TRANSFER_FD,
local_fd,
target_fd,
..Default::default()
})
} else {
if let ChildStdio::Null = local_io {
// acts as no-op
return Ok(Default::default());
}

let mut handle = ZX_HANDLE_INVALID;
let status = fdio_fd_clone(target_fd, &mut handle);
if status == ERR_INVALID_ARGS || status == ERR_NOT_SUPPORTED {
// This descriptor is closed; skip it rather than generating an
// error.
return Ok(Default::default());
}
zx_cvt(status)?;

let mut cloned_fd = 0;
zx_cvt(fdio_fd_create(handle, &mut cloned_fd))?;

Ok(fdio_spawn_action_t {
action: FDIO_SPAWN_ACTION_TRANSFER_FD,
local_fd: cloned_fd as i32,
target_fd,
..Default::default()
})
}
};

// Clone stdin, stdout, and stderr
let action1 = transfer_or_clone(stdio.stdin.fd(), 0);
let action2 = transfer_or_clone(stdio.stdout.fd(), 1);
let action3 = transfer_or_clone(stdio.stderr.fd(), 2);
let action1 = make_action(&stdio.stdin, 0)?;
let action2 = make_action(&stdio.stdout, 1)?;
let action3 = make_action(&stdio.stderr, 2)?;
let actions = [action1, action2, action3];

// We don't want FileDesc::drop to be called on any stdio. fdio_spawn_etc

@ -84,9 +105,11 @@ impl Command {
let mut process_handle: zx_handle_t = 0;
zx_cvt(fdio_spawn_etc(
0,
FDIO_SPAWN_CLONE_JOB | FDIO_SPAWN_CLONE_LDSVC | FDIO_SPAWN_CLONE_NAMESPACE,
self.get_argv()[0], self.get_argv().as_ptr(), envp, 3, actions.as_ptr(),
ZX_HANDLE_INVALID,
FDIO_SPAWN_CLONE_JOB | FDIO_SPAWN_CLONE_LDSVC | FDIO_SPAWN_CLONE_NAMESPACE
| FDIO_SPAWN_CLONE_ENVIRON, // this is ignored when envp is non-null
self.get_argv()[0], self.get_argv().as_ptr(), envp,
actions.len() as size_t, actions.as_ptr(),
&mut process_handle,
ptr::null_mut(),
))?;
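The new `make_action` closure above is fallible: transferring an inherited descriptor, treating `Null` as a no-op, and cloning a possibly closed descriptor all funnel into `io::Result`, so each call site can use `?`. A reduced sketch of that pattern with stand-in types (the struct and the action value are assumptions for illustration, not the Fuchsia FFI):

```rust
use std::io;

// Hypothetical stand-in for the spawn-action struct; only the shape of the
// error handling is the point here.
#[derive(Default, Debug, PartialEq)]
struct SpawnAction {
    action: u32,
    local_fd: i32,
    target_fd: i32,
}

// Building an action can fail, so call sites use `?` instead of silently
// constructing a bogus action for a descriptor they cannot hand over.
fn make_action(local_fd: Option<i32>, target_fd: i32) -> io::Result<SpawnAction> {
    match local_fd {
        Some(local_fd) => Ok(SpawnAction { action: 1, local_fd, target_fd }),
        // Nothing to transfer: a default action acts as a no-op.
        None => Ok(SpawnAction::default()),
    }
}

fn main() -> io::Result<()> {
    let actions = [make_action(Some(3), 0)?, make_action(None, 1)?];
    assert_eq!(actions[1], SpawnAction::default());
    Ok(())
}
```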
@ -2,8 +2,9 @@
use crate::convert::TryInto;
use crate::io;
use crate::i64;
use crate::mem::MaybeUninit;
use crate::os::raw::c_char;
use crate::u64;

use libc::{c_int, c_void, size_t};

@ -14,8 +15,8 @@ pub type zx_status_t = i32;
pub const ZX_HANDLE_INVALID: zx_handle_t = 0;

pub type zx_time_t = u64;
pub const ZX_TIME_INFINITE : zx_time_t = u64::MAX;
pub type zx_time_t = i64;
pub const ZX_TIME_INFINITE : zx_time_t = i64::MAX;

pub type zx_signals_t = u32;

@ -120,8 +121,11 @@ pub struct fdio_spawn_action_t {
extern {
pub fn fdio_spawn_etc(job: zx_handle_t, flags: u32, path: *const c_char,
argv: *const *const c_char, envp: *const *const c_char,
action_count: u64, actions: *const fdio_spawn_action_t,
action_count: size_t, actions: *const fdio_spawn_action_t,
process: *mut zx_handle_t, err_msg: *mut c_char) -> zx_status_t;

pub fn fdio_fd_clone(fd: c_int, out_handle: *mut zx_handle_t) -> zx_status_t;
pub fn fdio_fd_create(handle: zx_handle_t, fd: *mut c_int) -> zx_status_t;
}

// fdio_spawn_etc flags
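Two of the fixes above are about integer widths at the FFI boundary: `zx_time_t` becomes signed, and `fdio_spawn_etc` now takes its action count as `size_t`, which lets the caller pass `actions.len()` instead of a hard-coded `3`. A tiny stand-in illustration of the second point (no real FFI here; `libc::size_t` is an alias for `usize` on the platforms libstd targets):

```rust
// Stand-in function, no real FFI: the point is only that the count comes from
// the slice itself, so the declaration and the call site cannot drift apart.
fn spawn_with_actions(action_count: usize, _actions: *const u8) -> i32 {
    // A real binding would forward these to something like fdio_spawn_etc;
    // here the count is echoed back as a fake status.
    action_count as i32
}

fn main() {
    let actions = [0u8, 1, 2];
    let status = spawn_with_actions(actions.len(), actions.as_ptr());
    assert_eq!(status, 3);
}
```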
@ -413,11 +413,11 @@ impl WherePredicate {
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
/// Any generics from a `for` binding
/// Any generics from a `for` binding.
pub bound_generic_params: Vec<GenericParam>,
/// The type being bounded
/// The type being bounded.
pub bounded_ty: P<Ty>,
/// Trait and lifetime bounds (`Clone+Send+'static`)
/// Trait and lifetime bounds (`Clone + Send + 'static`).
pub bounds: GenericBounds,
}

@ -495,15 +495,15 @@ pub enum MetaItemKind {
NameValue(Lit),
}

/// A Block (`{ .. }`).
/// A block (`{ .. }`).
///
/// E.g., `{ .. }` as in `fn foo() { .. }`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Block {
/// Statements in a block
/// The statements in the block.
pub stmts: Vec<Stmt>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
/// Distinguishes between `unsafe { ... }` and `{ ... }`.
pub rules: BlockCheckMode,
pub span: Span,
}

@ -908,11 +908,11 @@ pub enum MacStmtStyle {
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Local {
pub id: NodeId,
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
/// Initializer expression to set the value, if any.
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
pub attrs: ThinVec<Attribute>,
}

@ -970,7 +970,7 @@ pub struct AnonConst {
pub value: P<Expr>,
}

/// An expression
/// An expression.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Expr {
pub id: NodeId,

@ -984,26 +984,26 @@ pub struct Expr {
static_assert_size!(Expr, 96);

impl Expr {
/// Whether this expression would be valid somewhere that expects a value; for example, an `if`
/// condition.
/// Returns `true` if this expression would be valid somewhere that expects a value;
/// for example, an `if` condition.
pub fn returns(&self) -> bool {
if let ExprKind::Block(ref block, _) = self.node {
match block.stmts.last().map(|last_stmt| &last_stmt.node) {
// implicit return
// Implicit return
Some(&StmtKind::Expr(_)) => true,
Some(&StmtKind::Semi(ref expr)) => {
if let ExprKind::Ret(_) = expr.node {
// last statement is explicit return
// Last statement is explicit return.
true
} else {
false
}
}
// This is a block that doesn't end in either an implicit or explicit return
// This is a block that doesn't end in either an implicit or explicit return.
_ => false,
}
} else {
// This is not a block, it is a value
// This is not a block, it is a value.
true
}
}

@ -2307,37 +2307,37 @@ impl Default for FnHeader {
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ItemKind {
/// An `extern crate` item, with optional *original* crate name if the crate was renamed.
/// An `extern crate` item, with the optional *original* crate name if the crate was renamed.
///
/// E.g., `extern crate foo` or `extern crate foo_bar as foo`.
ExternCrate(Option<Name>),
/// A use declaration (`use` or `pub use`) item.
/// A use declaration item (`use`).
///
/// E.g., `use foo;`, `use foo::bar;` or `use foo::bar as FooBar;`.
Use(P<UseTree>),
/// A static item (`static` or `pub static`).
/// A static item (`static`).
///
/// E.g., `static FOO: i32 = 42;` or `static FOO: &'static str = "bar";`.
Static(P<Ty>, Mutability, P<Expr>),
/// A constant item (`const` or `pub const`).
/// A constant item (`const`).
///
/// E.g., `const FOO: i32 = 42;`.
Const(P<Ty>, P<Expr>),
/// A function declaration (`fn` or `pub fn`).
/// A function declaration (`fn`).
///
/// E.g., `fn foo(bar: usize) -> usize { .. }`.
Fn(P<FnDecl>, FnHeader, Generics, P<Block>),
/// A module declaration (`mod` or `pub mod`).
/// A module declaration (`mod`).
///
/// E.g., `mod foo;` or `mod foo { .. }`.
Mod(Mod),
/// An external module (`extern` or `pub extern`).
/// An external module (`extern`).
///
/// E.g., `extern {}` or `extern "C" {}`.
ForeignMod(ForeignMod),
/// Module-level inline assembly (from `global_asm!()`).
GlobalAsm(P<GlobalAsm>),
/// A type alias (`type` or `pub type`).
/// A type alias (`type`).
///
/// E.g., `type Foo = Bar<u8>;`.
TyAlias(P<Ty>, Generics),

@ -2345,19 +2345,19 @@ pub enum ItemKind {
///
/// E.g., `type Foo = impl Bar + Boo;`.
OpaqueTy(GenericBounds, Generics),
/// An enum definition (`enum` or `pub enum`).
/// An enum definition (`enum`).
///
/// E.g., `enum Foo<A, B> { C<A>, D<B> }`.
Enum(EnumDef, Generics),
/// A struct definition (`struct` or `pub struct`).
/// A struct definition (`struct`).
///
/// E.g., `struct Foo<A> { x: A }`.
Struct(VariantData, Generics),
/// A union definition (`union` or `pub union`).
/// A union definition (`union`).
///
/// E.g., `union Foo<A, B> { x: A, y: B }`.
Union(VariantData, Generics),
/// A Trait declaration (`trait` or `pub trait`).
/// A trait declaration (`trait`).
///
/// E.g., `trait Foo { .. }`, `trait Foo<T> { .. }` or `auto trait Foo {}`.
Trait(IsAuto, Unsafety, Generics, GenericBounds, Vec<TraitItem>),
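The reworded doc comment on `Expr::returns` is easier to follow next to a concrete case. A plain-Rust illustration of the distinction it describes (not compiler code):

```rust
// A block whose last statement is an expression yields a value; one ending in a
// `;`-terminated statement does not, unless that statement is an explicit `return`.
fn implicit(cond: bool) -> i32 {
    if cond { 1 } else { 0 } // trailing expression: the block yields this value
}

fn explicit(cond: bool) -> i32 {
    if cond {
        return 1; // last statement is an explicit `return`
    }
    0
}

fn main() {
    assert_eq!(implicit(true), 1);
    assert_eq!(explicit(false), 0);
}
```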
@ -1,4 +1,4 @@
//! Functions dealing with attributes and meta items
//! Functions dealing with attributes and meta items.

mod builtin;

@ -61,7 +61,7 @@ pub fn is_known_lint_tool(m_item: Ident) -> bool {
}

impl NestedMetaItem {
/// Returns the MetaItem if self is a NestedMetaItem::MetaItem.
/// Returns the `MetaItem` if `self` is a `NestedMetaItem::MetaItem`.
pub fn meta_item(&self) -> Option<&MetaItem> {
match *self {
NestedMetaItem::MetaItem(ref item) => Some(item),

@ -69,7 +69,7 @@ impl NestedMetaItem {
}
}

/// Returns the Lit if self is a NestedMetaItem::Literal.
/// Returns the `Lit` if `self` is a `NestedMetaItem::Literal`s.
pub fn literal(&self) -> Option<&Lit> {
match *self {
NestedMetaItem::Literal(ref lit) => Some(lit),

@ -82,7 +82,7 @@ impl NestedMetaItem {
self.meta_item().map_or(false, |meta_item| meta_item.check_name(name))
}

/// For a single-segment meta-item returns its name, otherwise returns `None`.
/// For a single-segment meta item, returns its name; otherwise, returns `None`.
pub fn ident(&self) -> Option<Ident> {
self.meta_item().and_then(|meta_item| meta_item.ident())
}

@ -90,13 +90,13 @@ impl NestedMetaItem {
self.ident().unwrap_or(Ident::invalid()).name
}

/// Gets the string value if self is a MetaItem and the MetaItem is a
/// MetaItemKind::NameValue variant containing a string, otherwise None.
/// Gets the string value if `self` is a `MetaItem` and the `MetaItem` is a
/// `MetaItemKind::NameValue` variant containing a string, otherwise `None`.
pub fn value_str(&self) -> Option<Symbol> {
self.meta_item().and_then(|meta_item| meta_item.value_str())
}

/// Returns a name and single literal value tuple of the MetaItem.
/// Returns a name and single literal value tuple of the `MetaItem`.
pub fn name_value_literal(&self) -> Option<(Name, &Lit)> {
self.meta_item().and_then(
|meta_item| meta_item.meta_item_list().and_then(

@ -112,32 +112,32 @@ impl NestedMetaItem {
}))
}

/// Gets a list of inner meta items from a list MetaItem type.
/// Gets a list of inner meta items from a list `MetaItem` type.
pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
self.meta_item().and_then(|meta_item| meta_item.meta_item_list())
}

/// Returns `true` if the variant is MetaItem.
/// Returns `true` if the variant is `MetaItem`.
pub fn is_meta_item(&self) -> bool {
self.meta_item().is_some()
}

/// Returns `true` if the variant is Literal.
/// Returns `true` if the variant is `Literal`.
pub fn is_literal(&self) -> bool {
self.literal().is_some()
}

/// Returns `true` if self is a MetaItem and the meta item is a word.
/// Returns `true` if `self` is a `MetaItem` and the meta item is a word.
pub fn is_word(&self) -> bool {
self.meta_item().map_or(false, |meta_item| meta_item.is_word())
}

/// Returns `true` if self is a MetaItem and the meta item is a ValueString.
/// Returns `true` if `self` is a `MetaItem` and the meta item is a `ValueString`.
pub fn is_value_str(&self) -> bool {
self.value_str().is_some()
}

/// Returns `true` if self is a MetaItem and the meta item is a list.
/// Returns `true` if `self` is a `MetaItem` and the meta item is a list.
pub fn is_meta_item_list(&self) -> bool {
self.meta_item_list().is_some()
}

@ -156,7 +156,7 @@ impl Attribute {
matches
}

/// For a single-segment attribute returns its name, otherwise returns `None`.
/// For a single-segment attribute, returns its name; otherwise, returns `None`.
pub fn ident(&self) -> Option<Ident> {
if self.path.segments.len() == 1 {
Some(self.path.segments[0].ident)

@ -187,14 +187,14 @@ impl Attribute {
self.meta_item_list().is_some()
}

/// Indicates if the attribute is a Value String.
/// Indicates if the attribute is a `ValueString`.
pub fn is_value_str(&self) -> bool {
self.value_str().is_some()
}
}

impl MetaItem {
/// For a single-segment meta-item returns its name, otherwise returns `None`.
/// For a single-segment meta item, returns its name; otherwise, returns `None`.
pub fn ident(&self) -> Option<Ident> {
if self.path.segments.len() == 1 {
Some(self.path.segments[0].ident)

@ -206,8 +206,9 @@ impl MetaItem {
self.ident().unwrap_or(Ident::invalid()).name
}

// #[attribute(name = "value")]
// ^^^^^^^^^^^^^^
// Example:
// #[attribute(name = "value")]
// ^^^^^^^^^^^^^^
pub fn name_value_literal(&self) -> Option<&Lit> {
match &self.node {
MetaItemKind::NameValue(v) => Some(v),

@ -255,7 +256,7 @@ impl MetaItem {
}

impl Attribute {
/// Extracts the MetaItem from inside this Attribute.
/// Extracts the `MetaItem` from inside this `Attribute`.
pub fn meta(&self) -> Option<MetaItem> {
let mut tokens = self.tokens.trees().peekable();
Some(MetaItem {

@ -318,8 +319,8 @@ impl Attribute {
})
}

/// Converts self to a normal #[doc="foo"] comment, if it is a
/// comment like `///` or `/** */`. (Returns self unchanged for
/// Converts `self` to a normal `#[doc="foo"]` comment, if it is a
/// comment like `///` or `/** */`. (Returns `self` unchanged for
/// non-sugared doc attributes.)
pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
F: FnOnce(&Attribute) -> T,
@ -1,13 +1,14 @@
#[macro_export]
macro_rules! register_diagnostic {
($code:tt, $description:tt) => (__register_diagnostic! { $code, $description });
($code:tt) => (__register_diagnostic! { $code })
macro_rules! diagnostic_used {
($code:ident) => (
let _ = crate::error_codes::$code;
)
}

#[macro_export]
macro_rules! span_fatal {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.span_fatal_with_code(
$span,
&format!($($message)*),

@ -19,7 +20,7 @@ macro_rules! span_fatal {
#[macro_export]
macro_rules! span_err {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.span_err_with_code(
$span,
&format!($($message)*),

@ -31,7 +32,7 @@ macro_rules! span_err {
#[macro_export]
macro_rules! span_warn {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.span_warn_with_code(
$span,
&format!($($message)*),

@ -43,7 +44,7 @@ macro_rules! span_warn {
#[macro_export]
macro_rules! struct_err {
($session:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.struct_err_with_code(
&format!($($message)*),
$crate::errors::DiagnosticId::Error(stringify!($code).to_owned()),

@ -54,7 +55,7 @@ macro_rules! struct_err {
#[macro_export]
macro_rules! span_err_or_warn {
($is_warning:expr, $session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
if $is_warning {
$session.span_warn_with_code(
$span,

@ -74,7 +75,7 @@ macro_rules! span_err_or_warn {
#[macro_export]
macro_rules! struct_span_fatal {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.struct_span_fatal_with_code(
$span,
&format!($($message)*),

@ -86,7 +87,7 @@ macro_rules! struct_span_fatal {
#[macro_export]
macro_rules! struct_span_err {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.struct_span_err_with_code(
$span,
&format!($($message)*),

@ -98,7 +99,7 @@ macro_rules! struct_span_err {
#[macro_export]
macro_rules! stringify_error_code {
($code:ident) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$crate::errors::DiagnosticId::Error(stringify!($code).to_owned())
})
}

@ -117,7 +118,7 @@ macro_rules! type_error_struct {
#[macro_export]
macro_rules! struct_span_warn {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
$session.struct_span_warn_with_code(
$span,
&format!($($message)*),

@ -129,7 +130,7 @@ macro_rules! struct_span_warn {
#[macro_export]
macro_rules! struct_span_err_or_warn {
($is_warning:expr, $session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
__diagnostic_used!($code);
$crate::diagnostic_used!($code);
if $is_warning {
$session.struct_span_warn_with_code(
$span,

@ -169,20 +170,22 @@ macro_rules! help {
#[macro_export]
macro_rules! register_diagnostics {
($($code:tt),*) => (
$($crate::register_diagnostic! { $code })*
($($ecode:ident: $message:expr,)*) => (
$crate::register_diagnostics!{$($ecode:$message,)* ;}
);
($($code:tt),*,) => (
$($crate::register_diagnostic! { $code })*
)
}

#[macro_export]
macro_rules! register_long_diagnostics {
($($code:tt: $description:tt),*) => (
$($crate::register_diagnostic! { $code, $description })*
);
($($code:tt: $description:tt),*,) => (
$($crate::register_diagnostic! { $code, $description })*
($($ecode:ident: $message:expr,)* ; $($code:ident,)*) => (
pub static DIAGNOSTICS: &[(&str, &str)] = &[
$( (stringify!($ecode), $message), )*
];

$(
#[deny(unused)]
pub(crate) const $ecode: &str = $message;
)*
$(
#[deny(unused)]
pub(crate) const $code: () = ();
)*
)
}
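The rewritten arm above generates both a `DIAGNOSTICS` table and one `deny(unused)` constant per error code. A reduced, self-contained sketch of that expansion (simplified: the bare-code list after the `;` is left out):

```rust
// Reduced sketch of the expansion; not the full rustc macro.
macro_rules! register_diagnostics {
    ($($ecode:ident: $message:expr,)*) => {
        // The table other tooling consumes.
        pub static DIAGNOSTICS: &[(&str, &str)] = &[
            $((stringify!($ecode), $message),)*
        ];

        // One named constant per code; `deny(unused)` flags codes nothing references.
        $(
            #[deny(unused)]
            pub(crate) const $ecode: &str = $message;
        )*
    };
}

register_diagnostics! {
    E0001: "an example description",
    E0002: "another example description",
}

fn main() {
    // Referencing the constants is what `diagnostic_used!` expands to.
    let _ = (E0001, E0002);
    assert_eq!(DIAGNOSTICS.len(), 2);
}
```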
@ -1,185 +0,0 @@
use std::collections::BTreeMap;

use crate::ast::{self, Ident, Name};
use crate::source_map;
use crate::ext::base::{ExtCtxt, MacEager, MacResult};
use crate::parse::token::{self, Token};
use crate::ptr::P;
use crate::symbol::kw;
use crate::tokenstream::{TokenTree, TokenStream};

use smallvec::smallvec;
use syntax_pos::Span;

pub use errors::*;

// Maximum width of any line in an extended error description (inclusive).
const MAX_DESCRIPTION_WIDTH: usize = 80;

/// Error information type.
pub struct ErrorInfo {
pub description: Option<Name>,
pub use_site: Option<Span>
}

/// Mapping from error codes to metadata.
pub type ErrorMap = BTreeMap<Name, ErrorInfo>;

pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
assert_eq!(tts.len(), 1);
let code = match tts.into_trees().next() {
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
_ => unreachable!()
};

ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
match diagnostics.get_mut(&code) {
// Previously used errors.
Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
ecx.struct_span_warn(span, &format!(
"diagnostic code {} already used", code
)).span_note(previous_span, "previous invocation")
.emit();
}
// Newly used errors.
Some(ref mut info) => {
info.use_site = Some(span);
}
// Unregistered errors.
None => {
ecx.span_err(span, &format!(
"used diagnostic code {} not registered", code
));
}
}
});
MacEager::expr(ecx.expr_tuple(span, Vec::new()))
}

pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
assert!(tts.len() == 1 || tts.len() == 3);
let mut cursor = tts.into_trees();
let code = match cursor.next() {
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
_ => unreachable!()
};
let description = match (cursor.next(), cursor.next()) {
(None, None) => None,
(
Some(TokenTree::Token(Token { kind: token::Comma, .. })),
Some(TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..}))
) => {
Some(symbol)
},
_ => unreachable!()
};

// Check that the description starts and ends with a newline and doesn't
// overflow the maximum line width.
description.map(|raw_msg| {
let msg = raw_msg.as_str();
if !msg.starts_with("\n") || !msg.ends_with("\n") {
ecx.span_err(span, &format!(
"description for error code {} doesn't start and end with a newline",
code
));
}

// URLs can be unavoidably longer than the line limit, so we allow them.
// Allowed format is: `[name]: https://www.rust-lang.org/`
let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");

if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
ecx.span_err(span, &format!(
"description for error code {} contains a line longer than {} characters.\n\
if you're inserting a long URL use the footnote style to bypass this check.",
code, MAX_DESCRIPTION_WIDTH
));
}
});
// Add the error to the map.
ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
let info = ErrorInfo {
description,
use_site: None
};
if diagnostics.insert(code, info).is_some() {
ecx.span_err(span, &format!(
"diagnostic code {} already registered", code
));
}
});

MacEager::items(smallvec![])
}

pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
assert_eq!(tts.len(), 3);
let ident = match tts.into_trees().nth(2) {
// DIAGNOSTICS ident.
Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
=> Ident::new(name, span),
_ => unreachable!()
};

// Construct the output expression.
let (count, expr) =
ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
let descriptions: Vec<P<ast::Expr>> =
diagnostics.iter().filter_map(|(&code, info)| {
info.description.map(|description| {
ecx.expr_tuple(span, vec![
ecx.expr_str(span, code),
ecx.expr_str(span, description)
])
})
}).collect();
(descriptions.len(), ecx.expr_vec(span, descriptions))
});

let static_ = ecx.lifetime(span, Ident::with_dummy_span(kw::StaticLifetime));
let ty_str = ecx.ty_rptr(
span,
ecx.ty_ident(span, ecx.ident_of("str")),
Some(static_),
ast::Mutability::Immutable,
);

let ty = ecx.ty(
span,
ast::TyKind::Array(
ecx.ty(
span,
ast::TyKind::Tup(vec![ty_str.clone(), ty_str])
),
ast::AnonConst {
id: ast::DUMMY_NODE_ID,
value: ecx.expr_usize(span, count),
},
),
);

MacEager::items(smallvec![
P(ast::Item {
ident,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Const(
ty,
expr,
),
vis: source_map::respan(span.shrink_to_lo(), ast::VisibilityKind::Public),
span,
tokens: None,
})
])
}
@ -1,7 +1,8 @@
// Error messages for EXXXX errors.
// Each message should start and end with a new line, and be wrapped to 80 characters.
// In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable.
register_long_diagnostics! {
// Each message should start and end with a new line, and be wrapped to 80
// characters. In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use
// `:set tw=0` to disable.
register_diagnostics! {

E0178: r##"
In types, the `+` type operator has low precedence, so it is often necessary

@ -420,9 +421,8 @@ Delete the offending feature attribute, or add it to the list of allowed
features in the `-Z allow_features` flag.
"##,

}
;

register_diagnostics! {
E0539, // incorrect meta item
E0540, // multiple rustc_deprecated attributes
E0542, // missing 'since'

@ -432,7 +432,9 @@ register_diagnostics! {
E0546, // missing 'feature'
E0547, // missing 'issue'
// E0548, // replaced with a generic attribute input check
E0549, // rustc_deprecated attribute must be paired with either stable or unstable attribute
// rustc_deprecated attribute must be paired with either stable or unstable
// attribute
E0549,
E0550, // multiple deprecated attributes
E0551, // incorrect meta item
E0553, // multiple rustc_const_unstable attributes

@ -440,9 +442,11 @@ register_diagnostics! {
E0556, // malformed feature, expected just one word
E0584, // file for module `..` found at both .. and ..
E0629, // missing 'feature' (rustc_const_unstable)
E0630, // rustc_const_unstable attribute must be paired with stable/unstable attribute
// rustc_const_unstable attribute must be paired with stable/unstable
// attribute
E0630,
E0693, // incorrect `repr(align)` attribute format
E0694, // an unknown tool name found in scoped attributes
// E0694, // an unknown tool name found in scoped attributes
E0703, // invalid ABI
E0717, // rustc_promotable without stability attribute
}
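The long descriptions kept above are `r##"…"##` raw strings, which lets them span many lines and contain quotes and `#` without escaping while still starting and ending with a newline, the format the old checks expected. A small, purely illustrative example:

```rust
// Purely illustrative: a raw string with `##` delimiters keeps quotes, hashes
// and backslashes literal, and makes it easy to start and end the text with a
// newline.
fn main() {
    let description = r##"
In types, the `+` operator has low precedence.
Quotes (") and hashes (#) need no escaping here.
"##;
    assert!(description.starts_with('\n'));
    assert!(description.ends_with('\n'));
}
```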
@ -958,7 +958,7 @@ impl<'a> ExtCtxt<'a> {
self.resolver.check_unused_macros();
}

/// Resolve a path mentioned inside Rust code.
/// Resolves a path mentioned inside Rust code.
///
/// This unifies the logic used for resolving `include_X!`, and `#[doc(include)]` file paths.
///
@ -1,9 +1,11 @@
//! List of the active feature gates.

use super::{State, Feature};

use crate::edition::Edition;
use crate::symbol::{Symbol, sym};

use syntax_pos::Span;
use super::{State, Feature};

macro_rules! set {
($field: ident) => {{

@ -37,9 +39,9 @@ macro_rules! declare_features {
/// A set of features to be used by later passes.
#[derive(Clone)]
pub struct Features {
/// `#![feature]` attrs for language features, for error reporting
/// `#![feature]` attrs for language features, for error reporting.
pub declared_lang_features: Vec<(Symbol, Span, Option<Symbol>)>,
/// `#![feature]` attrs for non-language (library) features
/// `#![feature]` attrs for non-language (library) features.
pub declared_lib_features: Vec<(Symbol, Span)>,
$(
$(#[doc = $doc])*

@ -66,11 +68,11 @@ macro_rules! declare_features {
}

impl Feature {
/// Set this feature in `Features`. Panics if called on a non-active feature.
/// Sets this feature in `Features`. Panics if called on a non-active feature.
pub fn set(&self, features: &mut Features, span: Span) {
match self.state {
State::Active { set } => set(features, span),
_ => panic!("Called `set` on feature `{}` which is not `active`", self.name)
_ => panic!("called `set` on feature `{}` which is not `active`", self.name)
}
}
}

@ -120,12 +122,6 @@ declare_features! (
/// macros disappear).
(active, allow_internal_unsafe, "1.0.0", None, None),

/// Allows using the macros:
/// + `__diagnostic_used`
/// + `__register_diagnostic`
/// +`__build_diagnostic_array`
(active, rustc_diagnostic_macros, "1.0.0", None, None),

/// Allows using `#[rustc_const_unstable(feature = "foo", ..)]` which
/// lets a function to be `const` when opted into with `#![feature(foo)]`.
(active, rustc_const_unstable, "1.0.0", None, None),

@ -478,7 +474,7 @@ declare_features! (
(active, precise_pointer_size_matching, "1.32.0", Some(56354), None),

/// Allows relaxing the coherence rules such that
/// `impl<T> ForeignTrait<LocalType> for ForeignType<T> is permitted.
/// `impl<T> ForeignTrait<LocalType> for ForeignType<T>` is permitted.
(active, re_rebalance_coherence, "1.32.0", Some(55437), None),

/// Allows using `#[ffi_returns_twice]` on foreign functions.

@ -520,7 +516,7 @@ declare_features! (
/// Allows `async || body` closures.
(active, async_closure, "1.37.0", Some(62290), None),

/// Allows the use of `#[cfg(doctest)]`, set when rustdoc is collecting doctests
/// Allows the use of `#[cfg(doctest)]`; set when rustdoc is collecting doctests.
(active, cfg_doctest, "1.37.0", Some(62210), None),

/// Allows `[x; N]` where `x` is a constant (RFC 2203).

@ -529,7 +525,7 @@ declare_features! (
/// Allows `impl Trait` to be used inside type aliases (RFC 2515).
(active, type_alias_impl_trait, "1.38.0", Some(63063), None),

/// Allows the use of or-patterns, e.g. `0 | 1`.
/// Allows the use of or-patterns (e.g., `0 | 1`).
(active, or_patterns, "1.38.0", Some(54883), None),

// -------------------------------------------------------------------------
@ -169,7 +169,7 @@ const INTERAL_UNSTABLE: &str = "this is an internal attribute that will never be
pub type BuiltinAttribute = (Symbol, AttributeType, AttributeTemplate, AttributeGate);

/// Attributes that have a special meaning to rustc or rustdoc
/// Attributes that have a special meaning to rustc or rustdoc.
pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// ==========================================================================
// Stable attributes:
@ -920,9 +920,9 @@ pub enum UnstableFeatures {
impl UnstableFeatures {
pub fn from_environment() -> UnstableFeatures {
// Whether this is a feature-staged build, i.e., on the beta or stable channel
// `true` if this is a feature-staged build, i.e., on the beta or stable channel.
let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some();
// Whether we should enable unstable features for bootstrapping
// `true` if we should enable unstable features for bootstrapping.
let bootstrap = env::var("RUSTC_BOOTSTRAP").is_ok();
match (disable_unstable_features, bootstrap) {
(_, true) => UnstableFeatures::Cheat,
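The tuple match above resolves the two environment flags with the `RUSTC_BOOTSTRAP` override winning. A toy version of that shape (the variant names and the non-`Cheat` arms are assumptions for illustration, not quoted from this diff):

```rust
// Illustrative only; the real mapping lives in rustc's UnstableFeatures.
#[derive(Debug, PartialEq)]
enum UnstableFeatures {
    Allow,
    Disallow,
    Cheat, // bootstrap override: unstable features allowed on any channel
}

fn from_flags(disable_unstable_features: bool, bootstrap: bool) -> UnstableFeatures {
    match (disable_unstable_features, bootstrap) {
        // The bootstrap flag wins regardless of the staged-build flag.
        (_, true) => UnstableFeatures::Cheat,
        (true, _) => UnstableFeatures::Disallow,
        (false, _) => UnstableFeatures::Allow,
    }
}

fn main() {
    assert_eq!(from_flags(true, true), UnstableFeatures::Cheat);
    assert_eq!(from_flags(true, false), UnstableFeatures::Disallow);
    assert_eq!(from_flags(false, false), UnstableFeatures::Allow);
}
```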
@ -94,6 +94,11 @@ declare_features! (
/// Allows defining `existential type`s.
(removed, existential_type, "1.38.0", Some(63063), None,
Some("removed in favor of `#![feature(type_alias_impl_trait)]`")),
/// Allows using the macros:
/// + `__diagnostic_used`
/// + `__register_diagnostic`
/// +`__build_diagnostic_array`
(removed, rustc_diagnostic_macros, "1.38.0", None, None, None),

// -------------------------------------------------------------------------
// feature-group-end: removed features
@ -18,7 +18,6 @@
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(rustc_diagnostic_macros)]
#![feature(try_trait)]
#![feature(unicode_internals)]

@ -123,11 +122,8 @@ scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals);
pub mod diagnostics {
#[macro_use]
pub mod macros;
pub mod plugin;
}

// N.B., this module needs to be declared first so diagnostics are
// registered before they are used.
pub mod error_codes;

pub mod util {

@ -182,5 +178,3 @@ pub mod ext {
}

pub mod early_buffered_lints;

__build_diagnostic_array! { libsyntax, DIAGNOSTICS }
Some files were not shown because too many files have changed in this diff.