mirror of
https://github.com/rust-lang/rust.git
synced 2024-11-22 14:55:26 +00:00
Auto merge of #37597 - alexcrichton:rollup, r=alexcrichton
Rollup of 24 pull requests - Successful merges: #37255, #37317, #37408, #37410, #37422, #37427, #37470, #37501, #37537, #37556, #37557, #37564, #37565, #37566, #37569, #37574, #37577, #37579, #37583, #37585, #37586, #37587, #37589, #37596 - Failed merges: #37521, #37547
This commit is contained in:
commit
3fc8304fd9
@ -127,7 +127,7 @@ ones from MSYS if you have it installed). You'll also need Visual Studio 2013 or
|
||||
newer with the C++ tools. Then all you need to do is to kick off rustbuild.
|
||||
|
||||
```
|
||||
python .\src\bootstrap\bootstrap.py
|
||||
python x.py build
|
||||
```
|
||||
|
||||
Currently rustbuild only works with some known versions of Visual Studio. If you
|
||||
@ -137,7 +137,7 @@ by manually calling the appropriate vcvars file before running the bootstrap.
|
||||
|
||||
```
|
||||
CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\vcvars64.bat"
|
||||
python .\src\bootstrap\bootstrap.py
|
||||
python x.py build
|
||||
```
|
||||
|
||||
## Building Documentation
|
||||
|
27
RELEASES.md
27
RELEASES.md
@ -1,3 +1,30 @@
|
||||
Version 1.12.1 (2016-10-20)
|
||||
===========================
|
||||
|
||||
Regression Fixes
|
||||
----------------
|
||||
|
||||
* [ICE: 'rustc' panicked at 'assertion failed: concrete_substs.is_normalized_for_trans()' #36381][36381]
|
||||
* [Confusion with double negation and booleans][36856]
|
||||
* [rustc 1.12.0 fails with SIGSEGV in release mode (syn crate 0.8.0)][36875]
|
||||
* [Rustc 1.12.0 Windows build of `ethcore` crate fails with LLVM error][36924]
|
||||
* [1.12.0: High memory usage when linking in release mode with debug info][36926]
|
||||
* [Corrupted memory after updated to 1.12][36936]
|
||||
* ["Let NullaryConstructor = something;" causes internal compiler error: "tried to overwrite interned AdtDef"][37026]
|
||||
* [Fix ICE: inject bitcast if types mismatch for invokes/calls/stores][37112]
|
||||
* [debuginfo: Handle spread_arg case in MIR-trans in a more stable way.][37153]
|
||||
|
||||
[36381]: https://github.com/rust-lang/rust/issues/36381
|
||||
[36856]: https://github.com/rust-lang/rust/issues/36856
|
||||
[36875]: https://github.com/rust-lang/rust/issues/36875
|
||||
[36924]: https://github.com/rust-lang/rust/issues/36924
|
||||
[36926]: https://github.com/rust-lang/rust/issues/36926
|
||||
[36936]: https://github.com/rust-lang/rust/issues/36936
|
||||
[37026]: https://github.com/rust-lang/rust/issues/37026
|
||||
[37112]: https://github.com/rust-lang/rust/issues/37112
|
||||
[37153]: https://github.com/rust-lang/rust/issues/37153
|
||||
|
||||
|
||||
Version 1.12.0 (2016-09-29)
|
||||
===========================
|
||||
|
||||
|
7
configure
vendored
7
configure
vendored
@ -868,13 +868,6 @@ then
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$CFG_GDB" ]
|
||||
then
|
||||
# Store GDB's version
|
||||
CFG_GDB_VERSION=$($CFG_GDB --version 2>/dev/null | head -1)
|
||||
putvar CFG_GDB_VERSION
|
||||
fi
|
||||
|
||||
if [ -n "$CFG_LLDB" ]
|
||||
then
|
||||
# Store LLDB's version
|
||||
|
@ -372,7 +372,7 @@ CFG_INFO := $(info cfg: disabling unstable features (CFG_DISABLE_UNSTABLE_FEATUR
|
||||
# Turn on feature-staging
|
||||
export CFG_DISABLE_UNSTABLE_FEATURES
|
||||
# Subvert unstable feature lints to do the self-build
|
||||
export RUSTC_BOOTSTRAP
|
||||
export RUSTC_BOOTSTRAP=1
|
||||
endif
|
||||
ifdef CFG_MUSL_ROOT
|
||||
export CFG_MUSL_ROOT
|
||||
|
@ -648,7 +648,7 @@ CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3) = \
|
||||
--host $(3) \
|
||||
--docck-python $$(CFG_PYTHON) \
|
||||
--lldb-python $$(CFG_LLDB_PYTHON) \
|
||||
--gdb-version="$(CFG_GDB_VERSION)" \
|
||||
--gdb="$(CFG_GDB)" \
|
||||
--lldb-version="$(CFG_LLDB_VERSION)" \
|
||||
--llvm-version="$$(LLVM_VERSION_$(3))" \
|
||||
--android-cross-path=$(CFG_ARM_LINUX_ANDROIDEABI_NDK) \
|
||||
|
20
src/Cargo.lock
generated
20
src/Cargo.lock
generated
@ -40,9 +40,9 @@ name = "bootstrap"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"build_helper 0.1.0",
|
||||
"cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"gcc 0.3.38 (git+https://github.com/alexcrichton/gcc-rs)",
|
||||
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -63,7 +63,7 @@ version = "0.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "cmake"
|
||||
version = "0.1.17"
|
||||
version = "0.1.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -131,11 +131,6 @@ dependencies = [
|
||||
name = "fmt_macros"
|
||||
version = "0.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "gcc"
|
||||
version = "0.3.38"
|
||||
source = "git+https://github.com/alexcrichton/gcc-rs#be620ac6d3ddb498cd0c700d5312c6a4c3c19597"
|
||||
|
||||
[[package]]
|
||||
name = "gcc"
|
||||
version = "0.3.38"
|
||||
@ -189,7 +184,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
name = "linkchecker"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"url 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -725,7 +720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "1.2.2"
|
||||
version = "1.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -743,10 +738,9 @@ version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[metadata]
|
||||
"checksum cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dfcf5bcece56ef953b8ea042509e9dcbdfe97820b7e20d86beb53df30ed94978"
|
||||
"checksum cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "0e5bcf27e097a184c1df4437654ed98df3d7a516e8508a6ba45d8b092bbdf283"
|
||||
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
|
||||
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
|
||||
"checksum gcc 0.3.38 (git+https://github.com/alexcrichton/gcc-rs)" = "<none>"
|
||||
"checksum gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "553f11439bdefe755bf366b264820f1da70f3aaf3924e594b886beb9c831bcf5"
|
||||
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
|
||||
"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
|
||||
@ -760,6 +754,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
|
||||
"checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f"
|
||||
"checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172"
|
||||
"checksum url 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ba5a45db1d2e0effb7a1c00cc73ffc63a973da8c7d1fcd5b46f24285ade6c54"
|
||||
"checksum url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "48ccf7bd87a81b769cf84ad556e034541fb90e1cd6d4bc375c822ed9500cd9d7"
|
||||
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
||||
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||
|
@ -27,7 +27,7 @@ num_cpus = "0.2"
|
||||
toml = "0.1"
|
||||
getopts = "0.2"
|
||||
rustc-serialize = "0.3"
|
||||
gcc = { git = "https://github.com/alexcrichton/gcc-rs" }
|
||||
gcc = "0.3.36"
|
||||
libc = "0.2"
|
||||
md5 = "0.1"
|
||||
|
||||
|
@ -10,8 +10,64 @@ system.
|
||||
|
||||
## Using rustbuild
|
||||
|
||||
When configuring Rust via `./configure`, pass the following to enable building
|
||||
via this build system:
|
||||
The rustbuild build system has a primary entry point, a top level `x.py` script:
|
||||
|
||||
```
|
||||
python ./x.py build
|
||||
```
|
||||
|
||||
Note that if you're on Unix you should be able to execute the script directly:
|
||||
|
||||
```
|
||||
./x.py build
|
||||
```
|
||||
|
||||
The script accepts commands, flags, and filters to determine what to do:
|
||||
|
||||
* `build` - a general purpose command for compiling code. Alone `build` will
|
||||
bootstrap the entire compiler, and otherwise arguments passed indicate what to
|
||||
build. For example:
|
||||
|
||||
```
|
||||
# build the whole compiler
|
||||
./x.py build
|
||||
|
||||
# build the stage1 compiler
|
||||
./x.py build --stage 1
|
||||
|
||||
# build stage0 libstd
|
||||
./x.py build --stage 0 src/libstd
|
||||
|
||||
# build a particular crate in stage0
|
||||
./x.py build --stage 0 src/libtest
|
||||
```
|
||||
|
||||
* `test` - a command for executing unit tests. Like the `build` command this
|
||||
will execute the entire test suite by default, and otherwise it can be used to
|
||||
select which test suite is run:
|
||||
|
||||
```
|
||||
# run all unit tests
|
||||
./x.py test
|
||||
|
||||
# execute the run-pass test suite
|
||||
./x.py test src/test/run-pass
|
||||
|
||||
# execute only some tests in the run-pass test suite
|
||||
./x.py test src/test/run-pass --filter my-filter
|
||||
|
||||
# execute tests in the standard library in stage0
|
||||
./x.py test --stage 0 src/libstd
|
||||
|
||||
# execute all doc tests
|
||||
./x.py test src/doc
|
||||
```
|
||||
|
||||
* `doc` - a command for building documentation. Like above can take arguments
|
||||
for what to document.
|
||||
|
||||
If you're more used to `./configure` and `make`, however, then you can also
|
||||
configure the build system to use rustbuild instead of the old makefiles:
|
||||
|
||||
```
|
||||
./configure --enable-rustbuild
|
||||
@ -19,15 +75,7 @@ make
|
||||
```
|
||||
|
||||
Afterwards the `Makefile` which is generated will have a few commands like
|
||||
`make check`, `make tidy`, etc. For finer-grained control, the
|
||||
`bootstrap.py` entry point can be used:
|
||||
|
||||
```
|
||||
python src/bootstrap/bootstrap.py
|
||||
```
|
||||
|
||||
This accepts a number of options like `--stage` and `--step` which can configure
|
||||
what's actually being done.
|
||||
`make check`, `make tidy`, etc.
|
||||
|
||||
## Configuring rustbuild
|
||||
|
||||
@ -47,7 +95,7 @@ being invoked manually (via the python script).
|
||||
The rustbuild build system goes through a few phases to actually build the
|
||||
compiler. What actually happens when you invoke rustbuild is:
|
||||
|
||||
1. The entry point script, `src/bootstrap/bootstrap.py` is run. This script is
|
||||
1. The entry point script, `x.py` is run. This script is
|
||||
responsible for downloading the stage0 compiler/Cargo binaries, and it then
|
||||
compiles the build system itself (this folder). Finally, it then invokes the
|
||||
actual `bootstrap` binary build system.
|
||||
|
@ -399,12 +399,10 @@ def main():
|
||||
|
||||
# Run the bootstrap
|
||||
args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")]
|
||||
args.append('--src')
|
||||
args.append(rb.rust_root)
|
||||
args.append('--build')
|
||||
args.append(rb.build)
|
||||
args.extend(sys.argv[1:])
|
||||
env = os.environ.copy()
|
||||
env["BUILD"] = rb.build
|
||||
env["SRC"] = rb.rust_root
|
||||
env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
|
||||
rb.run(args, env)
|
||||
|
||||
|
@ -13,44 +13,19 @@
|
||||
//! This file implements the various regression test suites that we execute on
|
||||
//! our CI.
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::{PathBuf, Path};
|
||||
use std::process::Command;
|
||||
|
||||
use build_helper::output;
|
||||
use rustc_serialize::json;
|
||||
|
||||
use {Build, Compiler, Mode};
|
||||
use util::{self, dylib_path, dylib_path_var};
|
||||
|
||||
const ADB_TEST_DIR: &'static str = "/data/tmp";
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Output {
|
||||
packages: Vec<Package>,
|
||||
resolve: Resolve,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Package {
|
||||
id: String,
|
||||
name: String,
|
||||
source: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Resolve {
|
||||
nodes: Vec<ResolveNode>,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct ResolveNode {
|
||||
id: String,
|
||||
dependencies: Vec<String>,
|
||||
}
|
||||
|
||||
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
|
||||
///
|
||||
/// This tool in `src/tools` will verify the validity of all our links in the
|
||||
@ -168,8 +143,8 @@ pub fn compiletest(build: &Build,
|
||||
cmd.arg("--lldb-python").arg(python_default);
|
||||
}
|
||||
|
||||
if let Some(ref vers) = build.gdb_version {
|
||||
cmd.arg("--gdb-version").arg(vers);
|
||||
if let Some(ref gdb) = build.config.gdb {
|
||||
cmd.arg("--gdb").arg(gdb);
|
||||
}
|
||||
if let Some(ref vers) = build.lldb_version {
|
||||
cmd.arg("--lldb-version").arg(vers);
|
||||
@ -181,7 +156,7 @@ pub fn compiletest(build: &Build,
|
||||
let llvm_version = output(Command::new(&llvm_config).arg("--version"));
|
||||
cmd.arg("--llvm-version").arg(llvm_version);
|
||||
|
||||
cmd.args(&build.flags.args);
|
||||
cmd.args(&build.flags.cmd.test_args());
|
||||
|
||||
if build.config.verbose || build.flags.verbose {
|
||||
cmd.arg("--verbose");
|
||||
@ -282,7 +257,7 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
|
||||
cmd.arg("--test");
|
||||
cmd.arg(markdown);
|
||||
|
||||
let mut test_args = build.flags.args.join(" ");
|
||||
let mut test_args = build.flags.cmd.test_args().join(" ");
|
||||
if build.config.quiet_tests {
|
||||
test_args.push_str(" --quiet");
|
||||
}
|
||||
@ -302,7 +277,8 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
|
||||
pub fn krate(build: &Build,
|
||||
compiler: &Compiler,
|
||||
target: &str,
|
||||
mode: Mode) {
|
||||
mode: Mode,
|
||||
krate: Option<&str>) {
|
||||
let (name, path, features, root) = match mode {
|
||||
Mode::Libstd => {
|
||||
("libstd", "src/rustc/std_shim", build.std_features(), "std_shim")
|
||||
@ -318,24 +294,6 @@ pub fn krate(build: &Build,
|
||||
println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
|
||||
compiler.host, target);
|
||||
|
||||
// Run `cargo metadata` to figure out what crates we're testing.
|
||||
//
|
||||
// Down below we're going to call `cargo test`, but to test the right set
|
||||
// of packages we're going to have to know what `-p` arguments to pass it
|
||||
// to know what crates to test. Here we run `cargo metadata` to learn about
|
||||
// the dependency graph and what `-p` arguments there are.
|
||||
let mut cargo = Command::new(&build.cargo);
|
||||
cargo.arg("metadata")
|
||||
.arg("--manifest-path").arg(build.src.join(path).join("Cargo.toml"));
|
||||
let output = output(&mut cargo);
|
||||
let output: Output = json::decode(&output).unwrap();
|
||||
let id2pkg = output.packages.iter()
|
||||
.map(|pkg| (&pkg.id, pkg))
|
||||
.collect::<HashMap<_, _>>();
|
||||
let id2deps = output.resolve.nodes.iter()
|
||||
.map(|node| (&node.id, &node.dependencies))
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
// Build up the base `cargo test` command.
|
||||
//
|
||||
// Pass in some standard flags then iterate over the graph we've discovered
|
||||
@ -346,24 +304,25 @@ pub fn krate(build: &Build,
|
||||
.arg(build.src.join(path).join("Cargo.toml"))
|
||||
.arg("--features").arg(features);
|
||||
|
||||
let mut visited = HashSet::new();
|
||||
let root_pkg = output.packages.iter().find(|p| p.name == root).unwrap();
|
||||
let mut next = vec![&root_pkg.id];
|
||||
while let Some(id) = next.pop() {
|
||||
// Skip any packages with sources listed, as these come from crates.io
|
||||
// and we shouldn't be testing them.
|
||||
if id2pkg[id].source.is_some() {
|
||||
continue
|
||||
match krate {
|
||||
Some(krate) => {
|
||||
cargo.arg("-p").arg(krate);
|
||||
}
|
||||
// Right now jemalloc is our only target-specific crate in the sense
|
||||
// that it's not present on all platforms. Custom skip it here for now,
|
||||
// but if we add more this probably wants to get more generalized.
|
||||
if !id.contains("jemalloc") {
|
||||
cargo.arg("-p").arg(&id2pkg[id].name);
|
||||
}
|
||||
for dep in id2deps[id] {
|
||||
if visited.insert(dep) {
|
||||
next.push(dep);
|
||||
None => {
|
||||
let mut visited = HashSet::new();
|
||||
let mut next = vec![root];
|
||||
while let Some(name) = next.pop() {
|
||||
// Right now jemalloc is our only target-specific crate in the sense
|
||||
// that it's not present on all platforms. Custom skip it here for now,
|
||||
// but if we add more this probably wants to get more generalized.
|
||||
if !name.contains("jemalloc") {
|
||||
cargo.arg("-p").arg(name);
|
||||
}
|
||||
for dep in build.crates[name].deps.iter() {
|
||||
if visited.insert(dep) {
|
||||
next.push(dep);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -389,7 +348,7 @@ pub fn krate(build: &Build,
|
||||
build.run(cargo.arg("--no-run"));
|
||||
krate_emscripten(build, compiler, target, mode);
|
||||
} else {
|
||||
cargo.args(&build.flags.args);
|
||||
cargo.args(&build.flags.cmd.test_args());
|
||||
build.run(&mut cargo);
|
||||
}
|
||||
}
|
||||
@ -421,7 +380,7 @@ fn krate_android(build: &Build,
|
||||
target = target,
|
||||
test = test_file_name,
|
||||
log = log,
|
||||
args = build.flags.args.join(" "));
|
||||
args = build.flags.cmd.test_args().join(" "));
|
||||
|
||||
let output = output(Command::new("adb").arg("shell").arg(&program));
|
||||
println!("{}", output);
|
||||
|
@ -16,6 +16,7 @@
|
||||
//! directory as we want that cached between builds.
|
||||
|
||||
use std::fs;
|
||||
use std::io::{self, ErrorKind};
|
||||
use std::path::Path;
|
||||
|
||||
use Build;
|
||||
@ -25,24 +26,58 @@ pub fn clean(build: &Build) {
|
||||
rm_rf(build, &build.out.join("tmp"));
|
||||
|
||||
for host in build.config.host.iter() {
|
||||
let entries = match build.out.join(host).read_dir() {
|
||||
Ok(iter) => iter,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
let out = build.out.join(host);
|
||||
|
||||
rm_rf(build, &out.join("doc"));
|
||||
|
||||
for stage in 0..4 {
|
||||
rm_rf(build, &out.join(format!("stage{}", stage)));
|
||||
rm_rf(build, &out.join(format!("stage{}-std", stage)));
|
||||
rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
|
||||
rm_rf(build, &out.join(format!("stage{}-tools", stage)));
|
||||
rm_rf(build, &out.join(format!("stage{}-test", stage)));
|
||||
for entry in entries {
|
||||
let entry = t!(entry);
|
||||
if entry.file_name().to_str() == Some("llvm") {
|
||||
continue
|
||||
}
|
||||
let path = t!(entry.path().canonicalize());
|
||||
rm_rf(build, &path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn rm_rf(build: &Build, path: &Path) {
|
||||
if path.exists() {
|
||||
build.verbose(&format!("removing `{}`", path.display()));
|
||||
t!(fs::remove_dir_all(path));
|
||||
if !path.exists() {
|
||||
return
|
||||
}
|
||||
|
||||
for file in t!(fs::read_dir(path)) {
|
||||
let file = t!(file).path();
|
||||
|
||||
if file.is_dir() {
|
||||
rm_rf(build, &file);
|
||||
} else {
|
||||
// On windows we can't remove a readonly file, and git will
|
||||
// often clone files as readonly. As a result, we have some
|
||||
// special logic to remove readonly files on windows.
|
||||
do_op(&file, "remove file", |p| fs::remove_file(p));
|
||||
}
|
||||
}
|
||||
do_op(path, "remove dir", |p| fs::remove_dir(p));
|
||||
}
|
||||
|
||||
fn do_op<F>(path: &Path, desc: &str, mut f: F)
|
||||
where F: FnMut(&Path) -> io::Result<()>
|
||||
{
|
||||
match f(path) {
|
||||
Ok(()) => {}
|
||||
Err(ref e) if cfg!(windows) &&
|
||||
e.kind() == ErrorKind::PermissionDenied => {
|
||||
let mut p = t!(path.metadata()).permissions();
|
||||
p.set_readonly(false);
|
||||
t!(fs::set_permissions(path, p));
|
||||
f(path).unwrap_or_else(|e| {
|
||||
panic!("failed to {} {}: {}", desc, path.display(), e);
|
||||
})
|
||||
}
|
||||
Err(e) => {
|
||||
panic!("failed to {} {}: {}", desc, path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -64,8 +64,8 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
}
|
||||
|
||||
build.run(&mut cargo);
|
||||
update_mtime(&libstd_stamp(build, compiler, target));
|
||||
std_link(build, target, compiler, compiler.host);
|
||||
update_mtime(&libstd_stamp(build, &compiler, target));
|
||||
std_link(build, target, compiler.stage, compiler.host);
|
||||
}
|
||||
|
||||
/// Link all libstd rlibs/dylibs into the sysroot location.
|
||||
@ -74,11 +74,12 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
/// by `compiler` into `host`'s sysroot.
|
||||
pub fn std_link(build: &Build,
|
||||
target: &str,
|
||||
compiler: &Compiler,
|
||||
stage: u32,
|
||||
host: &str) {
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let target_compiler = Compiler::new(compiler.stage, host);
|
||||
let libdir = build.sysroot_libdir(&target_compiler, target);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
|
||||
let out_dir = build.cargo_out(&compiler, Mode::Libstd, target);
|
||||
|
||||
// If we're linking one compiler host's output into another, then we weren't
|
||||
// called from the `std` method above. In that case we clean out what's
|
||||
@ -146,7 +147,7 @@ pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
.arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
|
||||
build.run(&mut cargo);
|
||||
update_mtime(&libtest_stamp(build, compiler, target));
|
||||
test_link(build, target, compiler, compiler.host);
|
||||
test_link(build, target, compiler.stage, compiler.host);
|
||||
}
|
||||
|
||||
/// Link all libtest rlibs/dylibs into the sysroot location.
|
||||
@ -155,11 +156,12 @@ pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
/// by `compiler` into `host`'s sysroot.
|
||||
pub fn test_link(build: &Build,
|
||||
target: &str,
|
||||
compiler: &Compiler,
|
||||
stage: u32,
|
||||
host: &str) {
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let target_compiler = Compiler::new(compiler.stage, host);
|
||||
let libdir = build.sysroot_libdir(&target_compiler, target);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
|
||||
let out_dir = build.cargo_out(&compiler, Mode::Libtest, target);
|
||||
add_to_sysroot(&out_dir, &libdir);
|
||||
}
|
||||
|
||||
@ -218,7 +220,7 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
}
|
||||
build.run(&mut cargo);
|
||||
|
||||
rustc_link(build, target, compiler, compiler.host);
|
||||
rustc_link(build, target, compiler.stage, compiler.host);
|
||||
}
|
||||
|
||||
/// Link all librustc rlibs/dylibs into the sysroot location.
|
||||
@ -227,11 +229,12 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
|
||||
/// by `compiler` into `host`'s sysroot.
|
||||
pub fn rustc_link(build: &Build,
|
||||
target: &str,
|
||||
compiler: &Compiler,
|
||||
stage: u32,
|
||||
host: &str) {
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let target_compiler = Compiler::new(compiler.stage, host);
|
||||
let libdir = build.sysroot_libdir(&target_compiler, target);
|
||||
let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
|
||||
let out_dir = build.cargo_out(&compiler, Mode::Librustc, target);
|
||||
add_to_sysroot(&out_dir, &libdir);
|
||||
}
|
||||
|
||||
@ -259,7 +262,10 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
|
||||
/// must have been previously produced by the `stage - 1` build.config.build
|
||||
/// compiler.
|
||||
pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
|
||||
assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
|
||||
// nothing to do in stage0
|
||||
if stage == 0 {
|
||||
return
|
||||
}
|
||||
// The compiler that we're assembling
|
||||
let target_compiler = Compiler::new(stage, host);
|
||||
|
||||
|
@ -23,6 +23,7 @@ use std::process;
|
||||
use num_cpus;
|
||||
use rustc_serialize::Decodable;
|
||||
use toml::{Parser, Decoder, Value};
|
||||
use util::push_exe_path;
|
||||
|
||||
/// Global configuration for the entire build and/or bootstrap.
|
||||
///
|
||||
@ -86,6 +87,7 @@ pub struct Config {
|
||||
pub mandir: Option<String>,
|
||||
pub codegen_tests: bool,
|
||||
pub nodejs: Option<PathBuf>,
|
||||
pub gdb: Option<PathBuf>,
|
||||
}
|
||||
|
||||
/// Per-target configuration stored in the global configuration structure.
|
||||
@ -123,6 +125,7 @@ struct Build {
|
||||
compiler_docs: Option<bool>,
|
||||
docs: Option<bool>,
|
||||
submodules: Option<bool>,
|
||||
gdb: Option<String>,
|
||||
}
|
||||
|
||||
/// TOML representation of how the LLVM build is configured.
|
||||
@ -227,6 +230,7 @@ impl Config {
|
||||
}
|
||||
config.rustc = build.rustc.map(PathBuf::from);
|
||||
config.cargo = build.cargo.map(PathBuf::from);
|
||||
config.gdb = build.gdb.map(PathBuf::from);
|
||||
set(&mut config.compiler_docs, build.compiler_docs);
|
||||
set(&mut config.docs, build.docs);
|
||||
set(&mut config.submodules, build.submodules);
|
||||
@ -356,37 +360,37 @@ impl Config {
|
||||
.collect();
|
||||
}
|
||||
"CFG_MUSL_ROOT" if value.len() > 0 => {
|
||||
self.musl_root = Some(PathBuf::from(value));
|
||||
self.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_MUSL_ROOT_X86_64" if value.len() > 0 => {
|
||||
let target = "x86_64-unknown-linux-musl".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.musl_root = Some(PathBuf::from(value));
|
||||
target.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_MUSL_ROOT_I686" if value.len() > 0 => {
|
||||
let target = "i686-unknown-linux-musl".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.musl_root = Some(PathBuf::from(value));
|
||||
target.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_MUSL_ROOT_ARM" if value.len() > 0 => {
|
||||
let target = "arm-unknown-linux-musleabi".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.musl_root = Some(PathBuf::from(value));
|
||||
target.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => {
|
||||
let target = "arm-unknown-linux-musleabihf".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.musl_root = Some(PathBuf::from(value));
|
||||
target.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => {
|
||||
let target = "armv7-unknown-linux-musleabihf".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.musl_root = Some(PathBuf::from(value));
|
||||
target.musl_root = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_DEFAULT_AR" if value.len() > 0 => {
|
||||
self.rustc_default_ar = Some(value.to_string());
|
||||
@ -394,6 +398,9 @@ impl Config {
|
||||
"CFG_DEFAULT_LINKER" if value.len() > 0 => {
|
||||
self.rustc_default_linker = Some(value.to_string());
|
||||
}
|
||||
"CFG_GDB" if value.len() > 0 => {
|
||||
self.gdb = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_RELEASE_CHANNEL" => {
|
||||
self.channel = value.to_string();
|
||||
}
|
||||
@ -412,41 +419,42 @@ impl Config {
|
||||
"CFG_LLVM_ROOT" if value.len() > 0 => {
|
||||
let target = self.target_config.entry(self.build.clone())
|
||||
.or_insert(Target::default());
|
||||
let root = PathBuf::from(value);
|
||||
target.llvm_config = Some(root.join("bin/llvm-config"));
|
||||
let root = parse_configure_path(value);
|
||||
target.llvm_config = Some(push_exe_path(root, &["bin", "llvm-config"]));
|
||||
}
|
||||
"CFG_JEMALLOC_ROOT" if value.len() > 0 => {
|
||||
let target = self.target_config.entry(self.build.clone())
|
||||
.or_insert(Target::default());
|
||||
target.jemalloc = Some(PathBuf::from(value));
|
||||
target.jemalloc = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
|
||||
let target = "arm-linux-androideabi".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.ndk = Some(PathBuf::from(value));
|
||||
target.ndk = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
|
||||
let target = "armv7-linux-androideabi".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.ndk = Some(PathBuf::from(value));
|
||||
target.ndk = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
|
||||
let target = "i686-linux-android".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.ndk = Some(PathBuf::from(value));
|
||||
target.ndk = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
|
||||
let target = "aarch64-linux-android".to_string();
|
||||
let target = self.target_config.entry(target)
|
||||
.or_insert(Target::default());
|
||||
target.ndk = Some(PathBuf::from(value));
|
||||
target.ndk = Some(parse_configure_path(value));
|
||||
}
|
||||
"CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
|
||||
self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
|
||||
self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
|
||||
let path = parse_configure_path(value);
|
||||
self.rustc = Some(push_exe_path(path.clone(), &["bin", "rustc"]));
|
||||
self.cargo = Some(push_exe_path(path, &["bin", "cargo"]));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -454,6 +462,30 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn parse_configure_path(path: &str) -> PathBuf {
|
||||
path.into()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn parse_configure_path(path: &str) -> PathBuf {
|
||||
// on windows, configure produces unix style paths e.g. /c/some/path but we
|
||||
// only want real windows paths
|
||||
|
||||
use std::process::Command;
|
||||
use build_helper;
|
||||
|
||||
// '/' is invalid in windows paths, so we can detect unix paths by the presence of it
|
||||
if !path.contains('/') {
|
||||
return path.into();
|
||||
}
|
||||
|
||||
let win_path = build_helper::output(Command::new("cygpath").arg("-w").arg(path));
|
||||
let win_path = win_path.trim();
|
||||
|
||||
win_path.into()
|
||||
}
|
||||
|
||||
fn set<T>(field: &mut T, val: Option<T>) {
|
||||
if let Some(v) = val {
|
||||
*field = v;
|
||||
|
@ -79,6 +79,9 @@
|
||||
# Indicate whether submodules are managed and updated automatically.
|
||||
#submodules = true
|
||||
|
||||
# The path to (or name of) the GDB executable to use
|
||||
#gdb = "gdb"
|
||||
|
||||
# =============================================================================
|
||||
# Options for compiling Rust code itself
|
||||
# =============================================================================
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
use std::fs::{self, File};
|
||||
use std::io::prelude::*;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
use {Build, Compiler, Mode};
|
||||
@ -30,8 +29,9 @@ use util::{up_to_date, cp_r};
|
||||
///
|
||||
/// This will not actually generate any documentation if the documentation has
|
||||
/// already been generated.
|
||||
pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
|
||||
t!(fs::create_dir_all(out));
|
||||
pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str) {
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let out = out.join(name);
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
@ -57,9 +57,10 @@ pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path)
|
||||
/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
|
||||
///
|
||||
/// In the end, this is just a glorified wrapper around rustdoc!
|
||||
pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
pub fn standalone(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} standalone ({})", stage, target);
|
||||
t!(fs::create_dir_all(out));
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
|
||||
@ -109,7 +110,7 @@ pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
.arg("--html-in-header").arg(&favicon)
|
||||
.arg("--markdown-playground-url")
|
||||
.arg("https://play.rust-lang.org/")
|
||||
.arg("-o").arg(out)
|
||||
.arg("-o").arg(&out)
|
||||
.arg(&path);
|
||||
|
||||
if filename == "reference.md" {
|
||||
@ -131,9 +132,10 @@ pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
///
|
||||
/// This will generate all documentation for the standard library and its
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
pub fn std(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} std ({})", stage, target);
|
||||
t!(fs::create_dir_all(out));
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libstd)
|
||||
.join(target).join("doc");
|
||||
@ -146,16 +148,17 @@ pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
.arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
|
||||
.arg("--features").arg(build.std_features());
|
||||
build.run(&mut cargo);
|
||||
cp_r(&out_dir, out)
|
||||
cp_r(&out_dir, &out)
|
||||
}
|
||||
|
||||
/// Compile all libtest documentation.
|
||||
///
|
||||
/// This will generate all documentation for libtest and its dependencies. This
|
||||
/// is largely just a wrapper around `cargo doc`.
|
||||
pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
pub fn test(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} test ({})", stage, target);
|
||||
t!(fs::create_dir_all(out));
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let out_dir = build.stage_out(&compiler, Mode::Libtest)
|
||||
.join(target).join("doc");
|
||||
@ -167,16 +170,17 @@ pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
cargo.arg("--manifest-path")
|
||||
.arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
|
||||
build.run(&mut cargo);
|
||||
cp_r(&out_dir, out)
|
||||
cp_r(&out_dir, &out)
|
||||
}
|
||||
|
||||
/// Generate all compiler documentation.
|
||||
///
|
||||
/// This will generate all documentation for the compiler libraries and their
|
||||
/// dependencies. This is largely just a wrapper around `cargo doc`.
|
||||
pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
pub fn rustc(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} compiler ({})", stage, target);
|
||||
t!(fs::create_dir_all(out));
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let out_dir = build.stage_out(&compiler, Mode::Librustc)
|
||||
.join(target).join("doc");
|
||||
@ -189,14 +193,15 @@ pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
.arg(build.src.join("src/rustc/Cargo.toml"))
|
||||
.arg("--features").arg(build.rustc_features());
|
||||
build.run(&mut cargo);
|
||||
cp_r(&out_dir, out)
|
||||
cp_r(&out_dir, &out)
|
||||
}
|
||||
|
||||
/// Generates the HTML rendered error-index by running the
|
||||
/// `error_index_generator` tool.
|
||||
pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
|
||||
pub fn error_index(build: &Build, stage: u32, target: &str) {
|
||||
println!("Documenting stage{} error index ({})", stage, target);
|
||||
t!(fs::create_dir_all(out));
|
||||
let out = build.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
let compiler = Compiler::new(stage, &build.config.build);
|
||||
let mut index = build.tool_cmd(&compiler, "error_index_generator");
|
||||
index.arg("html");
|
||||
|
@ -13,30 +13,46 @@
|
||||
//! This module implements the command-line parsing of the build system which
|
||||
//! has various flags to configure how it's run.
|
||||
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::process;
|
||||
use std::slice;
|
||||
|
||||
use getopts::Options;
|
||||
use getopts::{Matches, Options};
|
||||
|
||||
use Build;
|
||||
use config::Config;
|
||||
use metadata;
|
||||
use step;
|
||||
|
||||
/// Deserialized version of all flags for this compile.
|
||||
pub struct Flags {
|
||||
pub verbose: bool,
|
||||
pub stage: Option<u32>,
|
||||
pub build: String,
|
||||
pub host: Filter,
|
||||
pub target: Filter,
|
||||
pub step: Vec<String>,
|
||||
pub host: Vec<String>,
|
||||
pub target: Vec<String>,
|
||||
pub config: Option<PathBuf>,
|
||||
pub src: Option<PathBuf>,
|
||||
pub jobs: Option<u32>,
|
||||
pub args: Vec<String>,
|
||||
pub clean: bool,
|
||||
pub cmd: Subcommand,
|
||||
}
|
||||
|
||||
pub struct Filter {
|
||||
values: Vec<String>,
|
||||
pub enum Subcommand {
|
||||
Build {
|
||||
paths: Vec<PathBuf>,
|
||||
},
|
||||
Doc {
|
||||
paths: Vec<PathBuf>,
|
||||
},
|
||||
Test {
|
||||
paths: Vec<PathBuf>,
|
||||
test_args: Vec<String>,
|
||||
},
|
||||
Clean,
|
||||
Dist {
|
||||
install: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Flags {
|
||||
@ -44,29 +60,177 @@ impl Flags {
|
||||
let mut opts = Options::new();
|
||||
opts.optflag("v", "verbose", "use verbose output");
|
||||
opts.optopt("", "config", "TOML configuration file for build", "FILE");
|
||||
opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
|
||||
opts.optmulti("", "host", "host targets to build", "HOST");
|
||||
opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
|
||||
opts.optmulti("", "target", "targets to build", "TARGET");
|
||||
opts.optmulti("s", "step", "build step to execute", "STEP");
|
||||
opts.optmulti("", "target", "target targets to build", "TARGET");
|
||||
opts.optopt("", "stage", "stage to build", "N");
|
||||
opts.optopt("", "src", "path to repo root", "DIR");
|
||||
opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
|
||||
opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
|
||||
opts.optflag("", "clean", "clean output directory");
|
||||
opts.optflag("h", "help", "print this help message");
|
||||
|
||||
let usage = |n| -> ! {
|
||||
let brief = format!("Usage: rust.py [options]");
|
||||
print!("{}", opts.usage(&brief));
|
||||
let usage = |n, opts: &Options| -> ! {
|
||||
let command = args.get(0).map(|s| &**s);
|
||||
let brief = format!("Usage: x.py {} [options] [<args>...]",
|
||||
command.unwrap_or("<command>"));
|
||||
|
||||
println!("{}", opts.usage(&brief));
|
||||
match command {
|
||||
Some("build") => {
|
||||
println!("\
|
||||
Arguments:
|
||||
This subcommand accepts a number of positional arguments of directories to
|
||||
the crates and/or artifacts to compile. For example:
|
||||
|
||||
./x.py build src/libcore
|
||||
./x.py build src/libproc_macro
|
||||
./x.py build src/libstd --stage 1
|
||||
|
||||
If no arguments are passed then the complete artifacts for that stage are
|
||||
also compiled.
|
||||
|
||||
./x.py build
|
||||
./x.py build --stage 1
|
||||
|
||||
For a quick build with a usable compile, you can pass:
|
||||
|
||||
./x.py build --stage 1 src/libtest
|
||||
");
|
||||
}
|
||||
|
||||
Some("test") => {
|
||||
println!("\
|
||||
Arguments:
|
||||
This subcommand accepts a number of positional arguments of directories to
|
||||
tests that should be compiled and run. For example:
|
||||
|
||||
./x.py test src/test/run-pass
|
||||
./x.py test src/test/run-pass/assert-*
|
||||
./x.py test src/libstd --test-args hash_map
|
||||
./x.py test src/libstd --stage 0
|
||||
|
||||
If no arguments are passed then the complete artifacts for that stage are
|
||||
compiled and tested.
|
||||
|
||||
./x.py test
|
||||
./x.py test --stage 1
|
||||
");
|
||||
}
|
||||
|
||||
Some("doc") => {
|
||||
println!("\
|
||||
Arguments:
|
||||
This subcommand accepts a number of positional arguments of directories of
|
||||
documentation to build. For example:
|
||||
|
||||
./x.py doc src/doc/book
|
||||
./x.py doc src/doc/nomicon
|
||||
./x.py doc src/libstd
|
||||
|
||||
If no arguments are passed then everything is documented:
|
||||
|
||||
./x.py doc
|
||||
./x.py doc --stage 1
|
||||
");
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let Some(command) = command {
|
||||
if command == "build" ||
|
||||
command == "dist" ||
|
||||
command == "doc" ||
|
||||
command == "test" ||
|
||||
command == "clean" {
|
||||
println!("Available invocations:");
|
||||
if args.iter().any(|a| a == "-v") {
|
||||
let flags = Flags::parse(&["build".to_string()]);
|
||||
let mut config = Config::default();
|
||||
config.build = flags.build.clone();
|
||||
let mut build = Build::new(flags, config);
|
||||
metadata::build(&mut build);
|
||||
step::build_rules(&build).print_help(command);
|
||||
} else {
|
||||
println!(" ... elided, run `./x.py {} -h -v` to see",
|
||||
command);
|
||||
}
|
||||
|
||||
println!("");
|
||||
}
|
||||
}
|
||||
|
||||
println!("\
|
||||
Subcommands:
|
||||
build Compile either the compiler or libraries
|
||||
test Build and run some test suites
|
||||
doc Build documentation
|
||||
clean Clean out build directories
|
||||
dist Build and/or install distribution artifacts
|
||||
|
||||
To learn more about a subcommand, run `./x.py <command> -h`
|
||||
");
|
||||
|
||||
process::exit(n);
|
||||
};
|
||||
|
||||
let m = opts.parse(args).unwrap_or_else(|e| {
|
||||
println!("failed to parse options: {}", e);
|
||||
usage(1);
|
||||
});
|
||||
if m.opt_present("h") {
|
||||
usage(0);
|
||||
if args.len() == 0 {
|
||||
println!("a command must be passed");
|
||||
usage(1, &opts);
|
||||
}
|
||||
let parse = |opts: &Options| {
|
||||
let m = opts.parse(&args[1..]).unwrap_or_else(|e| {
|
||||
println!("failed to parse options: {}", e);
|
||||
usage(1, opts);
|
||||
});
|
||||
if m.opt_present("h") {
|
||||
usage(0, opts);
|
||||
}
|
||||
return m
|
||||
};
|
||||
|
||||
let cwd = t!(env::current_dir());
|
||||
let remaining_as_path = |m: &Matches| {
|
||||
m.free.iter().map(|p| cwd.join(p)).collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
let m: Matches;
|
||||
let cmd = match &args[0][..] {
|
||||
"build" => {
|
||||
m = parse(&opts);
|
||||
Subcommand::Build { paths: remaining_as_path(&m) }
|
||||
}
|
||||
"doc" => {
|
||||
m = parse(&opts);
|
||||
Subcommand::Doc { paths: remaining_as_path(&m) }
|
||||
}
|
||||
"test" => {
|
||||
opts.optmulti("", "test-args", "extra arguments", "ARGS");
|
||||
m = parse(&opts);
|
||||
Subcommand::Test {
|
||||
paths: remaining_as_path(&m),
|
||||
test_args: m.opt_strs("test-args"),
|
||||
}
|
||||
}
|
||||
"clean" => {
|
||||
m = parse(&opts);
|
||||
if m.free.len() > 0 {
|
||||
println!("clean takes no arguments");
|
||||
usage(1, &opts);
|
||||
}
|
||||
Subcommand::Clean
|
||||
}
|
||||
"dist" => {
|
||||
opts.optflag("", "install", "run installer as well");
|
||||
m = parse(&opts);
|
||||
Subcommand::Dist {
|
||||
install: m.opt_present("install"),
|
||||
}
|
||||
}
|
||||
cmd => {
|
||||
println!("unknown command: {}", cmd);
|
||||
usage(1, &opts);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
|
||||
if fs::metadata("config.toml").is_ok() {
|
||||
@ -78,26 +242,27 @@ impl Flags {
|
||||
|
||||
Flags {
|
||||
verbose: m.opt_present("v"),
|
||||
clean: m.opt_present("clean"),
|
||||
stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
|
||||
build: m.opt_str("build").unwrap(),
|
||||
host: Filter { values: m.opt_strs("host") },
|
||||
target: Filter { values: m.opt_strs("target") },
|
||||
step: m.opt_strs("step"),
|
||||
build: m.opt_str("build").unwrap_or_else(|| {
|
||||
env::var("BUILD").unwrap()
|
||||
}),
|
||||
host: m.opt_strs("host"),
|
||||
target: m.opt_strs("target"),
|
||||
config: cfg_file,
|
||||
src: m.opt_str("src").map(PathBuf::from),
|
||||
jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
|
||||
args: m.free.clone(),
|
||||
cmd: cmd,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Filter {
|
||||
pub fn contains(&self, name: &str) -> bool {
|
||||
self.values.len() == 0 || self.values.iter().any(|s| s == name)
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> slice::Iter<String> {
|
||||
self.values.iter()
|
||||
impl Subcommand {
|
||||
pub fn test_args(&self) -> Vec<&str> {
|
||||
match *self {
|
||||
Subcommand::Test { ref test_args, .. } => {
|
||||
test_args.iter().flat_map(|s| s.split_whitespace()).collect()
|
||||
}
|
||||
_ => Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -57,6 +57,7 @@ mod channel;
|
||||
mod check;
|
||||
mod clean;
|
||||
mod compile;
|
||||
mod metadata;
|
||||
mod config;
|
||||
mod dist;
|
||||
mod doc;
|
||||
@ -76,7 +77,7 @@ mod job {
|
||||
}
|
||||
|
||||
pub use config::Config;
|
||||
pub use flags::Flags;
|
||||
pub use flags::{Flags, Subcommand};
|
||||
|
||||
/// A structure representing a Rust compiler.
|
||||
///
|
||||
@ -123,13 +124,23 @@ pub struct Build {
|
||||
bootstrap_key_stage0: String,
|
||||
|
||||
// Probed tools at runtime
|
||||
gdb_version: Option<String>,
|
||||
lldb_version: Option<String>,
|
||||
lldb_python_dir: Option<String>,
|
||||
|
||||
// Runtime state filled in later on
|
||||
cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
|
||||
cxx: HashMap<String, gcc::Tool>,
|
||||
crates: HashMap<String, Crate>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Crate {
|
||||
name: String,
|
||||
deps: Vec<String>,
|
||||
path: PathBuf,
|
||||
doc_step: String,
|
||||
build_step: String,
|
||||
test_step: String,
|
||||
}
|
||||
|
||||
/// The various "modes" of invoking Cargo.
|
||||
@ -162,7 +173,9 @@ impl Build {
|
||||
/// By default all build output will be placed in the current directory.
|
||||
pub fn new(flags: Flags, config: Config) -> Build {
|
||||
let cwd = t!(env::current_dir());
|
||||
let src = flags.src.clone().unwrap_or(cwd.clone());
|
||||
let src = flags.src.clone().or_else(|| {
|
||||
env::var_os("SRC").map(|x| x.into())
|
||||
}).unwrap_or(cwd.clone());
|
||||
let out = cwd.join("build");
|
||||
|
||||
let stage0_root = out.join(&config.build).join("stage0/bin");
|
||||
@ -196,7 +209,7 @@ impl Build {
|
||||
package_vers: String::new(),
|
||||
cc: HashMap::new(),
|
||||
cxx: HashMap::new(),
|
||||
gdb_version: None,
|
||||
crates: HashMap::new(),
|
||||
lldb_version: None,
|
||||
lldb_python_dir: None,
|
||||
}
|
||||
@ -204,13 +217,11 @@ impl Build {
|
||||
|
||||
/// Executes the entire build, as configured by the flags and configuration.
|
||||
pub fn build(&mut self) {
|
||||
use step::Source::*;
|
||||
|
||||
unsafe {
|
||||
job::setup();
|
||||
}
|
||||
|
||||
if self.flags.clean {
|
||||
if let Subcommand::Clean = self.flags.cmd {
|
||||
return clean::clean(self);
|
||||
}
|
||||
|
||||
@ -232,247 +243,10 @@ impl Build {
|
||||
}
|
||||
self.verbose("updating submodules");
|
||||
self.update_submodules();
|
||||
self.verbose("learning about cargo");
|
||||
metadata::build(self);
|
||||
|
||||
// The main loop of the build system.
|
||||
//
|
||||
// The `step::all` function returns a topographically sorted list of all
|
||||
// steps that need to be executed as part of this build. Each step has a
|
||||
// corresponding entry in `step.rs` and indicates some unit of work that
|
||||
// needs to be done as part of the build.
|
||||
//
|
||||
// Almost all of these are simple one-liners that shell out to the
|
||||
// corresponding functionality in the extra modules, where more
|
||||
// documentation can be found.
|
||||
let steps = step::all(self);
|
||||
|
||||
self.verbose("bootstrap build plan:");
|
||||
for step in &steps {
|
||||
self.verbose(&format!("{:?}", step));
|
||||
}
|
||||
|
||||
for target in steps {
|
||||
let doc_out = self.out.join(&target.target).join("doc");
|
||||
match target.src {
|
||||
Llvm { _dummy } => {
|
||||
native::llvm(self, target.target);
|
||||
}
|
||||
TestHelpers { _dummy } => {
|
||||
native::test_helpers(self, target.target);
|
||||
}
|
||||
Libstd { compiler } => {
|
||||
compile::std(self, target.target, &compiler);
|
||||
}
|
||||
Libtest { compiler } => {
|
||||
compile::test(self, target.target, &compiler);
|
||||
}
|
||||
Librustc { compiler } => {
|
||||
compile::rustc(self, target.target, &compiler);
|
||||
}
|
||||
LibstdLink { compiler, host } => {
|
||||
compile::std_link(self, target.target, &compiler, host);
|
||||
}
|
||||
LibtestLink { compiler, host } => {
|
||||
compile::test_link(self, target.target, &compiler, host);
|
||||
}
|
||||
LibrustcLink { compiler, host } => {
|
||||
compile::rustc_link(self, target.target, &compiler, host);
|
||||
}
|
||||
Rustc { stage: 0 } => {
|
||||
// nothing to do...
|
||||
}
|
||||
Rustc { stage } => {
|
||||
compile::assemble_rustc(self, stage, target.target);
|
||||
}
|
||||
ToolLinkchecker { stage } => {
|
||||
compile::tool(self, stage, target.target, "linkchecker");
|
||||
}
|
||||
ToolRustbook { stage } => {
|
||||
compile::tool(self, stage, target.target, "rustbook");
|
||||
}
|
||||
ToolErrorIndex { stage } => {
|
||||
compile::tool(self, stage, target.target,
|
||||
"error_index_generator");
|
||||
}
|
||||
ToolCargoTest { stage } => {
|
||||
compile::tool(self, stage, target.target, "cargotest");
|
||||
}
|
||||
ToolTidy { stage } => {
|
||||
compile::tool(self, stage, target.target, "tidy");
|
||||
}
|
||||
ToolCompiletest { stage } => {
|
||||
compile::tool(self, stage, target.target, "compiletest");
|
||||
}
|
||||
DocBook { stage } => {
|
||||
doc::rustbook(self, stage, target.target, "book", &doc_out);
|
||||
}
|
||||
DocNomicon { stage } => {
|
||||
doc::rustbook(self, stage, target.target, "nomicon",
|
||||
&doc_out);
|
||||
}
|
||||
DocStandalone { stage } => {
|
||||
doc::standalone(self, stage, target.target, &doc_out);
|
||||
}
|
||||
DocStd { stage } => {
|
||||
doc::std(self, stage, target.target, &doc_out);
|
||||
}
|
||||
DocTest { stage } => {
|
||||
doc::test(self, stage, target.target, &doc_out);
|
||||
}
|
||||
DocRustc { stage } => {
|
||||
doc::rustc(self, stage, target.target, &doc_out);
|
||||
}
|
||||
DocErrorIndex { stage } => {
|
||||
doc::error_index(self, stage, target.target, &doc_out);
|
||||
}
|
||||
|
||||
CheckLinkcheck { stage } => {
|
||||
check::linkcheck(self, stage, target.target);
|
||||
}
|
||||
CheckCargoTest { stage } => {
|
||||
check::cargotest(self, stage, target.target);
|
||||
}
|
||||
CheckTidy { stage } => {
|
||||
check::tidy(self, stage, target.target);
|
||||
}
|
||||
CheckRPass { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-pass", "run-pass");
|
||||
}
|
||||
CheckRPassFull { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-pass", "run-pass-fulldeps");
|
||||
}
|
||||
CheckCFail { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"compile-fail", "compile-fail");
|
||||
}
|
||||
CheckCFailFull { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"compile-fail", "compile-fail-fulldeps")
|
||||
}
|
||||
CheckPFail { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"parse-fail", "parse-fail");
|
||||
}
|
||||
CheckRFail { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-fail", "run-fail");
|
||||
}
|
||||
CheckRFailFull { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-fail", "run-fail-fulldeps");
|
||||
}
|
||||
CheckPretty { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "pretty");
|
||||
}
|
||||
CheckPrettyRPass { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "run-pass");
|
||||
}
|
||||
CheckPrettyRPassFull { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "run-pass-fulldeps");
|
||||
}
|
||||
CheckPrettyRFail { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "run-fail");
|
||||
}
|
||||
CheckPrettyRFailFull { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "run-fail-fulldeps");
|
||||
}
|
||||
CheckPrettyRPassValgrind { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"pretty", "run-pass-valgrind");
|
||||
}
|
||||
CheckMirOpt { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"mir-opt", "mir-opt");
|
||||
}
|
||||
CheckCodegen { compiler } => {
|
||||
if self.config.codegen_tests {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"codegen", "codegen");
|
||||
}
|
||||
}
|
||||
CheckCodegenUnits { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"codegen-units", "codegen-units");
|
||||
}
|
||||
CheckIncremental { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"incremental", "incremental");
|
||||
}
|
||||
CheckUi { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"ui", "ui");
|
||||
}
|
||||
CheckDebuginfo { compiler } => {
|
||||
if target.target.contains("msvc") {
|
||||
// nothing to do
|
||||
} else if target.target.contains("apple") {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"debuginfo-lldb", "debuginfo");
|
||||
} else {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"debuginfo-gdb", "debuginfo");
|
||||
}
|
||||
}
|
||||
CheckRustdoc { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"rustdoc", "rustdoc");
|
||||
}
|
||||
CheckRPassValgrind { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-pass-valgrind", "run-pass-valgrind");
|
||||
}
|
||||
CheckDocs { compiler } => {
|
||||
check::docs(self, &compiler);
|
||||
}
|
||||
CheckErrorIndex { compiler } => {
|
||||
check::error_index(self, &compiler);
|
||||
}
|
||||
CheckRMake { compiler } => {
|
||||
check::compiletest(self, &compiler, target.target,
|
||||
"run-make", "run-make")
|
||||
}
|
||||
CheckCrateStd { compiler } => {
|
||||
check::krate(self, &compiler, target.target, Mode::Libstd)
|
||||
}
|
||||
CheckCrateTest { compiler } => {
|
||||
check::krate(self, &compiler, target.target, Mode::Libtest)
|
||||
}
|
||||
CheckCrateRustc { compiler } => {
|
||||
check::krate(self, &compiler, target.target, Mode::Librustc)
|
||||
}
|
||||
|
||||
DistDocs { stage } => dist::docs(self, stage, target.target),
|
||||
DistMingw { _dummy } => dist::mingw(self, target.target),
|
||||
DistRustc { stage } => dist::rustc(self, stage, target.target),
|
||||
DistStd { compiler } => dist::std(self, &compiler, target.target),
|
||||
DistSrc { _dummy } => dist::rust_src(self),
|
||||
|
||||
Install { stage } => install::install(self, stage, target.target),
|
||||
|
||||
DebuggerScripts { stage } => {
|
||||
let compiler = Compiler::new(stage, target.target);
|
||||
dist::debugger_scripts(self,
|
||||
&self.sysroot(&compiler),
|
||||
target.target);
|
||||
}
|
||||
|
||||
AndroidCopyLibs { compiler } => {
|
||||
check::android_copy_libs(self, &compiler, target.target);
|
||||
}
|
||||
|
||||
// pseudo-steps
|
||||
Dist { .. } |
|
||||
Doc { .. } |
|
||||
CheckTarget { .. } |
|
||||
Check { .. } => {}
|
||||
}
|
||||
}
|
||||
step::run(self);
|
||||
}
|
||||
|
||||
/// Updates all git submodules that we have.
|
||||
@ -812,6 +586,11 @@ impl Build {
|
||||
self.out.join(target).join("llvm")
|
||||
}
|
||||
|
||||
/// Output directory for all documentation for a target
|
||||
fn doc_out(&self, target: &str) -> PathBuf {
|
||||
self.out.join(target).join("doc")
|
||||
}
|
||||
|
||||
/// Returns true if no custom `llvm-config` is set for the specified target.
|
||||
///
|
||||
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
|
||||
@ -873,7 +652,7 @@ impl Build {
|
||||
|
||||
/// Adds the compiler's bootstrap key to the environment of `cmd`.
|
||||
fn add_bootstrap_key(&self, cmd: &mut Command) {
|
||||
cmd.env("RUSTC_BOOTSTRAP", "");
|
||||
cmd.env("RUSTC_BOOTSTRAP", "1");
|
||||
// FIXME: Transitionary measure to bootstrap using the old bootstrap logic.
|
||||
// Remove this once the bootstrap compiler uses the new login in Issue #36548.
|
||||
cmd.env("RUSTC_BOOTSTRAP_KEY", "62b3e239");
|
||||
|
95
src/bootstrap/metadata.rs
Normal file
95
src/bootstrap/metadata.rs
Normal file
@ -0,0 +1,95 @@
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::process::Command;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use build_helper::output;
|
||||
use rustc_serialize::json;
|
||||
|
||||
use {Build, Crate};
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Output {
|
||||
packages: Vec<Package>,
|
||||
resolve: Resolve,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Package {
|
||||
id: String,
|
||||
name: String,
|
||||
source: Option<String>,
|
||||
manifest_path: String,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct Resolve {
|
||||
nodes: Vec<ResolveNode>,
|
||||
}
|
||||
|
||||
#[derive(RustcDecodable)]
|
||||
struct ResolveNode {
|
||||
id: String,
|
||||
dependencies: Vec<String>,
|
||||
}
|
||||
|
||||
pub fn build(build: &mut Build) {
|
||||
build_krate(build, "src/rustc/std_shim");
|
||||
build_krate(build, "src/rustc/test_shim");
|
||||
build_krate(build, "src/rustc");
|
||||
}
|
||||
|
||||
fn build_krate(build: &mut Build, krate: &str) {
|
||||
// Run `cargo metadata` to figure out what crates we're testing.
|
||||
//
|
||||
// Down below we're going to call `cargo test`, but to test the right set
|
||||
// of packages we're going to have to know what `-p` arguments to pass it
|
||||
// to know what crates to test. Here we run `cargo metadata` to learn about
|
||||
// the dependency graph and what `-p` arguments there are.
|
||||
let mut cargo = Command::new(&build.cargo);
|
||||
cargo.arg("metadata")
|
||||
.arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"));
|
||||
let output = output(&mut cargo);
|
||||
let output: Output = json::decode(&output).unwrap();
|
||||
let mut id2name = HashMap::new();
|
||||
for package in output.packages {
|
||||
if package.source.is_none() {
|
||||
id2name.insert(package.id, package.name.clone());
|
||||
let mut path = PathBuf::from(package.manifest_path);
|
||||
path.pop();
|
||||
build.crates.insert(package.name.clone(), Crate {
|
||||
build_step: format!("build-crate-{}", package.name),
|
||||
doc_step: format!("doc-crate-{}", package.name),
|
||||
test_step: format!("test-crate-{}", package.name),
|
||||
name: package.name,
|
||||
deps: Vec::new(),
|
||||
path: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for node in output.resolve.nodes {
|
||||
let name = match id2name.get(&node.id) {
|
||||
Some(name) => name,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let krate = build.crates.get_mut(name).unwrap();
|
||||
for dep in node.dependencies.iter() {
|
||||
let dep = match id2name.get(dep) {
|
||||
Some(dep) => dep,
|
||||
None => continue,
|
||||
};
|
||||
krate.deps.push(dep.clone());
|
||||
}
|
||||
}
|
||||
}
|
@ -17,47 +17,46 @@ else
|
||||
BOOTSTRAP_ARGS :=
|
||||
endif
|
||||
|
||||
BOOTSTRAP := $(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap.py $(BOOTSTRAP_ARGS)
|
||||
BOOTSTRAP := $(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap.py
|
||||
|
||||
all:
|
||||
$(Q)$(BOOTSTRAP)
|
||||
$(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS)
|
||||
$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
|
||||
|
||||
# Don’t use $(Q) here, always show how to invoke the bootstrap script directly
|
||||
help:
|
||||
$(BOOTSTRAP) --help
|
||||
|
||||
clean:
|
||||
$(Q)$(BOOTSTRAP) --clean
|
||||
$(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS)
|
||||
|
||||
rustc-stage1:
|
||||
$(Q)$(BOOTSTRAP) --step libtest --stage 1
|
||||
$(Q)$(BOOTSTRAP) build --stage 1 src/libtest $(BOOTSTRAP_ARGS)
|
||||
rustc-stage2:
|
||||
$(Q)$(BOOTSTRAP) --step libtest --stage 2
|
||||
$(Q)$(BOOTSTRAP) build --stage 2 src/libtest $(BOOTSTRAP_ARGS)
|
||||
|
||||
docs: doc
|
||||
doc:
|
||||
$(Q)$(BOOTSTRAP) --step doc
|
||||
style:
|
||||
$(Q)$(BOOTSTRAP) --step doc-style
|
||||
$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
|
||||
nomicon:
|
||||
$(Q)$(BOOTSTRAP) --step doc-nomicon
|
||||
$(Q)$(BOOTSTRAP) doc src/doc/nomicon $(BOOTSTRAP_ARGS)
|
||||
book:
|
||||
$(Q)$(BOOTSTRAP) --step doc-book
|
||||
$(Q)$(BOOTSTRAP) doc src/doc/book $(BOOTSTRAP_ARGS)
|
||||
standalone-docs:
|
||||
$(Q)$(BOOTSTRAP) --step doc-standalone
|
||||
$(Q)$(BOOTSTRAP) doc src/doc $(BOOTSTRAP_ARGS)
|
||||
check:
|
||||
$(Q)$(BOOTSTRAP) --step check
|
||||
$(Q)$(BOOTSTRAP) test $(BOOTSTRAP_ARGS)
|
||||
check-cargotest:
|
||||
$(Q)$(BOOTSTRAP) --step check-cargotest
|
||||
$(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS)
|
||||
dist:
|
||||
$(Q)$(BOOTSTRAP) --step dist
|
||||
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
|
||||
install:
|
||||
ifeq (root user, $(USER) $(patsubst %,user,$(SUDO_USER)))
|
||||
$(Q)echo "'sudo make install' is not supported currently."
|
||||
else
|
||||
$(Q)$(BOOTSTRAP) --step install
|
||||
$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
|
||||
endif
|
||||
tidy:
|
||||
$(Q)$(BOOTSTRAP) --step check-tidy --stage 0
|
||||
$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS)
|
||||
|
||||
.PHONY: dist
|
||||
|
@ -92,6 +92,12 @@ pub fn check(build: &mut Build) {
|
||||
need_cmd(s.as_ref());
|
||||
}
|
||||
|
||||
if let Some(ref gdb) = build.config.gdb {
|
||||
need_cmd(gdb.as_ref());
|
||||
} else {
|
||||
build.config.gdb = have_cmd("gdb".as_ref());
|
||||
}
|
||||
|
||||
// We're gonna build some custom C code here and there, host triples
|
||||
// also build some C++ shims for LLVM so we need a C++ compiler.
|
||||
for target in build.config.target.iter() {
|
||||
@ -198,7 +204,6 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
|
||||
.to_string()
|
||||
})
|
||||
};
|
||||
build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
|
||||
build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
|
||||
if build.lldb_version.is_some() {
|
||||
build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -57,8 +57,7 @@ pub fn cp_r(src: &Path, dst: &Path) {
|
||||
let name = path.file_name().unwrap();
|
||||
let dst = dst.join(name);
|
||||
if t!(f.file_type()).is_dir() {
|
||||
let _ = fs::remove_dir_all(&dst);
|
||||
t!(fs::create_dir(&dst));
|
||||
t!(fs::create_dir_all(&dst));
|
||||
cp_r(&path, &dst);
|
||||
} else {
|
||||
let _ = fs::remove_file(&dst);
|
||||
@ -172,3 +171,21 @@ pub fn dylib_path() -> Vec<PathBuf> {
|
||||
env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// `push` all components to `buf`. On windows, append `.exe` to the last component.
|
||||
pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
|
||||
let (&file, components) = components.split_last().expect("at least one component required");
|
||||
let mut file = file.to_owned();
|
||||
|
||||
if cfg!(windows) {
|
||||
file.push_str(".exe");
|
||||
}
|
||||
|
||||
for c in components {
|
||||
buf.push(c);
|
||||
}
|
||||
|
||||
buf.push(file);
|
||||
|
||||
buf
|
||||
}
|
||||
|
@ -1 +1 @@
|
||||
Subproject commit f03ba5a4e8bf16dcf42dd742a4ce255c36321356
|
||||
Subproject commit ecd2b1f6d689d5afbf5debe8afb3739337323852
|
@ -327,7 +327,7 @@ that takes a reference like so:
|
||||
fn call_with_ref<F>(some_closure:F) -> i32
|
||||
where F: Fn(&i32) -> i32 {
|
||||
|
||||
let mut value = 0;
|
||||
let value = 0;
|
||||
some_closure(&value)
|
||||
}
|
||||
```
|
||||
@ -340,14 +340,15 @@ fn call_with_ref<'a, F>(some_closure:F) -> i32
|
||||
where F: Fn(&'a i32) -> i32 {
|
||||
```
|
||||
|
||||
However this presents a problem in our case. When you specify the explicit
|
||||
lifetime on a function it binds that lifetime to the *entire* scope of the function
|
||||
instead of just the invocation scope of our closure. This means that the borrow checker
|
||||
will see a mutable reference in the same lifetime as our immutable reference and fail
|
||||
to compile.
|
||||
However, this presents a problem in our case. When a function has an explicit
|
||||
lifetime parameter, that lifetime must be at least as long as the *entire*
|
||||
call to that function. The borrow checker will complain that `value` doesn't
|
||||
live long enough, because it is only in scope after its declaration inside the
|
||||
function body.
|
||||
|
||||
In order to say that we only need the lifetime to be valid for the invocation scope
|
||||
of the closure we can use Higher-Ranked Trait Bounds with the `for<...>` syntax:
|
||||
What we need is a closure that can borrow its argument only for its own
|
||||
invocation scope, not for the outer function's scope. In order to say that,
|
||||
we can use Higher-Ranked Trait Bounds with the `for<...>` syntax:
|
||||
|
||||
```ignore
|
||||
fn call_with_ref<F>(some_closure:F) -> i32
|
||||
@ -362,7 +363,7 @@ expect.
|
||||
fn call_with_ref<F>(some_closure:F) -> i32
|
||||
where F: for<'a> Fn(&'a i32) -> i32 {
|
||||
|
||||
let mut value = 0;
|
||||
let value = 0;
|
||||
some_closure(&value)
|
||||
}
|
||||
```
|
||||
|
@ -17,6 +17,7 @@ use core::hash::{Hash, Hasher};
|
||||
use core::ops::{Add, AddAssign, Deref};
|
||||
|
||||
use fmt;
|
||||
use string::String;
|
||||
|
||||
use self::Cow::*;
|
||||
|
||||
@ -159,7 +160,10 @@ impl<'a, B: ?Sized> Cow<'a, B> where B: ToOwned {
|
||||
match *self {
|
||||
Borrowed(borrowed) => {
|
||||
*self = Owned(borrowed.to_owned());
|
||||
self.to_mut()
|
||||
match *self {
|
||||
Borrowed(..) => unreachable!(),
|
||||
Owned(ref mut owned) => owned,
|
||||
}
|
||||
}
|
||||
Owned(ref mut owned) => owned,
|
||||
}
|
||||
@ -284,48 +288,60 @@ impl<'a, T: ?Sized + ToOwned> AsRef<T> for Cow<'a, T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "cow_add", since = "1.13.0")]
|
||||
#[stable(feature = "cow_add", since = "1.14.0")]
|
||||
impl<'a> Add<&'a str> for Cow<'a, str> {
|
||||
type Output = Cow<'a, str>;
|
||||
|
||||
fn add(self, rhs: &'a str) -> Self {
|
||||
if self == "" {
|
||||
Cow::Borrowed(rhs)
|
||||
} else if rhs == "" {
|
||||
self
|
||||
} else {
|
||||
Cow::Owned(self.into_owned() + rhs)
|
||||
}
|
||||
#[inline]
|
||||
fn add(mut self, rhs: &'a str) -> Self::Output {
|
||||
self += rhs;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "cow_add", since = "1.13.0")]
|
||||
#[stable(feature = "cow_add", since = "1.14.0")]
|
||||
impl<'a> Add<Cow<'a, str>> for Cow<'a, str> {
|
||||
type Output = Cow<'a, str>;
|
||||
|
||||
fn add(self, rhs: Cow<'a, str>) -> Self {
|
||||
if self == "" {
|
||||
rhs
|
||||
} else if rhs == "" {
|
||||
self
|
||||
#[inline]
|
||||
fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {
|
||||
self += rhs;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "cow_add", since = "1.14.0")]
|
||||
impl<'a> AddAssign<&'a str> for Cow<'a, str> {
|
||||
fn add_assign(&mut self, rhs: &'a str) {
|
||||
if self.is_empty() {
|
||||
*self = Cow::Borrowed(rhs)
|
||||
} else if rhs.is_empty() {
|
||||
return;
|
||||
} else {
|
||||
Cow::Owned(self.into_owned() + rhs.borrow())
|
||||
if let Cow::Borrowed(lhs) = *self {
|
||||
let mut s = String::with_capacity(lhs.len() + rhs.len());
|
||||
s.push_str(lhs);
|
||||
*self = Cow::Owned(s);
|
||||
}
|
||||
self.to_mut().push_str(rhs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "cow_add", since = "1.13.0")]
|
||||
impl<'a> AddAssign<&'a str> for Cow<'a, str> {
|
||||
fn add_assign(&mut self, rhs: &'a str) {
|
||||
if rhs == "" { return; }
|
||||
self.to_mut().push_str(rhs);
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "cow_add", since = "1.13.0")]
|
||||
#[stable(feature = "cow_add", since = "1.14.0")]
|
||||
impl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {
|
||||
fn add_assign(&mut self, rhs: Cow<'a, str>) {
|
||||
if rhs == "" { return; }
|
||||
self.to_mut().push_str(rhs.borrow());
|
||||
if self.is_empty() {
|
||||
*self = rhs
|
||||
} else if rhs.is_empty() {
|
||||
return;
|
||||
} else {
|
||||
if let Cow::Borrowed(lhs) = *self {
|
||||
let mut s = String::with_capacity(lhs.len() + rhs.len());
|
||||
s.push_str(lhs);
|
||||
*self = Cow::Owned(s);
|
||||
}
|
||||
self.to_mut().push_str(&rhs);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1904,10 +1904,10 @@ impl<'a> FromIterator<String> for Cow<'a, str> {
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl Into<Vec<u8>> for String {
|
||||
fn into(self) -> Vec<u8> {
|
||||
self.into_bytes()
|
||||
#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
|
||||
impl From<String> for Vec<u8> {
|
||||
fn from(string : String) -> Vec<u8> {
|
||||
string.into_bytes()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
@ -12,54 +12,130 @@ use std::borrow::Cow;
|
||||
|
||||
// check that Cow<'a, str> implements addition
|
||||
#[test]
|
||||
fn check_cow_add() {
|
||||
borrowed1 = Cow::Borrowed("Hello, ");
|
||||
borrowed2 = Cow::Borrowed("World!");
|
||||
borrow_empty = Cow::Borrowed("");
|
||||
fn check_cow_add_cow() {
|
||||
let borrowed1 = Cow::Borrowed("Hello, ");
|
||||
let borrowed2 = Cow::Borrowed("World!");
|
||||
let borrow_empty = Cow::Borrowed("");
|
||||
|
||||
owned1 = Cow::Owned("Hi, ".into());
|
||||
owned2 = Cow::Owned("Rustaceans!".into());
|
||||
owned_empty = Cow::Owned("".into());
|
||||
let owned1: Cow<str> = Cow::Owned(String::from("Hi, "));
|
||||
let owned2: Cow<str> = Cow::Owned(String::from("Rustaceans!"));
|
||||
let owned_empty: Cow<str> = Cow::Owned(String::new());
|
||||
|
||||
assert_eq!("Hello, World!", borrowed1 + borrowed2);
|
||||
assert_eq!("Hello, Rustaceans!", borrowed1 + owned2);
|
||||
assert_eq!("Hello, World!", borrowed1.clone() + borrowed2.clone());
|
||||
assert_eq!("Hello, Rustaceans!", borrowed1.clone() + owned2.clone());
|
||||
|
||||
assert_eq!("Hello, World!", owned1 + borrowed2);
|
||||
assert_eq!("Hello, Rustaceans!", owned1 + owned2);
|
||||
assert_eq!("Hi, World!", owned1.clone() + borrowed2.clone());
|
||||
assert_eq!("Hi, Rustaceans!", owned1.clone() + owned2.clone());
|
||||
|
||||
if let Cow::Owned(_) = borrowed1 + borrow_empty {
|
||||
if let Cow::Owned(_) = borrowed1.clone() + borrow_empty.clone() {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
if let Cow::Owned(_) = borrow_empty + borrowed1 {
|
||||
if let Cow::Owned(_) = borrow_empty.clone() + borrowed1.clone() {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
if let Cow::Owned(_) = borrowed1 + owned_empty {
|
||||
if let Cow::Owned(_) = borrowed1.clone() + owned_empty.clone() {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
if let Cow::Owned(_) = owned_empty + borrowed1 {
|
||||
if let Cow::Owned(_) = owned_empty.clone() + borrowed1.clone() {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
}
|
||||
|
||||
fn check_cow_add_assign() {
|
||||
borrowed1 = Cow::Borrowed("Hello, ");
|
||||
borrowed2 = Cow::Borrowed("World!");
|
||||
borrow_empty = Cow::Borrowed("");
|
||||
#[test]
|
||||
fn check_cow_add_str() {
|
||||
let borrowed = Cow::Borrowed("Hello, ");
|
||||
let borrow_empty = Cow::Borrowed("");
|
||||
|
||||
owned1 = Cow::Owned("Hi, ".into());
|
||||
owned2 = Cow::Owned("Rustaceans!".into());
|
||||
owned_empty = Cow::Owned("".into());
|
||||
let owned: Cow<str> = Cow::Owned(String::from("Hi, "));
|
||||
let owned_empty: Cow<str> = Cow::Owned(String::new());
|
||||
|
||||
let borrowed1clone = borrowed1.clone();
|
||||
borrowed1clone += borrow_empty;
|
||||
assert_eq!((&borrowed1clone).as_ptr(), (&borrowed1).as_ptr());
|
||||
assert_eq!("Hello, World!", borrowed.clone() + "World!");
|
||||
|
||||
borrowed1clone += owned_empty;
|
||||
assert_eq!((&borrowed1clone).as_ptr(), (&borrowed1).as_ptr());
|
||||
assert_eq!("Hi, World!", owned.clone() + "World!");
|
||||
|
||||
if let Cow::Owned(_) = borrowed.clone() + "" {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
if let Cow::Owned(_) = borrow_empty.clone() + "Hello, " {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
if let Cow::Owned(_) = owned_empty.clone() + "Hello, " {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_cow_add_assign_cow() {
|
||||
let mut borrowed1 = Cow::Borrowed("Hello, ");
|
||||
let borrowed2 = Cow::Borrowed("World!");
|
||||
let borrow_empty = Cow::Borrowed("");
|
||||
|
||||
let mut owned1: Cow<str> = Cow::Owned(String::from("Hi, "));
|
||||
let owned2: Cow<str> = Cow::Owned(String::from("Rustaceans!"));
|
||||
let owned_empty: Cow<str> = Cow::Owned(String::new());
|
||||
|
||||
let mut s = borrowed1.clone();
|
||||
s += borrow_empty.clone();
|
||||
assert_eq!("Hello, ", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
let mut s = borrow_empty.clone();
|
||||
s += borrowed1.clone();
|
||||
assert_eq!("Hello, ", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
let mut s = borrowed1.clone();
|
||||
s += owned_empty.clone();
|
||||
assert_eq!("Hello, ", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
let mut s = owned_empty.clone();
|
||||
s += borrowed1.clone();
|
||||
assert_eq!("Hello, ", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
|
||||
owned1 += borrowed2;
|
||||
borrowed1 += owned2;
|
||||
|
||||
assert_eq!("Hello, World!", owned1);
|
||||
assert_eq!("Hi, World!", owned1);
|
||||
assert_eq!("Hello, Rustaceans!", borrowed1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_cow_add_assign_str() {
|
||||
let mut borrowed = Cow::Borrowed("Hello, ");
|
||||
let borrow_empty = Cow::Borrowed("");
|
||||
|
||||
let mut owned: Cow<str> = Cow::Owned(String::from("Hi, "));
|
||||
let owned_empty: Cow<str> = Cow::Owned(String::new());
|
||||
|
||||
let mut s = borrowed.clone();
|
||||
s += "";
|
||||
assert_eq!("Hello, ", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
let mut s = borrow_empty.clone();
|
||||
s += "World!";
|
||||
assert_eq!("World!", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
let mut s = owned_empty.clone();
|
||||
s += "World!";
|
||||
assert_eq!("World!", s);
|
||||
if let Cow::Owned(_) = s {
|
||||
panic!("Adding empty strings to a borrow should note allocate");
|
||||
}
|
||||
|
||||
owned += "World!";
|
||||
borrowed += "World!";
|
||||
|
||||
assert_eq!("Hi, World!", owned);
|
||||
assert_eq!("Hello, World!", borrowed);
|
||||
}
|
||||
|
@ -42,6 +42,7 @@ mod bench;
|
||||
|
||||
mod binary_heap;
|
||||
mod btree;
|
||||
mod cow_str;
|
||||
mod enum_set;
|
||||
mod fmt;
|
||||
mod linked_list;
|
||||
|
@ -24,7 +24,8 @@ use mem;
|
||||
///
|
||||
/// See: https://131002.net/siphash/
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct SipHasher13 {
|
||||
hasher: Hasher<Sip13Rounds>,
|
||||
@ -34,7 +35,8 @@ pub struct SipHasher13 {
|
||||
///
|
||||
/// See: https://131002.net/siphash/
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct SipHasher24 {
|
||||
hasher: Hasher<Sip24Rounds>,
|
||||
@ -53,7 +55,8 @@ pub struct SipHasher24 {
|
||||
/// it is not intended for cryptographic purposes. As such, all
|
||||
/// cryptographic uses of this implementation are _strongly discouraged_.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct SipHasher(SipHasher24);
|
||||
|
||||
@ -140,7 +143,8 @@ impl SipHasher {
|
||||
/// Creates a new `SipHasher` with the two initial keys set to 0.
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new() -> SipHasher {
|
||||
SipHasher::new_with_keys(0, 0)
|
||||
}
|
||||
@ -148,7 +152,8 @@ impl SipHasher {
|
||||
/// Creates a `SipHasher` that is keyed off the provided keys.
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher {
|
||||
SipHasher(SipHasher24::new_with_keys(key0, key1))
|
||||
}
|
||||
@ -158,7 +163,8 @@ impl SipHasher13 {
|
||||
/// Creates a new `SipHasher13` with the two initial keys set to 0.
|
||||
#[inline]
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new() -> SipHasher13 {
|
||||
SipHasher13::new_with_keys(0, 0)
|
||||
}
|
||||
@ -166,7 +172,8 @@ impl SipHasher13 {
|
||||
/// Creates a `SipHasher13` that is keyed off the provided keys.
|
||||
#[inline]
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 {
|
||||
SipHasher13 {
|
||||
hasher: Hasher::new_with_keys(key0, key1)
|
||||
@ -178,7 +185,8 @@ impl SipHasher24 {
|
||||
/// Creates a new `SipHasher24` with the two initial keys set to 0.
|
||||
#[inline]
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new() -> SipHasher24 {
|
||||
SipHasher24::new_with_keys(0, 0)
|
||||
}
|
||||
@ -186,7 +194,8 @@ impl SipHasher24 {
|
||||
/// Creates a `SipHasher24` that is keyed off the provided keys.
|
||||
#[inline]
|
||||
#[unstable(feature = "sip_hash_13", issue = "34767")]
|
||||
#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")]
|
||||
#[rustc_deprecated(since = "1.13.0",
|
||||
reason = "use `std::collections::hash_map::DefaultHasher` instead")]
|
||||
pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher24 {
|
||||
SipHasher24 {
|
||||
hasher: Hasher::new_with_keys(key0, key1)
|
||||
|
@ -1374,10 +1374,7 @@ impl<I: Iterator> Peekable<I> {
|
||||
if self.peeked.is_none() {
|
||||
self.peeked = self.iter.next();
|
||||
}
|
||||
match self.peeked {
|
||||
Some(ref value) => Some(value),
|
||||
None => None,
|
||||
}
|
||||
self.peeked.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -311,11 +311,11 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
}
|
||||
|
||||
hir::ExprIndex(ref l, ref r) |
|
||||
hir::ExprBinary(_, ref l, ref r) if self.tcx.is_method_call(expr.id) => {
|
||||
hir::ExprBinary(_, ref l, ref r) if self.tcx.tables().is_method_call(expr.id) => {
|
||||
self.call(expr, pred, &l, Some(&**r).into_iter())
|
||||
}
|
||||
|
||||
hir::ExprUnary(_, ref e) if self.tcx.is_method_call(expr.id) => {
|
||||
hir::ExprUnary(_, ref e) if self.tcx.tables().is_method_call(expr.id) => {
|
||||
self.call(expr, pred, &e, None::<hir::Expr>.iter())
|
||||
}
|
||||
|
||||
@ -372,9 +372,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
|
||||
func_or_rcvr: &hir::Expr,
|
||||
args: I) -> CFGIndex {
|
||||
let method_call = ty::MethodCall::expr(call_expr.id);
|
||||
let fn_ty = match self.tcx.tables.borrow().method_map.get(&method_call) {
|
||||
let fn_ty = match self.tcx.tables().method_map.get(&method_call) {
|
||||
Some(method) => method.ty,
|
||||
None => self.tcx.expr_ty_adjusted(func_or_rcvr)
|
||||
None => self.tcx.tables().expr_ty_adjusted(func_or_rcvr)
|
||||
};
|
||||
|
||||
let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
|
||||
|
@ -1208,38 +1208,32 @@ impl<'a> LoweringContext<'a> {
|
||||
ExprKind::Break(opt_ident) => hir::ExprBreak(self.lower_opt_sp_ident(opt_ident)),
|
||||
ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)),
|
||||
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| self.lower_expr(x))),
|
||||
ExprKind::InlineAsm(InlineAsm {
|
||||
ref inputs,
|
||||
ref outputs,
|
||||
ref asm,
|
||||
asm_str_style,
|
||||
ref clobbers,
|
||||
volatile,
|
||||
alignstack,
|
||||
dialect,
|
||||
expn_id,
|
||||
}) => hir::ExprInlineAsm(P(hir::InlineAsm {
|
||||
inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
|
||||
outputs: outputs.iter()
|
||||
.map(|out| {
|
||||
hir::InlineAsmOutput {
|
||||
constraint: out.constraint.clone(),
|
||||
is_rw: out.is_rw,
|
||||
is_indirect: out.is_indirect,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
asm: asm.clone(),
|
||||
asm_str_style: asm_str_style,
|
||||
clobbers: clobbers.clone().into(),
|
||||
volatile: volatile,
|
||||
alignstack: alignstack,
|
||||
dialect: dialect,
|
||||
expn_id: expn_id,
|
||||
}), outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(),
|
||||
inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect()),
|
||||
ExprKind::InlineAsm(ref asm) => {
|
||||
let hir_asm = hir::InlineAsm {
|
||||
inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
|
||||
outputs: asm.outputs.iter().map(|out| {
|
||||
hir::InlineAsmOutput {
|
||||
constraint: out.constraint.clone(),
|
||||
is_rw: out.is_rw,
|
||||
is_indirect: out.is_indirect,
|
||||
}
|
||||
}).collect(),
|
||||
asm: asm.asm.clone(),
|
||||
asm_str_style: asm.asm_str_style,
|
||||
clobbers: asm.clobbers.clone().into(),
|
||||
volatile: asm.volatile,
|
||||
alignstack: asm.alignstack,
|
||||
dialect: asm.dialect,
|
||||
expn_id: asm.expn_id,
|
||||
};
|
||||
let outputs =
|
||||
asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect();
|
||||
let inputs =
|
||||
asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect();
|
||||
hir::ExprInlineAsm(P(hir_asm), outputs, inputs)
|
||||
}
|
||||
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
|
||||
hir::ExprStruct(self.lower_path(path),
|
||||
hir::ExprStruct(P(self.lower_path(path)),
|
||||
fields.iter().map(|x| self.lower_field(x)).collect(),
|
||||
maybe_expr.as_ref().map(|x| self.lower_expr(x)))
|
||||
}
|
||||
@ -1743,7 +1737,7 @@ impl<'a> LoweringContext<'a> {
|
||||
e: Option<P<hir::Expr>>,
|
||||
attrs: ThinVec<Attribute>) -> P<hir::Expr> {
|
||||
let def = self.resolver.resolve_generated_global_path(&path, false);
|
||||
let expr = self.expr(sp, hir::ExprStruct(path, fields, e), attrs);
|
||||
let expr = self.expr(sp, hir::ExprStruct(P(path), fields, e), attrs);
|
||||
self.resolver.record_resolution(expr.id, def);
|
||||
expr
|
||||
}
|
||||
|
@ -840,8 +840,8 @@ pub enum UnsafeSource {
|
||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
|
||||
pub struct Expr {
|
||||
pub id: NodeId,
|
||||
pub node: Expr_,
|
||||
pub span: Span,
|
||||
pub node: Expr_,
|
||||
pub attrs: ThinVec<Attribute>,
|
||||
}
|
||||
|
||||
@ -946,7 +946,7 @@ pub enum Expr_ {
|
||||
///
|
||||
/// For example, `Foo {x: 1, y: 2}`, or
|
||||
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
|
||||
ExprStruct(Path, HirVec<Field>, Option<P<Expr>>),
|
||||
ExprStruct(P<Path>, HirVec<Field>, Option<P<Expr>>),
|
||||
|
||||
/// An array literal constructed from one repeated element.
|
||||
///
|
||||
|
@ -1266,26 +1266,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||
self.region_vars.new_bound(debruijn)
|
||||
}
|
||||
|
||||
/// Apply `adjustment` to the type of `expr`
|
||||
pub fn adjust_expr_ty(&self,
|
||||
expr: &hir::Expr,
|
||||
adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
let raw_ty = self.expr_ty(expr);
|
||||
let raw_ty = self.shallow_resolve(raw_ty);
|
||||
let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty);
|
||||
raw_ty.adjust(self.tcx,
|
||||
expr.span,
|
||||
expr.id,
|
||||
adjustment,
|
||||
|method_call| self.tables
|
||||
.borrow()
|
||||
.method_map
|
||||
.get(&method_call)
|
||||
.map(|method| resolve_ty(method.ty)))
|
||||
}
|
||||
|
||||
/// True if errors have been reported since this infcx was
|
||||
/// created. This is sometimes used as a heuristic to skip
|
||||
/// reporting errors that often occur as a result of earlier
|
||||
@ -1622,7 +1602,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> McResult<Ty<'tcx>> {
|
||||
let ty = self.adjust_expr_ty(expr, self.tables.borrow().adjustments.get(&expr.id));
|
||||
let ty = self.tables.borrow().expr_ty_adjusted(expr);
|
||||
self.resolve_type_vars_or_error(&ty)
|
||||
}
|
||||
|
||||
@ -1666,9 +1646,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
|
||||
.map(|method| method.def_id)
|
||||
}
|
||||
|
||||
pub fn adjustments(&self) -> Ref<NodeMap<adjustment::AutoAdjustment<'tcx>>> {
|
||||
pub fn adjustments(&self) -> Ref<NodeMap<adjustment::Adjustment<'tcx>>> {
|
||||
fn project_adjustments<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
|
||||
-> &'a NodeMap<adjustment::AutoAdjustment<'tcx>> {
|
||||
-> &'a NodeMap<adjustment::Adjustment<'tcx>> {
|
||||
&tables.adjustments
|
||||
}
|
||||
|
||||
|
@ -92,7 +92,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
||||
match def {
|
||||
Def::AssociatedTy(..) | Def::Method(_) | Def::AssociatedConst(_)
|
||||
if self.tcx.trait_of_item(def.def_id()).is_some() => {
|
||||
if let Some(substs) = self.tcx.tables.borrow().item_substs.get(&id) {
|
||||
if let Some(substs) = self.tcx.tables().item_substs.get(&id) {
|
||||
if let ty::TyAdt(tyid, _) = substs.substs.type_at(0).sty {
|
||||
self.check_def_id(tyid.did);
|
||||
}
|
||||
@ -123,12 +123,12 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
||||
|
||||
fn lookup_and_handle_method(&mut self, id: ast::NodeId) {
|
||||
let method_call = ty::MethodCall::expr(id);
|
||||
let method = self.tcx.tables.borrow().method_map[&method_call];
|
||||
let method = self.tcx.tables().method_map[&method_call];
|
||||
self.check_def_id(method.def_id);
|
||||
}
|
||||
|
||||
fn handle_field_access(&mut self, lhs: &hir::Expr, name: ast::Name) {
|
||||
match self.tcx.expr_ty_adjusted(lhs).sty {
|
||||
match self.tcx.tables().expr_ty_adjusted(lhs).sty {
|
||||
ty::TyAdt(def, _) => {
|
||||
self.insert_def_id(def.struct_variant().field_named(name).did);
|
||||
}
|
||||
@ -137,7 +137,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
||||
}
|
||||
|
||||
fn handle_tup_field_access(&mut self, lhs: &hir::Expr, idx: usize) {
|
||||
match self.tcx.expr_ty_adjusted(lhs).sty {
|
||||
match self.tcx.tables().expr_ty_adjusted(lhs).sty {
|
||||
ty::TyAdt(def, _) => {
|
||||
self.insert_def_id(def.struct_variant().fields[idx].did);
|
||||
}
|
||||
@ -148,7 +148,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
|
||||
|
||||
fn handle_field_pattern_match(&mut self, lhs: &hir::Pat,
|
||||
pats: &[codemap::Spanned<hir::FieldPat>]) {
|
||||
let variant = match self.tcx.node_id_to_type(lhs.id).sty {
|
||||
let variant = match self.tcx.tables().node_id_to_type(lhs.id).sty {
|
||||
ty::TyAdt(adt, _) => {
|
||||
adt.variant_of_def(self.tcx.expect_def(lhs.id))
|
||||
}
|
||||
@ -433,7 +433,7 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
|
||||
}
|
||||
|
||||
fn should_warn_about_field(&mut self, field: &hir::StructField) -> bool {
|
||||
let field_type = self.tcx.node_id_to_type(field.id);
|
||||
let field_type = self.tcx.tables().node_id_to_type(field.id);
|
||||
let is_marker_field = match field_type.ty_to_def_id() {
|
||||
Some(def_id) => self.tcx.lang_items.items().iter().any(|item| *item == Some(def_id)),
|
||||
_ => false
|
||||
|
@ -159,7 +159,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
|
||||
match expr.node {
|
||||
hir::ExprMethodCall(..) => {
|
||||
let method_call = MethodCall::expr(expr.id);
|
||||
let base_type = self.tcx.tables.borrow().method_map[&method_call].ty;
|
||||
let base_type = self.tcx.tables().method_map[&method_call].ty;
|
||||
debug!("effect: method call case, base type is {:?}",
|
||||
base_type);
|
||||
if type_is_unsafe_function(base_type) {
|
||||
@ -168,7 +168,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
hir::ExprCall(ref base, _) => {
|
||||
let base_type = self.tcx.expr_ty_adjusted(base);
|
||||
let base_type = self.tcx.tables().expr_ty_adjusted(base);
|
||||
debug!("effect: call case, base type is {:?}",
|
||||
base_type);
|
||||
if type_is_unsafe_function(base_type) {
|
||||
@ -176,7 +176,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
hir::ExprUnary(hir::UnDeref, ref base) => {
|
||||
let base_type = self.tcx.expr_ty_adjusted(base);
|
||||
let base_type = self.tcx.tables().expr_ty_adjusted(base);
|
||||
debug!("effect: unary case, base type is {:?}",
|
||||
base_type);
|
||||
if let ty::TyRawPtr(_) = base_type.sty {
|
||||
@ -200,7 +200,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
hir::ExprField(ref base_expr, field) => {
|
||||
if let ty::TyAdt(adt, ..) = self.tcx.expr_ty_adjusted(base_expr).sty {
|
||||
if let ty::TyAdt(adt, ..) = self.tcx.tables().expr_ty_adjusted(base_expr).sty {
|
||||
if adt.is_union() {
|
||||
self.require_unsafe(field.span, "access to union field");
|
||||
}
|
||||
@ -214,7 +214,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> {
|
||||
|
||||
fn visit_pat(&mut self, pat: &hir::Pat) {
|
||||
if let PatKind::Struct(_, ref fields, _) = pat.node {
|
||||
if let ty::TyAdt(adt, ..) = self.tcx.pat_ty(pat).sty {
|
||||
if let ty::TyAdt(adt, ..) = self.tcx.tables().pat_ty(pat).sty {
|
||||
if adt.is_union() {
|
||||
for field in fields {
|
||||
self.require_unsafe(field.span, "matching on union field");
|
||||
|
@ -720,11 +720,11 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
//NOTE(@jroesch): mixed RefCell borrow causes crash
|
||||
let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone());
|
||||
if let Some(adjustment) = adj {
|
||||
match adjustment {
|
||||
adjustment::AdjustNeverToAny(..) |
|
||||
adjustment::AdjustReifyFnPointer |
|
||||
adjustment::AdjustUnsafeFnPointer |
|
||||
adjustment::AdjustMutToConstPointer => {
|
||||
match adjustment.kind {
|
||||
adjustment::Adjust::NeverToAny |
|
||||
adjustment::Adjust::ReifyFnPointer |
|
||||
adjustment::Adjust::UnsafeFnPointer |
|
||||
adjustment::Adjust::MutToConstPointer => {
|
||||
// Creating a closure/fn-pointer or unsizing consumes
|
||||
// the input and stores it into the resulting rvalue.
|
||||
debug!("walk_adjustment: trivial adjustment");
|
||||
@ -732,8 +732,21 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
return_if_err!(self.mc.cat_expr_unadjusted(expr));
|
||||
self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
|
||||
}
|
||||
adjustment::AdjustDerefRef(ref adj) => {
|
||||
self.walk_autoderefref(expr, adj);
|
||||
adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
|
||||
debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
|
||||
|
||||
self.walk_autoderefs(expr, autoderefs);
|
||||
|
||||
let cmt_derefd =
|
||||
return_if_err!(self.mc.cat_expr_autoderefd(expr, autoderefs));
|
||||
|
||||
let cmt_refd =
|
||||
self.walk_autoref(expr, cmt_derefd, autoref);
|
||||
|
||||
if unsize {
|
||||
// Unsizing consumes the thin pointer and produces a fat one.
|
||||
self.delegate_consume(expr.id, expr.span, cmt_refd);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -770,28 +783,6 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn walk_autoderefref(&mut self,
|
||||
expr: &hir::Expr,
|
||||
adj: &adjustment::AutoDerefRef<'tcx>) {
|
||||
debug!("walk_autoderefref expr={:?} adj={:?}",
|
||||
expr,
|
||||
adj);
|
||||
|
||||
self.walk_autoderefs(expr, adj.autoderefs);
|
||||
|
||||
let cmt_derefd =
|
||||
return_if_err!(self.mc.cat_expr_autoderefd(expr, adj.autoderefs));
|
||||
|
||||
let cmt_refd =
|
||||
self.walk_autoref(expr, cmt_derefd, adj.autoref);
|
||||
|
||||
if adj.unsize.is_some() {
|
||||
// Unsizing consumes the thin pointer and produces a fat one.
|
||||
self.delegate_consume(expr.id, expr.span, cmt_refd);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Walks the autoref `opt_autoref` applied to the autoderef'd
|
||||
/// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
|
||||
/// after all relevant autoderefs have occurred. Because AutoRefs
|
||||
@ -803,7 +794,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
fn walk_autoref(&mut self,
|
||||
expr: &hir::Expr,
|
||||
cmt_base: mc::cmt<'tcx>,
|
||||
opt_autoref: Option<adjustment::AutoRef<'tcx>>)
|
||||
opt_autoref: Option<adjustment::AutoBorrow<'tcx>>)
|
||||
-> mc::cmt<'tcx>
|
||||
{
|
||||
debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
|
||||
@ -822,7 +813,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
};
|
||||
|
||||
match *autoref {
|
||||
adjustment::AutoPtr(r, m) => {
|
||||
adjustment::AutoBorrow::Ref(r, m) => {
|
||||
self.delegate.borrow(expr.id,
|
||||
expr.span,
|
||||
cmt_base,
|
||||
@ -831,7 +822,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
|
||||
AutoRef);
|
||||
}
|
||||
|
||||
adjustment::AutoUnsafe(m) => {
|
||||
adjustment::AutoBorrow::RawPtr(m) => {
|
||||
debug!("walk_autoref: expr.id={} cmt_base={:?}",
|
||||
expr.id,
|
||||
cmt_base);
|
||||
|
@ -163,7 +163,7 @@ impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for ExprVisitor<'a, 'gcx, 'tcx> {
|
||||
if let hir::ExprPath(..) = expr.node {
|
||||
match self.infcx.tcx.expect_def(expr.id) {
|
||||
Def::Fn(did) if self.def_id_is_transmute(did) => {
|
||||
let typ = self.infcx.tcx.node_id_to_type(expr.id);
|
||||
let typ = self.infcx.tcx.tables().node_id_to_type(expr.id);
|
||||
match typ.sty {
|
||||
ty::TyFnDef(.., ref bare_fn_ty) if bare_fn_ty.abi == RustIntrinsic => {
|
||||
let from = bare_fn_ty.sig.0.inputs[0];
|
||||
|
@ -1081,7 +1081,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
|
||||
hir::ExprAssignOp(_, ref l, ref r) => {
|
||||
// an overloaded assign op is like a method call
|
||||
if self.ir.tcx.is_method_call(expr.id) {
|
||||
if self.ir.tcx.tables().is_method_call(expr.id) {
|
||||
let succ = self.propagate_through_expr(&l, succ);
|
||||
self.propagate_through_expr(&r, succ)
|
||||
} else {
|
||||
@ -1113,8 +1113,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
|
||||
hir::ExprCall(ref f, ref args) => {
|
||||
// FIXME(canndrew): This is_never should really be an is_uninhabited
|
||||
let diverges = !self.ir.tcx.is_method_call(expr.id) &&
|
||||
self.ir.tcx.expr_ty_adjusted(&f).fn_ret().0.is_never();
|
||||
let diverges = !self.ir.tcx.tables().is_method_call(expr.id) &&
|
||||
self.ir.tcx.tables().expr_ty_adjusted(&f).fn_ret().0.is_never();
|
||||
let succ = if diverges {
|
||||
self.s.exit_ln
|
||||
} else {
|
||||
@ -1126,7 +1126,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
|
||||
hir::ExprMethodCall(.., ref args) => {
|
||||
let method_call = ty::MethodCall::expr(expr.id);
|
||||
let method_ty = self.ir.tcx.tables.borrow().method_map[&method_call].ty;
|
||||
let method_ty = self.ir.tcx.tables().method_map[&method_call].ty;
|
||||
// FIXME(canndrew): This is_never should really be an is_uninhabited
|
||||
let succ = if method_ty.fn_ret().0.is_never() {
|
||||
self.s.exit_ln
|
||||
@ -1409,7 +1409,7 @@ fn check_expr(this: &mut Liveness, expr: &Expr) {
|
||||
}
|
||||
|
||||
hir::ExprAssignOp(_, ref l, _) => {
|
||||
if !this.ir.tcx.is_method_call(expr.id) {
|
||||
if !this.ir.tcx.tables().is_method_call(expr.id) {
|
||||
this.check_lvalue(&l);
|
||||
}
|
||||
|
||||
@ -1459,7 +1459,7 @@ fn check_fn(_v: &Liveness,
|
||||
|
||||
impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
fn fn_ret(&self, id: NodeId) -> ty::Binder<Ty<'tcx>> {
|
||||
let fn_ty = self.ir.tcx.node_id_to_type(id);
|
||||
let fn_ty = self.ir.tcx.tables().node_id_to_type(id);
|
||||
match fn_ty.sty {
|
||||
ty::TyClosure(closure_def_id, substs) =>
|
||||
self.ir.tcx.closure_type(closure_def_id, substs).sig.output(),
|
||||
@ -1502,7 +1502,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|
||||
None if !body.stmts.is_empty() =>
|
||||
match body.stmts.last().unwrap().node {
|
||||
hir::StmtSemi(ref e, _) => {
|
||||
self.ir.tcx.expr_ty(&e) == fn_ret
|
||||
self.ir.tcx.tables().expr_ty(&e) == fn_ret
|
||||
},
|
||||
_ => false
|
||||
},
|
||||
|
@ -354,11 +354,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
fn expr_ty_adjusted(&self, expr: &hir::Expr) -> McResult<Ty<'tcx>> {
|
||||
let unadjusted_ty = self.expr_ty(expr)?;
|
||||
Ok(unadjusted_ty.adjust(
|
||||
self.tcx(), expr.span, expr.id,
|
||||
self.infcx.adjustments().get(&expr.id),
|
||||
|method_call| self.infcx.node_method_ty(method_call)))
|
||||
self.infcx.expr_ty_adjusted(expr)
|
||||
}
|
||||
|
||||
fn node_ty(&self, id: ast::NodeId) -> McResult<Ty<'tcx>> {
|
||||
@ -396,19 +392,21 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
Some(adjustment) => {
|
||||
match *adjustment {
|
||||
adjustment::AdjustDerefRef(
|
||||
adjustment::AutoDerefRef {
|
||||
autoref: None, unsize: None, autoderefs, ..}) => {
|
||||
match adjustment.kind {
|
||||
adjustment::Adjust::DerefRef {
|
||||
autoderefs,
|
||||
autoref: None,
|
||||
unsize: false
|
||||
} => {
|
||||
// Equivalent to *expr or something similar.
|
||||
self.cat_expr_autoderefd(expr, autoderefs)
|
||||
}
|
||||
|
||||
adjustment::AdjustNeverToAny(..) |
|
||||
adjustment::AdjustReifyFnPointer |
|
||||
adjustment::AdjustUnsafeFnPointer |
|
||||
adjustment::AdjustMutToConstPointer |
|
||||
adjustment::AdjustDerefRef(_) => {
|
||||
adjustment::Adjust::NeverToAny |
|
||||
adjustment::Adjust::ReifyFnPointer |
|
||||
adjustment::Adjust::UnsafeFnPointer |
|
||||
adjustment::Adjust::MutToConstPointer |
|
||||
adjustment::Adjust::DerefRef {..} => {
|
||||
debug!("cat_expr({:?}): {:?}",
|
||||
adjustment,
|
||||
expr);
|
||||
|
@ -116,7 +116,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ReachableContext<'a, 'tcx> {
|
||||
}
|
||||
hir::ExprMethodCall(..) => {
|
||||
let method_call = ty::MethodCall::expr(expr.id);
|
||||
let def_id = self.tcx.tables.borrow().method_map[&method_call].def_id;
|
||||
let def_id = self.tcx.tables().method_map[&method_call].def_id;
|
||||
|
||||
// Mark the trait item (and, possibly, its default impl) as reachable
|
||||
// Or mark inherent impl item as reachable
|
||||
|
@ -555,11 +555,11 @@ pub fn check_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, e: &hir::Expr,
|
||||
hir::ExprMethodCall(i, ..) => {
|
||||
span = i.span;
|
||||
let method_call = ty::MethodCall::expr(e.id);
|
||||
tcx.tables.borrow().method_map[&method_call].def_id
|
||||
tcx.tables().method_map[&method_call].def_id
|
||||
}
|
||||
hir::ExprField(ref base_e, ref field) => {
|
||||
span = field.span;
|
||||
match tcx.expr_ty_adjusted(base_e).sty {
|
||||
match tcx.tables().expr_ty_adjusted(base_e).sty {
|
||||
ty::TyAdt(def, _) => {
|
||||
def.struct_variant().field_named(field.node).did
|
||||
}
|
||||
@ -569,7 +569,7 @@ pub fn check_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, e: &hir::Expr,
|
||||
}
|
||||
hir::ExprTupField(ref base_e, ref field) => {
|
||||
span = field.span;
|
||||
match tcx.expr_ty_adjusted(base_e).sty {
|
||||
match tcx.tables().expr_ty_adjusted(base_e).sty {
|
||||
ty::TyAdt(def, _) => {
|
||||
def.struct_variant().fields[field.node].did
|
||||
}
|
||||
@ -580,7 +580,7 @@ pub fn check_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, e: &hir::Expr,
|
||||
}
|
||||
}
|
||||
hir::ExprStruct(_, ref expr_fields, _) => {
|
||||
match tcx.expr_ty(e).sty {
|
||||
match tcx.tables().expr_ty(e).sty {
|
||||
ty::TyAdt(adt, ..) => match adt.adt_kind() {
|
||||
AdtKind::Struct | AdtKind::Union => {
|
||||
// check the stability of each field that appears
|
||||
@ -637,7 +637,7 @@ pub fn check_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pat: &hir::Pat,
|
||||
debug!("check_pat(pat = {:?})", pat);
|
||||
if is_internal(tcx, pat.span) { return; }
|
||||
|
||||
let v = match tcx.pat_ty_opt(pat).map(|ty| &ty.sty) {
|
||||
let v = match tcx.tables().pat_ty_opt(pat).map(|ty| &ty.sty) {
|
||||
Some(&ty::TyAdt(adt, _)) if !adt.is_enum() => adt.struct_variant(),
|
||||
_ => return,
|
||||
};
|
||||
|
@ -918,6 +918,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
|
||||
"the directory the MIR is dumped into"),
|
||||
perf_stats: bool = (false, parse_bool, [UNTRACKED],
|
||||
"print some performance-related statistics"),
|
||||
hir_stats: bool = (false, parse_bool, [UNTRACKED],
|
||||
"print some statistics about AST and HIR"),
|
||||
}
|
||||
|
||||
pub fn default_lib_output() -> CrateType {
|
||||
|
@ -8,10 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub use self::AutoAdjustment::*;
|
||||
pub use self::AutoRef::*;
|
||||
|
||||
use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFoldable};
|
||||
use ty::{self, Ty, TyCtxt, TypeAndMut};
|
||||
use ty::LvaluePreference::{NoPreference};
|
||||
|
||||
use syntax::ast;
|
||||
@ -20,116 +17,122 @@ use syntax_pos::Span;
|
||||
use hir;
|
||||
|
||||
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
|
||||
pub enum AutoAdjustment<'tcx> {
|
||||
AdjustNeverToAny(Ty<'tcx>), // go from ! to any type
|
||||
AdjustReifyFnPointer, // go from a fn-item type to a fn-pointer type
|
||||
AdjustUnsafeFnPointer, // go from a safe fn pointer to an unsafe fn pointer
|
||||
AdjustMutToConstPointer, // go from a mut raw pointer to a const raw pointer
|
||||
AdjustDerefRef(AutoDerefRef<'tcx>),
|
||||
pub struct Adjustment<'tcx> {
|
||||
pub kind: Adjust<'tcx>,
|
||||
pub target: Ty<'tcx>
|
||||
}
|
||||
|
||||
/// Represents coercing a pointer to a different kind of pointer - where 'kind'
|
||||
/// here means either or both of raw vs borrowed vs unique and fat vs thin.
|
||||
///
|
||||
/// We transform pointers by following the following steps in order:
|
||||
/// 1. Deref the pointer `self.autoderefs` times (may be 0).
|
||||
/// 2. If `autoref` is `Some(_)`, then take the address and produce either a
|
||||
/// `&` or `*` pointer.
|
||||
/// 3. If `unsize` is `Some(_)`, then apply the unsize transformation,
|
||||
/// which will do things like convert thin pointers to fat
|
||||
/// pointers, or convert structs containing thin pointers to
|
||||
/// structs containing fat pointers, or convert between fat
|
||||
/// pointers. We don't store the details of how the transform is
|
||||
/// done (in fact, we don't know that, because it might depend on
|
||||
/// the precise type parameters). We just store the target
|
||||
/// type. Trans figures out what has to be done at monomorphization
|
||||
/// time based on the precise source/target type at hand.
|
||||
///
|
||||
/// To make that more concrete, here are some common scenarios:
|
||||
///
|
||||
/// 1. The simplest cases are where the pointer is not adjusted fat vs thin.
|
||||
/// Here the pointer will be dereferenced N times (where a dereference can
|
||||
/// happen to raw or borrowed pointers or any smart pointer which implements
|
||||
/// Deref, including Box<_>). The number of dereferences is given by
|
||||
/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
|
||||
/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
|
||||
/// None.
|
||||
///
|
||||
/// 2. A thin-to-fat coercon involves unsizing the underlying data. We start
|
||||
/// with a thin pointer, deref a number of times, unsize the underlying data,
|
||||
/// then autoref. The 'unsize' phase may change a fixed length array to a
|
||||
/// dynamically sized one, a concrete object to a trait object, or statically
|
||||
/// sized struct to a dyncamically sized one. E.g., &[i32; 4] -> &[i32] is
|
||||
/// represented by:
|
||||
///
|
||||
/// ```
|
||||
/// AutoDerefRef {
|
||||
/// autoderefs: 1, // &[i32; 4] -> [i32; 4]
|
||||
/// autoref: Some(AutoPtr), // [i32] -> &[i32]
|
||||
/// unsize: Some([i32]), // [i32; 4] -> [i32]
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
|
||||
/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
|
||||
/// The autoderef and -ref are the same as in the above example, but the type
|
||||
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
|
||||
/// the underlying conversions from `[i32; 4]` to `[i32]`.
|
||||
///
|
||||
/// 3. Coercing a `Box<T>` to `Box<Trait>` is an interesting special case. In
|
||||
/// that case, we have the pointer we need coming in, so there are no
|
||||
/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
|
||||
/// At some point, of course, `Box` should move out of the compiler, in which
|
||||
/// case this is analogous to transformating a struct. E.g., Box<[i32; 4]> ->
|
||||
/// Box<[i32]> is represented by:
|
||||
///
|
||||
/// ```
|
||||
/// AutoDerefRef {
|
||||
/// autoderefs: 0,
|
||||
/// autoref: None,
|
||||
/// unsize: Some(Box<[i32]>),
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
|
||||
pub struct AutoDerefRef<'tcx> {
|
||||
/// Step 1. Apply a number of dereferences, producing an lvalue.
|
||||
pub autoderefs: usize,
|
||||
#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum Adjust<'tcx> {
|
||||
/// Go from ! to any type.
|
||||
NeverToAny,
|
||||
|
||||
/// Step 2. Optionally produce a pointer/reference from the value.
|
||||
pub autoref: Option<AutoRef<'tcx>>,
|
||||
/// Go from a fn-item type to a fn-pointer type.
|
||||
ReifyFnPointer,
|
||||
|
||||
/// Step 3. Unsize a pointer/reference value, e.g. `&[T; n]` to
|
||||
/// `&[T]`. The stored type is the target pointer type. Note that
|
||||
/// the source could be a thin or fat pointer.
|
||||
pub unsize: Option<Ty<'tcx>>,
|
||||
/// Go from a safe fn pointer to an unsafe fn pointer.
|
||||
UnsafeFnPointer,
|
||||
|
||||
/// Go from a mut raw pointer to a const raw pointer.
|
||||
MutToConstPointer,
|
||||
|
||||
/// Represents coercing a pointer to a different kind of pointer - where 'kind'
|
||||
/// here means either or both of raw vs borrowed vs unique and fat vs thin.
|
||||
///
|
||||
/// We transform pointers by following the following steps in order:
|
||||
/// 1. Deref the pointer `self.autoderefs` times (may be 0).
|
||||
/// 2. If `autoref` is `Some(_)`, then take the address and produce either a
|
||||
/// `&` or `*` pointer.
|
||||
/// 3. If `unsize` is `Some(_)`, then apply the unsize transformation,
|
||||
/// which will do things like convert thin pointers to fat
|
||||
/// pointers, or convert structs containing thin pointers to
|
||||
/// structs containing fat pointers, or convert between fat
|
||||
/// pointers. We don't store the details of how the transform is
|
||||
/// done (in fact, we don't know that, because it might depend on
|
||||
/// the precise type parameters). We just store the target
|
||||
/// type. Trans figures out what has to be done at monomorphization
|
||||
/// time based on the precise source/target type at hand.
|
||||
///
|
||||
/// To make that more concrete, here are some common scenarios:
|
||||
///
|
||||
/// 1. The simplest cases are where the pointer is not adjusted fat vs thin.
|
||||
/// Here the pointer will be dereferenced N times (where a dereference can
|
||||
/// happen to raw or borrowed pointers or any smart pointer which implements
|
||||
/// Deref, including Box<_>). The number of dereferences is given by
|
||||
/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
|
||||
/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
|
||||
/// None.
|
||||
///
|
||||
/// 2. A thin-to-fat coercon involves unsizing the underlying data. We start
|
||||
/// with a thin pointer, deref a number of times, unsize the underlying data,
|
||||
/// then autoref. The 'unsize' phase may change a fixed length array to a
|
||||
/// dynamically sized one, a concrete object to a trait object, or statically
|
||||
/// sized struct to a dyncamically sized one. E.g., &[i32; 4] -> &[i32] is
|
||||
/// represented by:
|
||||
///
|
||||
/// ```
|
||||
/// Adjust::DerefRef {
|
||||
/// autoderefs: 1, // &[i32; 4] -> [i32; 4]
|
||||
/// autoref: Some(AutoBorrow::Ref), // [i32] -> &[i32]
|
||||
/// unsize: Some([i32]), // [i32; 4] -> [i32]
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
|
||||
/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
|
||||
/// The autoderef and -ref are the same as in the above example, but the type
|
||||
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
|
||||
/// the underlying conversions from `[i32; 4]` to `[i32]`.
|
||||
///
|
||||
/// 3. Coercing a `Box<T>` to `Box<Trait>` is an interesting special case. In
|
||||
/// that case, we have the pointer we need coming in, so there are no
|
||||
/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
|
||||
/// At some point, of course, `Box` should move out of the compiler, in which
|
||||
/// case this is analogous to transformating a struct. E.g., Box<[i32; 4]> ->
|
||||
/// Box<[i32]> is represented by:
|
||||
///
|
||||
/// ```
|
||||
/// Adjust::DerefRef {
|
||||
/// autoderefs: 0,
|
||||
/// autoref: None,
|
||||
/// unsize: Some(Box<[i32]>),
|
||||
/// }
|
||||
/// ```
|
||||
DerefRef {
|
||||
/// Step 1. Apply a number of dereferences, producing an lvalue.
|
||||
autoderefs: usize,
|
||||
|
||||
/// Step 2. Optionally produce a pointer/reference from the value.
|
||||
autoref: Option<AutoBorrow<'tcx>>,
|
||||
|
||||
/// Step 3. Unsize a pointer/reference value, e.g. `&[T; n]` to
|
||||
/// `&[T]`. Note that the source could be a thin or fat pointer.
|
||||
unsize: bool,
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> AutoAdjustment<'tcx> {
|
||||
impl<'tcx> Adjustment<'tcx> {
|
||||
pub fn is_identity(&self) -> bool {
|
||||
match *self {
|
||||
AdjustNeverToAny(ty) => ty.is_never(),
|
||||
AdjustReifyFnPointer |
|
||||
AdjustUnsafeFnPointer |
|
||||
AdjustMutToConstPointer => false,
|
||||
AdjustDerefRef(ref r) => r.is_identity(),
|
||||
match self.kind {
|
||||
Adjust::NeverToAny => self.target.is_never(),
|
||||
|
||||
Adjust::DerefRef { autoderefs: 0, autoref: None, unsize: false } => true,
|
||||
|
||||
Adjust::ReifyFnPointer |
|
||||
Adjust::UnsafeFnPointer |
|
||||
Adjust::MutToConstPointer |
|
||||
Adjust::DerefRef {..} => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'tcx> AutoDerefRef<'tcx> {
|
||||
pub fn is_identity(&self) -> bool {
|
||||
self.autoderefs == 0 && self.unsize.is_none() && self.autoref.is_none()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum AutoRef<'tcx> {
|
||||
pub enum AutoBorrow<'tcx> {
|
||||
/// Convert from T to &T.
|
||||
AutoPtr(&'tcx ty::Region, hir::Mutability),
|
||||
Ref(&'tcx ty::Region, hir::Mutability),
|
||||
|
||||
/// Convert from T to *T.
|
||||
/// Value to thin pointer.
|
||||
AutoUnsafe(hir::Mutability),
|
||||
RawPtr(hir::Mutability),
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
|
||||
@ -139,84 +142,6 @@ pub enum CustomCoerceUnsized {
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> ty::TyS<'tcx> {
|
||||
/// See `expr_ty_adjusted`
|
||||
pub fn adjust<F>(&'tcx self,
|
||||
tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
span: Span,
|
||||
expr_id: ast::NodeId,
|
||||
adjustment: Option<&AutoAdjustment<'tcx>>,
|
||||
mut method_type: F)
|
||||
-> Ty<'tcx> where
|
||||
F: FnMut(ty::MethodCall) -> Option<Ty<'tcx>>,
|
||||
{
|
||||
if let ty::TyError = self.sty {
|
||||
return self;
|
||||
}
|
||||
|
||||
return match adjustment {
|
||||
Some(adjustment) => {
|
||||
match *adjustment {
|
||||
AdjustNeverToAny(ref ty) => ty,
|
||||
|
||||
AdjustReifyFnPointer => {
|
||||
match self.sty {
|
||||
ty::TyFnDef(.., f) => tcx.mk_fn_ptr(f),
|
||||
_ => {
|
||||
bug!("AdjustReifyFnPointer adjustment on non-fn-item: {:?}",
|
||||
self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AdjustUnsafeFnPointer => {
|
||||
match self.sty {
|
||||
ty::TyFnPtr(b) => tcx.safe_to_unsafe_fn_ty(b),
|
||||
ref b => {
|
||||
bug!("AdjustUnsafeFnPointer adjustment on non-fn-ptr: {:?}",
|
||||
b);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AdjustMutToConstPointer => {
|
||||
match self.sty {
|
||||
ty::TyRawPtr(mt) => tcx.mk_ptr(ty::TypeAndMut {
|
||||
ty: mt.ty,
|
||||
mutbl: hir::MutImmutable
|
||||
}),
|
||||
ref b => {
|
||||
bug!("AdjustMutToConstPointer on non-raw-ptr: {:?}",
|
||||
b);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AdjustDerefRef(ref adj) => {
|
||||
let mut adjusted_ty = self;
|
||||
|
||||
if !adjusted_ty.references_error() {
|
||||
for i in 0..adj.autoderefs {
|
||||
adjusted_ty =
|
||||
adjusted_ty.adjust_for_autoderef(tcx,
|
||||
expr_id,
|
||||
span,
|
||||
i as u32,
|
||||
&mut method_type);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(target) = adj.unsize {
|
||||
target
|
||||
} else {
|
||||
adjusted_ty.adjust_for_autoref(tcx, adj.autoref)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => self
|
||||
};
|
||||
}
|
||||
|
||||
pub fn adjust_for_autoderef<F>(&'tcx self,
|
||||
tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
expr_id: ast::NodeId,
|
||||
@ -247,14 +172,14 @@ impl<'a, 'gcx, 'tcx> ty::TyS<'tcx> {
|
||||
}
|
||||
|
||||
pub fn adjust_for_autoref(&'tcx self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
autoref: Option<AutoRef<'tcx>>)
|
||||
autoref: Option<AutoBorrow<'tcx>>)
|
||||
-> Ty<'tcx> {
|
||||
match autoref {
|
||||
None => self,
|
||||
Some(AutoPtr(r, m)) => {
|
||||
Some(AutoBorrow::Ref(r, m)) => {
|
||||
tcx.mk_ref(r, TypeAndMut { ty: self, mutbl: m })
|
||||
}
|
||||
Some(AutoUnsafe(m)) => {
|
||||
Some(AutoBorrow::RawPtr(m)) => {
|
||||
tcx.mk_ptr(TypeAndMut { ty: self, mutbl: m })
|
||||
}
|
||||
}
|
||||
|
@ -41,7 +41,7 @@ use rustc_data_structures::accumulate_vec::AccumulateVec;
|
||||
|
||||
use arena::TypedArena;
|
||||
use std::borrow::Borrow;
|
||||
use std::cell::{Cell, RefCell, Ref};
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::mem;
|
||||
use std::ops::Deref;
|
||||
@ -212,7 +212,7 @@ pub struct Tables<'tcx> {
|
||||
/// other items.
|
||||
pub item_substs: NodeMap<ty::ItemSubsts<'tcx>>,
|
||||
|
||||
pub adjustments: NodeMap<ty::adjustment::AutoAdjustment<'tcx>>,
|
||||
pub adjustments: NodeMap<ty::adjustment::Adjustment<'tcx>>,
|
||||
|
||||
pub method_map: ty::MethodMap<'tcx>,
|
||||
|
||||
@ -255,6 +255,76 @@ impl<'a, 'gcx, 'tcx> Tables<'tcx> {
|
||||
fru_field_types: NodeMap()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node_id_to_type(&self, id: NodeId) -> Ty<'tcx> {
|
||||
match self.node_id_to_type_opt(id) {
|
||||
Some(ty) => ty,
|
||||
None => {
|
||||
bug!("node_id_to_type: no type for node `{}`",
|
||||
tls::with(|tcx| tcx.map.node_to_string(id)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node_id_to_type_opt(&self, id: NodeId) -> Option<Ty<'tcx>> {
|
||||
self.node_types.get(&id).cloned()
|
||||
}
|
||||
|
||||
pub fn node_id_item_substs(&self, id: NodeId) -> Option<&'tcx Substs<'tcx>> {
|
||||
self.item_substs.get(&id).map(|ts| ts.substs)
|
||||
}
|
||||
|
||||
// Returns the type of a pattern as a monotype. Like @expr_ty, this function
|
||||
// doesn't provide type parameter substitutions.
|
||||
pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
|
||||
self.node_id_to_type(pat.id)
|
||||
}
|
||||
|
||||
pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
|
||||
self.node_id_to_type_opt(pat.id)
|
||||
}
|
||||
|
||||
// Returns the type of an expression as a monotype.
|
||||
//
|
||||
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
|
||||
// some cases, we insert `Adjustment` annotations such as auto-deref or
|
||||
// auto-ref. The type returned by this function does not consider such
|
||||
// adjustments. See `expr_ty_adjusted()` instead.
|
||||
//
|
||||
// NB (2): This type doesn't provide type parameter substitutions; e.g. if you
|
||||
// ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
|
||||
// instead of "fn(ty) -> T with T = isize".
|
||||
pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
|
||||
self.node_id_to_type(expr.id)
|
||||
}
|
||||
|
||||
pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
|
||||
self.node_id_to_type_opt(expr.id)
|
||||
}
|
||||
|
||||
/// Returns the type of `expr`, considering any `Adjustment`
|
||||
/// entry recorded for that expression.
|
||||
pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
|
||||
self.adjustments.get(&expr.id)
|
||||
.map_or_else(|| self.expr_ty(expr), |adj| adj.target)
|
||||
}
|
||||
|
||||
pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
|
||||
self.adjustments.get(&expr.id)
|
||||
.map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
|
||||
}
|
||||
|
||||
pub fn is_method_call(&self, expr_id: NodeId) -> bool {
|
||||
self.method_map.contains_key(&ty::MethodCall::expr(expr_id))
|
||||
}
|
||||
|
||||
pub fn is_overloaded_autoderef(&self, expr_id: NodeId, autoderefs: u32) -> bool {
|
||||
self.method_map.contains_key(&ty::MethodCall::autoderef(expr_id, autoderefs))
|
||||
}
|
||||
|
||||
pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
|
||||
Some(self.upvar_capture_map.get(&upvar_id).unwrap().clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> CommonTypes<'tcx> {
|
||||
@ -599,14 +669,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
self.ty_param_defs.borrow().get(&node_id).unwrap().clone()
|
||||
}
|
||||
|
||||
pub fn node_types(self) -> Ref<'a, NodeMap<Ty<'tcx>>> {
|
||||
fn projection<'a, 'tcx>(tables: &'a Tables<'tcx>) -> &'a NodeMap<Ty<'tcx>> {
|
||||
&tables.node_types
|
||||
}
|
||||
|
||||
Ref::map(self.tables.borrow(), projection)
|
||||
}
|
||||
|
||||
pub fn node_type_insert(self, id: NodeId, ty: Ty<'gcx>) {
|
||||
self.tables.borrow_mut().node_types.insert(id, ty);
|
||||
}
|
||||
|
@ -176,8 +176,8 @@ pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized {
|
||||
r.super_fold_with(self)
|
||||
}
|
||||
|
||||
fn fold_autoref(&mut self, ar: &adjustment::AutoRef<'tcx>)
|
||||
-> adjustment::AutoRef<'tcx> {
|
||||
fn fold_autoref(&mut self, ar: &adjustment::AutoBorrow<'tcx>)
|
||||
-> adjustment::AutoBorrow<'tcx> {
|
||||
ar.super_fold_with(self)
|
||||
}
|
||||
}
|
||||
|
@ -2120,80 +2120,8 @@ impl BorrowKind {
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
pub fn node_id_to_type(self, id: NodeId) -> Ty<'gcx> {
|
||||
match self.node_id_to_type_opt(id) {
|
||||
Some(ty) => ty,
|
||||
None => bug!("node_id_to_type: no type for node `{}`",
|
||||
self.map.node_to_string(id))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn node_id_to_type_opt(self, id: NodeId) -> Option<Ty<'gcx>> {
|
||||
self.tables.borrow().node_types.get(&id).cloned()
|
||||
}
|
||||
|
||||
pub fn node_id_item_substs(self, id: NodeId) -> ItemSubsts<'gcx> {
|
||||
match self.tables.borrow().item_substs.get(&id) {
|
||||
None => ItemSubsts {
|
||||
substs: self.global_tcx().intern_substs(&[])
|
||||
},
|
||||
Some(ts) => ts.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the type of a pattern as a monotype. Like @expr_ty, this function
|
||||
// doesn't provide type parameter substitutions.
|
||||
pub fn pat_ty(self, pat: &hir::Pat) -> Ty<'gcx> {
|
||||
self.node_id_to_type(pat.id)
|
||||
}
|
||||
pub fn pat_ty_opt(self, pat: &hir::Pat) -> Option<Ty<'gcx>> {
|
||||
self.node_id_to_type_opt(pat.id)
|
||||
}
|
||||
|
||||
// Returns the type of an expression as a monotype.
|
||||
//
|
||||
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
|
||||
// some cases, we insert `AutoAdjustment` annotations such as auto-deref or
|
||||
// auto-ref. The type returned by this function does not consider such
|
||||
// adjustments. See `expr_ty_adjusted()` instead.
|
||||
//
|
||||
// NB (2): This type doesn't provide type parameter substitutions; e.g. if you
|
||||
// ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
|
||||
// instead of "fn(ty) -> T with T = isize".
|
||||
pub fn expr_ty(self, expr: &hir::Expr) -> Ty<'gcx> {
|
||||
self.node_id_to_type(expr.id)
|
||||
}
|
||||
|
||||
pub fn expr_ty_opt(self, expr: &hir::Expr) -> Option<Ty<'gcx>> {
|
||||
self.node_id_to_type_opt(expr.id)
|
||||
}
|
||||
|
||||
/// Returns the type of `expr`, considering any `AutoAdjustment`
|
||||
/// entry recorded for that expression.
|
||||
///
|
||||
/// It would almost certainly be better to store the adjusted ty in with
|
||||
/// the `AutoAdjustment`, but I opted not to do this because it would
|
||||
/// require serializing and deserializing the type and, although that's not
|
||||
/// hard to do, I just hate that code so much I didn't want to touch it
|
||||
/// unless it was to fix it properly, which seemed a distraction from the
|
||||
/// thread at hand! -nmatsakis
|
||||
pub fn expr_ty_adjusted(self, expr: &hir::Expr) -> Ty<'gcx> {
|
||||
self.expr_ty(expr)
|
||||
.adjust(self.global_tcx(), expr.span, expr.id,
|
||||
self.tables.borrow().adjustments.get(&expr.id),
|
||||
|method_call| {
|
||||
self.tables.borrow().method_map.get(&method_call).map(|method| method.ty)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn expr_ty_adjusted_opt(self, expr: &hir::Expr) -> Option<Ty<'gcx>> {
|
||||
self.expr_ty_opt(expr).map(|t| t.adjust(self.global_tcx(),
|
||||
expr.span,
|
||||
expr.id,
|
||||
self.tables.borrow().adjustments.get(&expr.id),
|
||||
|method_call| {
|
||||
self.tables.borrow().method_map.get(&method_call).map(|method| method.ty)
|
||||
}))
|
||||
pub fn tables(self) -> Ref<'a, Tables<'gcx>> {
|
||||
self.tables.borrow()
|
||||
}
|
||||
|
||||
pub fn expr_span(self, id: NodeId) -> Span {
|
||||
@ -2908,19 +2836,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
self.mk_region(ty::ReScope(self.region_maps.node_extent(id)))
|
||||
}
|
||||
|
||||
pub fn is_method_call(self, expr_id: NodeId) -> bool {
|
||||
self.tables.borrow().method_map.contains_key(&MethodCall::expr(expr_id))
|
||||
}
|
||||
|
||||
pub fn is_overloaded_autoderef(self, expr_id: NodeId, autoderefs: u32) -> bool {
|
||||
self.tables.borrow().method_map.contains_key(&MethodCall::autoderef(expr_id,
|
||||
autoderefs))
|
||||
}
|
||||
|
||||
pub fn upvar_capture(self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
|
||||
Some(self.tables.borrow().upvar_capture_map.get(&upvar_id).unwrap().clone())
|
||||
}
|
||||
|
||||
pub fn visit_all_items_in_krate<V,F>(self,
|
||||
dep_node_fn: F,
|
||||
visitor: &mut V)
|
||||
|
@ -218,15 +218,15 @@ impl<'a, 'tcx> Lift<'tcx> for ty::ItemSubsts<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoRef<'a> {
|
||||
type Lifted = ty::adjustment::AutoRef<'tcx>;
|
||||
impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> {
|
||||
type Lifted = ty::adjustment::AutoBorrow<'tcx>;
|
||||
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
|
||||
match *self {
|
||||
ty::adjustment::AutoPtr(r, m) => {
|
||||
tcx.lift(&r).map(|r| ty::adjustment::AutoPtr(r, m))
|
||||
ty::adjustment::AutoBorrow::Ref(r, m) => {
|
||||
tcx.lift(&r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m))
|
||||
}
|
||||
ty::adjustment::AutoUnsafe(m) => {
|
||||
Some(ty::adjustment::AutoUnsafe(m))
|
||||
ty::adjustment::AutoBorrow::RawPtr(m) => {
|
||||
Some(ty::adjustment::AutoBorrow::RawPtr(m))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -676,13 +676,13 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ItemSubsts<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoRef<'tcx> {
|
||||
impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoBorrow<'tcx> {
|
||||
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
|
||||
match *self {
|
||||
ty::adjustment::AutoPtr(ref r, m) => {
|
||||
ty::adjustment::AutoPtr(r.fold_with(folder), m)
|
||||
ty::adjustment::AutoBorrow::Ref(ref r, m) => {
|
||||
ty::adjustment::AutoBorrow::Ref(r.fold_with(folder), m)
|
||||
}
|
||||
ty::adjustment::AutoUnsafe(m) => ty::adjustment::AutoUnsafe(m)
|
||||
ty::adjustment::AutoBorrow::RawPtr(m) => ty::adjustment::AutoBorrow::RawPtr(m)
|
||||
}
|
||||
}
|
||||
|
||||
@ -692,8 +692,8 @@ impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoRef<'tcx> {
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
match *self {
|
||||
ty::adjustment::AutoPtr(r, _m) => r.visit_with(visitor),
|
||||
ty::adjustment::AutoUnsafe(_m) => false,
|
||||
ty::adjustment::AutoBorrow::Ref(r, _m) => r.visit_with(visitor),
|
||||
ty::adjustment::AutoBorrow::RawPtr(_m) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -75,6 +75,26 @@ pub fn duration_to_secs_str(dur: Duration) -> String {
|
||||
format!("{:.3}", secs)
|
||||
}
|
||||
|
||||
pub fn to_readable_str(mut val: usize) -> String {
|
||||
let mut groups = vec![];
|
||||
loop {
|
||||
let group = val % 1000;
|
||||
|
||||
val /= 1000;
|
||||
|
||||
if val == 0 {
|
||||
groups.push(format!("{}", group));
|
||||
break
|
||||
} else {
|
||||
groups.push(format!("{:03}", group));
|
||||
}
|
||||
}
|
||||
|
||||
groups.reverse();
|
||||
|
||||
groups.join("_")
|
||||
}
|
||||
|
||||
pub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where
|
||||
F: FnOnce() -> T,
|
||||
{
|
||||
@ -264,3 +284,17 @@ pub fn path2cstr(p: &Path) -> CString {
|
||||
pub fn path2cstr(p: &Path) -> CString {
|
||||
CString::new(p.to_str().unwrap()).unwrap()
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_to_readable_str() {
|
||||
assert_eq!("0", to_readable_str(0));
|
||||
assert_eq!("1", to_readable_str(1));
|
||||
assert_eq!("99", to_readable_str(99));
|
||||
assert_eq!("999", to_readable_str(999));
|
||||
assert_eq!("1_000", to_readable_str(1_000));
|
||||
assert_eq!("1_001", to_readable_str(1_001));
|
||||
assert_eq!("999_999", to_readable_str(999_999));
|
||||
assert_eq!("1_000_000", to_readable_str(1_000_000));
|
||||
assert_eq!("1_234_567", to_readable_str(1_234_567));
|
||||
}
|
||||
|
@ -447,32 +447,9 @@ impl<'tcx, 'container> fmt::Debug for ty::AdtDefData<'tcx, 'container> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> fmt::Debug for ty::adjustment::AutoAdjustment<'tcx> {
|
||||
impl<'tcx> fmt::Debug for ty::adjustment::Adjustment<'tcx> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
ty::adjustment::AdjustNeverToAny(ref target) => {
|
||||
write!(f, "AdjustNeverToAny({:?})", target)
|
||||
}
|
||||
ty::adjustment::AdjustReifyFnPointer => {
|
||||
write!(f, "AdjustReifyFnPointer")
|
||||
}
|
||||
ty::adjustment::AdjustUnsafeFnPointer => {
|
||||
write!(f, "AdjustUnsafeFnPointer")
|
||||
}
|
||||
ty::adjustment::AdjustMutToConstPointer => {
|
||||
write!(f, "AdjustMutToConstPointer")
|
||||
}
|
||||
ty::adjustment::AdjustDerefRef(ref data) => {
|
||||
write!(f, "{:?}", data)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> fmt::Debug for ty::adjustment::AutoDerefRef<'tcx> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "AutoDerefRef({}, unsize={:?}, {:?})",
|
||||
self.autoderefs, self.unsize, self.autoref)
|
||||
write!(f, "{:?} -> {}", self.kind, self.target)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -37,7 +37,7 @@ pub fn gather_decl<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
|
||||
decl_id: ast::NodeId,
|
||||
_decl_span: Span,
|
||||
var_id: ast::NodeId) {
|
||||
let ty = bccx.tcx.node_id_to_type(var_id);
|
||||
let ty = bccx.tcx.tables().node_id_to_type(var_id);
|
||||
let loan_path = Rc::new(LoanPath::new(LpVar(var_id), ty));
|
||||
move_data.add_move(bccx.tcx, loan_path, decl_id, Declared);
|
||||
}
|
||||
|
@ -201,7 +201,7 @@ impl<'a, 'tcx> MatchVisitor<'a, 'tcx> {
|
||||
|
||||
// Finally, check if the whole match expression is exhaustive.
|
||||
// Check for empty enum, because is_useful only works on inhabited types.
|
||||
let pat_ty = self.tcx.node_id_to_type(scrut.id);
|
||||
let pat_ty = self.tcx.tables().node_id_to_type(scrut.id);
|
||||
if inlined_arms.is_empty() {
|
||||
if !pat_ty.is_uninhabited(self.tcx) {
|
||||
// We know the type is inhabited, so this must be wrong
|
||||
@ -262,7 +262,7 @@ impl<'a, 'tcx> MatchVisitor<'a, 'tcx> {
|
||||
fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) {
|
||||
pat.walk(|p| {
|
||||
if let PatKind::Binding(hir::BindByValue(hir::MutImmutable), name, None) = p.node {
|
||||
let pat_ty = cx.tcx.pat_ty(p);
|
||||
let pat_ty = cx.tcx.tables().pat_ty(p);
|
||||
if let ty::TyAdt(edef, _) = pat_ty.sty {
|
||||
if edef.is_enum() {
|
||||
if let Def::Local(..) = cx.tcx.expect_def(p.id) {
|
||||
@ -486,7 +486,7 @@ fn check_legality_of_move_bindings(cx: &MatchVisitor,
|
||||
for pat in pats {
|
||||
pat.walk(|p| {
|
||||
if let PatKind::Binding(hir::BindByValue(..), _, ref sub) = p.node {
|
||||
let pat_ty = cx.tcx.node_id_to_type(p.id);
|
||||
let pat_ty = cx.tcx.tables().node_id_to_type(p.id);
|
||||
if pat_ty.moves_by_default(cx.tcx, cx.param_env, pat.span) {
|
||||
check_move(p, sub.as_ref().map(|p| &**p));
|
||||
}
|
||||
|
@ -27,7 +27,7 @@ use rustc::ty::util::IntTypeExt;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::traits::Reveal;
|
||||
use rustc::util::common::ErrorReported;
|
||||
use rustc::util::nodemap::NodeMap;
|
||||
use rustc::util::nodemap::DefIdMap;
|
||||
use rustc::lint;
|
||||
|
||||
use graphviz::IntoCow;
|
||||
@ -246,7 +246,7 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
pat_id: ast::NodeId,
|
||||
span: Span)
|
||||
-> Result<P<hir::Pat>, DefId> {
|
||||
let pat_ty = tcx.expr_ty(expr);
|
||||
let pat_ty = tcx.tables().expr_ty(expr);
|
||||
debug!("expr={:?} pat_ty={:?} pat_id={}", expr, pat_ty, pat_id);
|
||||
match pat_ty.sty {
|
||||
ty::TyFloat(_) => {
|
||||
@ -314,7 +314,7 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
},
|
||||
}))
|
||||
.collect::<Result<_, _>>()?;
|
||||
PatKind::Struct(path.clone(), field_pats, false)
|
||||
PatKind::Struct((**path).clone(), field_pats, false)
|
||||
}
|
||||
|
||||
hir::ExprArray(ref exprs) => {
|
||||
@ -329,7 +329,8 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
Def::StructCtor(_, CtorKind::Const) |
|
||||
Def::VariantCtor(_, CtorKind::Const) => PatKind::Path(None, path.clone()),
|
||||
Def::Const(def_id) | Def::AssociatedConst(def_id) => {
|
||||
let substs = Some(tcx.node_id_item_substs(expr.id).substs);
|
||||
let substs = Some(tcx.tables().node_id_item_substs(expr.id)
|
||||
.unwrap_or_else(|| tcx.intern_substs(&[])));
|
||||
let (expr, _ty) = lookup_const_by_id(tcx, def_id, substs).unwrap();
|
||||
return const_expr_to_pat(tcx, expr, pat_id, span);
|
||||
},
|
||||
@ -413,7 +414,7 @@ pub fn eval_const_expr_checked<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
eval_const_expr_partial(tcx, e, ExprTypeChecked, None)
|
||||
}
|
||||
|
||||
pub type FnArgMap<'a> = Option<&'a NodeMap<ConstVal>>;
|
||||
pub type FnArgMap<'a> = Option<&'a DefIdMap<ConstVal>>;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ConstEvalErr {
|
||||
@ -606,7 +607,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let ety = match ty_hint {
|
||||
ExprTypeChecked => {
|
||||
// After type-checking, expr_ty is guaranteed to succeed.
|
||||
Some(tcx.expr_ty(e))
|
||||
Some(tcx.tables().expr_ty(e))
|
||||
}
|
||||
UncheckedExprHint(ty) => {
|
||||
// Use the type hint; it's not guaranteed to be right, but it's
|
||||
@ -617,7 +618,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
// This expression might not be type-checked, and we have no hint.
|
||||
// Try to query the context for a type anyway; we might get lucky
|
||||
// (for example, if the expression was imported from another crate).
|
||||
tcx.expr_ty_opt(e)
|
||||
tcx.tables().expr_ty_opt(e)
|
||||
}
|
||||
};
|
||||
let result = match e.node {
|
||||
@ -759,7 +760,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let base_hint = if let ExprTypeChecked = ty_hint {
|
||||
ExprTypeChecked
|
||||
} else {
|
||||
match tcx.expr_ty_opt(&base) {
|
||||
match tcx.tables().expr_ty_opt(&base) {
|
||||
Some(t) => UncheckedExprHint(t),
|
||||
None => ty_hint
|
||||
}
|
||||
@ -798,7 +799,8 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
Def::Const(def_id) |
|
||||
Def::AssociatedConst(def_id) => {
|
||||
let substs = if let ExprTypeChecked = ty_hint {
|
||||
Some(tcx.node_id_item_substs(e.id).substs)
|
||||
Some(tcx.tables().node_id_item_substs(e.id)
|
||||
.unwrap_or_else(|| tcx.intern_substs(&[])))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@ -835,9 +837,8 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
ConstVal::Struct(e.id)
|
||||
}
|
||||
Def::Local(def_id) => {
|
||||
let id = tcx.map.as_local_node_id(def_id).unwrap();
|
||||
debug!("Def::Local({:?}): {:?}", id, fn_args);
|
||||
if let Some(val) = fn_args.and_then(|args| args.get(&id)) {
|
||||
debug!("Def::Local({:?}): {:?}", def_id, fn_args);
|
||||
if let Some(val) = fn_args.and_then(|args| args.get(&def_id)) {
|
||||
val.clone()
|
||||
} else {
|
||||
signal!(e, NonConstPath);
|
||||
@ -863,7 +864,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let result = result.as_ref().expect("const fn has no result expression");
|
||||
assert_eq!(decl.inputs.len(), args.len());
|
||||
|
||||
let mut call_args = NodeMap();
|
||||
let mut call_args = DefIdMap();
|
||||
for (arg, arg_expr) in decl.inputs.iter().zip(args.iter()) {
|
||||
let arg_hint = ty_hint.erase_hint();
|
||||
let arg_val = eval_const_expr_partial(
|
||||
@ -873,7 +874,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
fn_args
|
||||
)?;
|
||||
debug!("const call arg: {:?}", arg);
|
||||
let old = call_args.insert(arg.pat.id, arg_val);
|
||||
let old = call_args.insert(tcx.expect_def(arg.pat.id).def_id(), arg_val);
|
||||
assert!(old.is_none());
|
||||
}
|
||||
debug!("const call({:?})", call_args);
|
||||
|
@ -128,7 +128,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn lower_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> {
|
||||
let mut ty = self.tcx.node_id_to_type(pat.id);
|
||||
let mut ty = self.tcx.tables().node_id_to_type(pat.id);
|
||||
|
||||
let kind = match pat.node {
|
||||
PatKind::Wild => PatternKind::Wild,
|
||||
@ -167,8 +167,9 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
match self.tcx.expect_def(pat.id) {
|
||||
Def::Const(def_id) | Def::AssociatedConst(def_id) => {
|
||||
let tcx = self.tcx.global_tcx();
|
||||
let substs = Some(self.tcx.node_id_item_substs(pat.id).substs);
|
||||
match eval::lookup_const_by_id(tcx, def_id, substs) {
|
||||
let substs = tcx.tables().node_id_item_substs(pat.id)
|
||||
.unwrap_or_else(|| tcx.intern_substs(&[]));
|
||||
match eval::lookup_const_by_id(tcx, def_id, Some(substs)) {
|
||||
Some((const_expr, _const_ty)) => {
|
||||
match eval::const_expr_to_pat(
|
||||
tcx, const_expr, pat.id, pat.span)
|
||||
@ -197,7 +198,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
PatKind::Slice(ref prefix, ref slice, ref suffix) => {
|
||||
let ty = self.tcx.node_id_to_type(pat.id);
|
||||
let ty = self.tcx.tables().node_id_to_type(pat.id);
|
||||
match ty.sty {
|
||||
ty::TyRef(_, mt) =>
|
||||
PatternKind::Deref {
|
||||
@ -222,7 +223,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
PatKind::Tuple(ref subpatterns, ddpos) => {
|
||||
match self.tcx.node_id_to_type(pat.id).sty {
|
||||
match self.tcx.tables().node_id_to_type(pat.id).sty {
|
||||
ty::TyTuple(ref tys) => {
|
||||
let subpatterns =
|
||||
subpatterns.iter()
|
||||
@ -243,7 +244,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
PatKind::Binding(bm, ref ident, ref sub) => {
|
||||
let def_id = self.tcx.expect_def(pat.id).def_id();
|
||||
let id = self.tcx.map.as_local_node_id(def_id).unwrap();
|
||||
let var_ty = self.tcx.node_id_to_type(pat.id);
|
||||
let var_ty = self.tcx.tables().node_id_to_type(pat.id);
|
||||
let region = match var_ty.sty {
|
||||
ty::TyRef(r, _) => Some(r),
|
||||
_ => None,
|
||||
@ -280,7 +281,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
PatKind::TupleStruct(_, ref subpatterns, ddpos) => {
|
||||
let pat_ty = self.tcx.node_id_to_type(pat.id);
|
||||
let pat_ty = self.tcx.tables().node_id_to_type(pat.id);
|
||||
let adt_def = match pat_ty.sty {
|
||||
ty::TyAdt(adt_def, _) => adt_def,
|
||||
_ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT"),
|
||||
@ -299,7 +300,7 @@ impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
PatKind::Struct(_, ref fields, _) => {
|
||||
let pat_ty = self.tcx.node_id_to_type(pat.id);
|
||||
let pat_ty = self.tcx.tables().node_id_to_type(pat.id);
|
||||
let adt_def = match pat_ty.sty {
|
||||
ty::TyAdt(adt_def, _) => adt_def,
|
||||
_ => {
|
||||
|
@ -37,7 +37,8 @@ use rustc_typeck as typeck;
|
||||
use rustc_privacy;
|
||||
use rustc_plugin::registry::Registry;
|
||||
use rustc_plugin as plugin;
|
||||
use rustc_passes::{ast_validation, no_asm, loops, consts, rvalues, static_recursion};
|
||||
use rustc_passes::{ast_validation, no_asm, loops, consts, rvalues,
|
||||
static_recursion, hir_stats};
|
||||
use rustc_const_eval::check_match;
|
||||
use super::Compilation;
|
||||
|
||||
@ -513,6 +514,10 @@ pub fn phase_1_parse_input<'a>(sess: &'a Session, input: &Input) -> PResult<'a,
|
||||
syntax::show_span::run(sess.diagnostic(), s, &krate);
|
||||
}
|
||||
|
||||
if sess.opts.debugging_opts.hir_stats {
|
||||
hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS");
|
||||
}
|
||||
|
||||
Ok(krate)
|
||||
}
|
||||
|
||||
@ -718,6 +723,10 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
|
||||
println!("Post-expansion node count: {}", count_nodes(&krate));
|
||||
}
|
||||
|
||||
if sess.opts.debugging_opts.hir_stats {
|
||||
hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS");
|
||||
}
|
||||
|
||||
if sess.opts.debugging_opts.ast_json {
|
||||
println!("{}", json::as_json(&krate));
|
||||
}
|
||||
@ -758,7 +767,13 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
|
||||
|
||||
// Lower ast -> hir.
|
||||
let hir_forest = time(sess.time_passes(), "lowering ast -> hir", || {
|
||||
hir_map::Forest::new(lower_crate(sess, &krate, &mut resolver), &sess.dep_graph)
|
||||
let hir_crate = lower_crate(sess, &krate, &mut resolver);
|
||||
|
||||
if sess.opts.debugging_opts.hir_stats {
|
||||
hir_stats::print_hir_stats(&hir_crate);
|
||||
}
|
||||
|
||||
hir_map::Forest::new(hir_crate, &sess.dep_graph)
|
||||
});
|
||||
|
||||
// Discard hygiene data, which isn't required past lowering to HIR.
|
||||
|
@ -501,7 +501,7 @@ impl<'a, 'tcx> pprust_hir::PpAnn for TypedAnnotation<'a, 'tcx> {
|
||||
pp::space(&mut s.s)?;
|
||||
pp::word(&mut s.s, "as")?;
|
||||
pp::space(&mut s.s)?;
|
||||
pp::word(&mut s.s, &self.tcx.expr_ty(expr).to_string())?;
|
||||
pp::word(&mut s.s, &self.tcx.tables().expr_ty(expr).to_string())?;
|
||||
s.pclose()
|
||||
}
|
||||
_ => Ok(()),
|
||||
|
@ -11,4 +11,4 @@ crate-type = ["dylib"]
|
||||
[dependencies]
|
||||
log = { path = "../liblog" }
|
||||
serialize = { path = "../libserialize" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
|
@ -34,7 +34,6 @@ use middle::stability;
|
||||
use rustc::cfg;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::ty::adjustment;
|
||||
use rustc::traits::{self, Reveal};
|
||||
use rustc::hir::map as hir_map;
|
||||
use util::nodemap::NodeSet;
|
||||
@ -118,7 +117,9 @@ impl LateLintPass for BoxPointers {
|
||||
hir::ItemTy(..) |
|
||||
hir::ItemEnum(..) |
|
||||
hir::ItemStruct(..) |
|
||||
hir::ItemUnion(..) => self.check_heap_type(cx, it.span, cx.tcx.node_id_to_type(it.id)),
|
||||
hir::ItemUnion(..) => {
|
||||
self.check_heap_type(cx, it.span, cx.tcx.tables().node_id_to_type(it.id))
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
@ -129,7 +130,7 @@ impl LateLintPass for BoxPointers {
|
||||
for struct_field in struct_def.fields() {
|
||||
self.check_heap_type(cx,
|
||||
struct_field.span,
|
||||
cx.tcx.node_id_to_type(struct_field.id));
|
||||
cx.tcx.tables().node_id_to_type(struct_field.id));
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
@ -137,7 +138,7 @@ impl LateLintPass for BoxPointers {
|
||||
}
|
||||
|
||||
fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) {
|
||||
let ty = cx.tcx.node_id_to_type(e.id);
|
||||
let ty = cx.tcx.tables().node_id_to_type(e.id);
|
||||
self.check_heap_type(cx, e.span, ty);
|
||||
}
|
||||
}
|
||||
@ -585,7 +586,7 @@ impl LateLintPass for MissingDebugImplementations {
|
||||
let mut impls = NodeSet();
|
||||
debug_def.for_each_impl(cx.tcx, |d| {
|
||||
if let Some(n) = cx.tcx.map.as_local_node_id(d) {
|
||||
if let Some(ty_def) = cx.tcx.node_id_to_type(n).ty_to_def_id() {
|
||||
if let Some(ty_def) = cx.tcx.tables().node_id_to_type(n).ty_to_def_id() {
|
||||
if let Some(node_id) = cx.tcx.map.as_local_node_id(ty_def) {
|
||||
impls.insert(node_id);
|
||||
}
|
||||
@ -939,8 +940,10 @@ impl LateLintPass for UnconditionalRecursion {
|
||||
method: &ty::Method,
|
||||
id: ast::NodeId)
|
||||
-> bool {
|
||||
use rustc::ty::adjustment::*;
|
||||
|
||||
// Check for method calls and overloaded operators.
|
||||
let opt_m = tcx.tables.borrow().method_map.get(&ty::MethodCall::expr(id)).cloned();
|
||||
let opt_m = tcx.tables().method_map.get(&ty::MethodCall::expr(id)).cloned();
|
||||
if let Some(m) = opt_m {
|
||||
if method_call_refers_to_method(tcx, method, m.def_id, m.substs, id) {
|
||||
return true;
|
||||
@ -948,15 +951,12 @@ impl LateLintPass for UnconditionalRecursion {
|
||||
}
|
||||
|
||||
// Check for overloaded autoderef method calls.
|
||||
let opt_adj = tcx.tables.borrow().adjustments.get(&id).cloned();
|
||||
if let Some(adjustment::AdjustDerefRef(adj)) = opt_adj {
|
||||
for i in 0..adj.autoderefs {
|
||||
let opt_adj = tcx.tables().adjustments.get(&id).cloned();
|
||||
if let Some(Adjustment { kind: Adjust::DerefRef { autoderefs, .. }, .. }) = opt_adj {
|
||||
for i in 0..autoderefs {
|
||||
let method_call = ty::MethodCall::autoderef(id, i as u32);
|
||||
if let Some(m) = tcx.tables
|
||||
.borrow()
|
||||
.method_map
|
||||
.get(&method_call)
|
||||
.cloned() {
|
||||
if let Some(m) = tcx.tables().method_map.get(&method_call)
|
||||
.cloned() {
|
||||
if method_call_refers_to_method(tcx, method, m.def_id, m.substs, id) {
|
||||
return true;
|
||||
}
|
||||
@ -971,12 +971,10 @@ impl LateLintPass for UnconditionalRecursion {
|
||||
// it doesn't necessarily have a definition.
|
||||
match tcx.expect_def_or_none(callee.id) {
|
||||
Some(Def::Method(def_id)) => {
|
||||
let item_substs = tcx.node_id_item_substs(callee.id);
|
||||
method_call_refers_to_method(tcx,
|
||||
method,
|
||||
def_id,
|
||||
&item_substs.substs,
|
||||
id)
|
||||
let substs = tcx.tables().node_id_item_substs(callee.id)
|
||||
.unwrap_or_else(|| tcx.intern_substs(&[]));
|
||||
method_call_refers_to_method(
|
||||
tcx, method, def_id, substs, id)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
@ -1213,7 +1211,7 @@ impl LateLintPass for MutableTransmutes {
|
||||
if !def_id_is_transmute(cx, did) {
|
||||
return None;
|
||||
}
|
||||
let typ = cx.tcx.node_id_to_type(expr.id);
|
||||
let typ = cx.tcx.tables().node_id_to_type(expr.id);
|
||||
match typ.sty {
|
||||
ty::TyFnDef(.., ref bare_fn) if bare_fn.abi == RustIntrinsic => {
|
||||
let from = bare_fn.sig.0.inputs[0];
|
||||
@ -1284,7 +1282,7 @@ impl LateLintPass for UnionsWithDropFields {
|
||||
if let hir::ItemUnion(ref vdata, _) = item.node {
|
||||
let param_env = &ty::ParameterEnvironment::for_item(ctx.tcx, item.id);
|
||||
for field in vdata.fields() {
|
||||
let field_ty = ctx.tcx.node_id_to_type(field.id);
|
||||
let field_ty = ctx.tcx.tables().node_id_to_type(field.id);
|
||||
if ctx.tcx.type_needs_drop_given_env(field_ty, param_env) {
|
||||
ctx.span_lint(UNIONS_WITH_DROP_FIELDS,
|
||||
field.span,
|
||||
|
@ -113,14 +113,14 @@ impl LateLintPass for TypeLimits {
|
||||
forbid_unsigned_negation(cx, e.span);
|
||||
}
|
||||
ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
|
||||
if let ty::TyUint(_) = cx.tcx.node_id_to_type(e.id).sty {
|
||||
if let ty::TyUint(_) = cx.tcx.tables().node_id_to_type(e.id).sty {
|
||||
forbid_unsigned_negation(cx, e.span);
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
} else {
|
||||
let t = cx.tcx.node_id_to_type(expr.id);
|
||||
let t = cx.tcx.tables().node_id_to_type(expr.id);
|
||||
if let ty::TyUint(_) = t.sty {
|
||||
forbid_unsigned_negation(cx, e.span);
|
||||
}
|
||||
@ -138,7 +138,7 @@ impl LateLintPass for TypeLimits {
|
||||
}
|
||||
|
||||
if binop.node.is_shift() {
|
||||
let opt_ty_bits = match cx.tcx.node_id_to_type(l.id).sty {
|
||||
let opt_ty_bits = match cx.tcx.tables().node_id_to_type(l.id).sty {
|
||||
ty::TyInt(t) => Some(int_ty_bits(t, cx.sess().target.int_type)),
|
||||
ty::TyUint(t) => Some(uint_ty_bits(t, cx.sess().target.uint_type)),
|
||||
_ => None,
|
||||
@ -171,7 +171,7 @@ impl LateLintPass for TypeLimits {
|
||||
}
|
||||
}
|
||||
hir::ExprLit(ref lit) => {
|
||||
match cx.tcx.node_id_to_type(e.id).sty {
|
||||
match cx.tcx.tables().node_id_to_type(e.id).sty {
|
||||
ty::TyInt(t) => {
|
||||
match lit.node {
|
||||
ast::LitKind::Int(v, ast::LitIntType::Signed(_)) |
|
||||
@ -324,7 +324,7 @@ impl LateLintPass for TypeLimits {
|
||||
// Normalize the binop so that the literal is always on the RHS in
|
||||
// the comparison
|
||||
let norm_binop = if swap { rev_binop(binop) } else { binop };
|
||||
match tcx.node_id_to_type(expr.id).sty {
|
||||
match tcx.tables().node_id_to_type(expr.id).sty {
|
||||
ty::TyInt(int_ty) => {
|
||||
let (min, max) = int_ty_range(int_ty);
|
||||
let lit_val: i64 = match lit.node {
|
||||
@ -740,7 +740,7 @@ impl LateLintPass for VariantSizeDifferences {
|
||||
if let hir::ItemEnum(ref enum_definition, ref gens) = it.node {
|
||||
if gens.ty_params.is_empty() {
|
||||
// sizes only make sense for non-generic types
|
||||
let t = cx.tcx.node_id_to_type(it.id);
|
||||
let t = cx.tcx.tables().node_id_to_type(it.id);
|
||||
let layout = cx.tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
|
||||
let ty = cx.tcx.erase_regions(&t);
|
||||
ty.layout(&infcx)
|
||||
|
@ -140,7 +140,7 @@ impl LateLintPass for UnusedResults {
|
||||
return;
|
||||
}
|
||||
|
||||
let t = cx.tcx.expr_ty(&expr);
|
||||
let t = cx.tcx.tables().expr_ty(&expr);
|
||||
let warned = match t.sty {
|
||||
ty::TyTuple(ref tys) if tys.is_empty() => return,
|
||||
ty::TyNever => return,
|
||||
@ -441,16 +441,15 @@ impl LateLintPass for UnusedAllocation {
|
||||
_ => return,
|
||||
}
|
||||
|
||||
if let Some(adjustment) = cx.tcx.tables.borrow().adjustments.get(&e.id) {
|
||||
if let adjustment::AdjustDerefRef(adjustment::AutoDerefRef { ref autoref, .. }) =
|
||||
*adjustment {
|
||||
if let Some(adjustment) = cx.tcx.tables().adjustments.get(&e.id) {
|
||||
if let adjustment::Adjust::DerefRef { autoref, .. } = adjustment.kind {
|
||||
match autoref {
|
||||
&Some(adjustment::AutoPtr(_, hir::MutImmutable)) => {
|
||||
Some(adjustment::AutoBorrow::Ref(_, hir::MutImmutable)) => {
|
||||
cx.span_lint(UNUSED_ALLOCATION,
|
||||
e.span,
|
||||
"unnecessary allocation, use & instead");
|
||||
}
|
||||
&Some(adjustment::AutoPtr(_, hir::MutMutable)) => {
|
||||
Some(adjustment::AutoBorrow::Ref(_, hir::MutMutable)) => {
|
||||
cx.span_lint(UNUSED_ALLOCATION,
|
||||
e.span,
|
||||
"unnecessary allocation, use &mut instead");
|
||||
|
@ -38,7 +38,7 @@ enum TableEntry<'tcx> {
|
||||
Def(Def),
|
||||
NodeType(Ty<'tcx>),
|
||||
ItemSubsts(ty::ItemSubsts<'tcx>),
|
||||
Adjustment(ty::adjustment::AutoAdjustment<'tcx>),
|
||||
Adjustment(ty::adjustment::Adjustment<'tcx>),
|
||||
ConstQualif(ConstQualif),
|
||||
}
|
||||
|
||||
@ -94,9 +94,9 @@ impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx>
|
||||
};
|
||||
|
||||
encode(tcx.expect_def_or_none(id).map(TableEntry::Def));
|
||||
encode(tcx.node_types().get(&id).cloned().map(TableEntry::NodeType));
|
||||
encode(tcx.tables.borrow().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts));
|
||||
encode(tcx.tables.borrow().adjustments.get(&id).cloned().map(TableEntry::Adjustment));
|
||||
encode(tcx.tables().node_types.get(&id).cloned().map(TableEntry::NodeType));
|
||||
encode(tcx.tables().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts));
|
||||
encode(tcx.tables().adjustments.get(&id).cloned().map(TableEntry::Adjustment));
|
||||
encode(tcx.const_qualif_map.borrow().get(&id).cloned().map(TableEntry::ConstQualif));
|
||||
}
|
||||
}
|
||||
|
@ -1016,7 +1016,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
||||
|
||||
let data = ClosureData {
|
||||
kind: tcx.closure_kind(def_id),
|
||||
ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]),
|
||||
ty: self.lazy(&tcx.tables().closure_tys[&def_id]),
|
||||
};
|
||||
|
||||
Entry {
|
||||
|
@ -191,7 +191,7 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
|
||||
assert_eq!(block, builder.return_block());
|
||||
|
||||
let mut spread_arg = None;
|
||||
match tcx.node_id_to_type(fn_id).sty {
|
||||
match tcx.tables().node_id_to_type(fn_id).sty {
|
||||
ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => {
|
||||
// RustCall pseudo-ABI untuples the last argument.
|
||||
spread_arg = Some(Local::new(arguments.len()));
|
||||
@ -203,7 +203,7 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
|
||||
let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| {
|
||||
freevars.iter().map(|fv| {
|
||||
let var_id = tcx.map.as_local_node_id(fv.def.def_id()).unwrap();
|
||||
let by_ref = tcx.upvar_capture(ty::UpvarId {
|
||||
let by_ref = tcx.tables().upvar_capture(ty::UpvarId {
|
||||
var_id: var_id,
|
||||
closure_expr_id: fn_id
|
||||
}).map_or(false, |capture| match capture {
|
||||
@ -233,7 +233,7 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
|
||||
ast_expr: &'tcx hir::Expr)
|
||||
-> (Mir<'tcx>, ScopeAuxiliaryVec) {
|
||||
let tcx = hir.tcx();
|
||||
let ty = tcx.expr_ty_adjusted(ast_expr);
|
||||
let ty = tcx.tables().expr_ty_adjusted(ast_expr);
|
||||
let span = tcx.map.span(item_id);
|
||||
let mut builder = Builder::new(hir, span, 0, ty);
|
||||
|
||||
|
@ -77,7 +77,7 @@ fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
block: &'tcx hir::Block)
|
||||
-> ExprRef<'tcx> {
|
||||
let block_ty = cx.tcx.node_id_to_type(block.id);
|
||||
let block_ty = cx.tcx.tables().node_id_to_type(block.id);
|
||||
let temp_lifetime = cx.tcx.region_maps.temporary_scope(block.id);
|
||||
let expr = Expr {
|
||||
ty: block_ty,
|
||||
|
@ -35,15 +35,15 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span);
|
||||
|
||||
let mut expr = make_mirror_unadjusted(cx, self);
|
||||
let adj = cx.tcx.tables().adjustments.get(&self.id).cloned();
|
||||
|
||||
debug!("make_mirror: unadjusted-expr={:?} applying adjustments={:?}",
|
||||
expr, cx.tcx.tables.borrow().adjustments.get(&self.id));
|
||||
expr, adj);
|
||||
|
||||
// Now apply adjustments, if any.
|
||||
match cx.tcx.tables.borrow().adjustments.get(&self.id) {
|
||||
match adj.map(|adj| (adj.kind, adj.target)) {
|
||||
None => {}
|
||||
Some(&ty::adjustment::AdjustReifyFnPointer) => {
|
||||
let adjusted_ty = cx.tcx.expr_ty_adjusted(self);
|
||||
Some((ty::adjustment::Adjust::ReifyFnPointer, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: adjusted_ty,
|
||||
@ -51,8 +51,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
kind: ExprKind::ReifyFnPointer { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some(&ty::adjustment::AdjustUnsafeFnPointer) => {
|
||||
let adjusted_ty = cx.tcx.expr_ty_adjusted(self);
|
||||
Some((ty::adjustment::Adjust::UnsafeFnPointer, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: adjusted_ty,
|
||||
@ -60,7 +59,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
kind: ExprKind::UnsafeFnPointer { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some(&ty::adjustment::AdjustNeverToAny(adjusted_ty)) => {
|
||||
Some((ty::adjustment::Adjust::NeverToAny, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: adjusted_ty,
|
||||
@ -68,8 +67,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
kind: ExprKind::NeverToAny { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some(&ty::adjustment::AdjustMutToConstPointer) => {
|
||||
let adjusted_ty = cx.tcx.expr_ty_adjusted(self);
|
||||
Some((ty::adjustment::Adjust::MutToConstPointer, adjusted_ty)) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: adjusted_ty,
|
||||
@ -77,8 +75,9 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
kind: ExprKind::Cast { source: expr.to_ref() },
|
||||
};
|
||||
}
|
||||
Some(&ty::adjustment::AdjustDerefRef(ref adj)) => {
|
||||
for i in 0..adj.autoderefs {
|
||||
Some((ty::adjustment::Adjust::DerefRef { autoderefs, autoref, unsize },
|
||||
adjusted_ty)) => {
|
||||
for i in 0..autoderefs {
|
||||
let i = i as u32;
|
||||
let adjusted_ty =
|
||||
expr.ty.adjust_for_autoderef(
|
||||
@ -86,11 +85,11 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
self.id,
|
||||
self.span,
|
||||
i,
|
||||
|mc| cx.tcx.tables.borrow().method_map.get(&mc).map(|m| m.ty));
|
||||
|mc| cx.tcx.tables().method_map.get(&mc).map(|m| m.ty));
|
||||
debug!("make_mirror: autoderef #{}, adjusted_ty={:?}", i, adjusted_ty);
|
||||
let method_key = ty::MethodCall::autoderef(self.id, i);
|
||||
let meth_ty =
|
||||
cx.tcx.tables.borrow().method_map.get(&method_key).map(|m| m.ty);
|
||||
cx.tcx.tables().method_map.get(&method_key).map(|m| m.ty);
|
||||
let kind = if let Some(meth_ty) = meth_ty {
|
||||
debug!("make_mirror: overloaded autoderef (meth_ty={:?})", meth_ty);
|
||||
|
||||
@ -128,10 +127,10 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
};
|
||||
}
|
||||
|
||||
if let Some(autoref) = adj.autoref {
|
||||
if let Some(autoref) = autoref {
|
||||
let adjusted_ty = expr.ty.adjust_for_autoref(cx.tcx, Some(autoref));
|
||||
match autoref {
|
||||
ty::adjustment::AutoPtr(r, m) => {
|
||||
ty::adjustment::AutoBorrow::Ref(r, m) => {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: adjusted_ty,
|
||||
@ -143,7 +142,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
},
|
||||
};
|
||||
}
|
||||
ty::adjustment::AutoUnsafe(m) => {
|
||||
ty::adjustment::AutoBorrow::RawPtr(m) => {
|
||||
// Convert this to a suitable `&foo` and
|
||||
// then an unsafe coercion. Limit the region to be just this
|
||||
// expression.
|
||||
@ -169,10 +168,10 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(target) = adj.unsize {
|
||||
if unsize {
|
||||
expr = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: target,
|
||||
ty: adjusted_ty,
|
||||
span: self.span,
|
||||
kind: ExprKind::Unsize { source: expr.to_ref() },
|
||||
};
|
||||
@ -212,7 +211,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
|
||||
fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
expr: &'tcx hir::Expr)
|
||||
-> Expr<'tcx> {
|
||||
let expr_ty = cx.tcx.expr_ty(expr);
|
||||
let expr_ty = cx.tcx.tables().expr_ty(expr);
|
||||
let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id);
|
||||
|
||||
let kind = match expr.node {
|
||||
@ -231,7 +230,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprCall(ref fun, ref args) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
// The callee is something implementing Fn, FnMut, or FnOnce.
|
||||
// Find the actual method implementation being called and
|
||||
// build the appropriate UFCS call expression with the
|
||||
@ -282,7 +281,8 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
})
|
||||
} else { None };
|
||||
if let Some((adt_def, index)) = adt_data {
|
||||
let substs = cx.tcx.node_id_item_substs(fun.id).substs;
|
||||
let substs = cx.tcx.tables().node_id_item_substs(fun.id)
|
||||
.unwrap_or_else(|| cx.tcx.intern_substs(&[]));
|
||||
let field_refs = args.iter().enumerate().map(|(idx, e)| FieldExprRef {
|
||||
name: Field::new(idx),
|
||||
expr: e.to_ref()
|
||||
@ -296,7 +296,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
} else {
|
||||
ExprKind::Call {
|
||||
ty: cx.tcx.node_id_to_type(fun.id),
|
||||
ty: cx.tcx.tables().node_id_to_type(fun.id),
|
||||
fun: fun.to_ref(),
|
||||
args: args.to_ref(),
|
||||
}
|
||||
@ -328,7 +328,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprAssignOp(op, ref lhs, ref rhs) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
let pass_args = if op.node.is_by_value() {
|
||||
PassArgs::ByValue
|
||||
} else {
|
||||
@ -350,7 +350,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
},
|
||||
|
||||
hir::ExprBinary(op, ref lhs, ref rhs) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
let pass_args = if op.node.is_by_value() {
|
||||
PassArgs::ByValue
|
||||
} else {
|
||||
@ -406,7 +406,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprIndex(ref lhs, ref index) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
overloaded_lvalue(cx, expr, ty::MethodCall::expr(expr.id),
|
||||
PassArgs::ByValue, lhs.to_ref(), vec![index])
|
||||
} else {
|
||||
@ -418,7 +418,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
overloaded_lvalue(cx, expr, ty::MethodCall::expr(expr.id),
|
||||
PassArgs::ByValue, arg.to_ref(), vec![])
|
||||
} else {
|
||||
@ -427,7 +427,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprUnary(hir::UnOp::UnNot, ref arg) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id),
|
||||
PassArgs::ByValue, arg.to_ref(), vec![])
|
||||
} else {
|
||||
@ -439,7 +439,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprUnary(hir::UnOp::UnNeg, ref arg) => {
|
||||
if cx.tcx.is_method_call(expr.id) {
|
||||
if cx.tcx.tables().is_method_call(expr.id) {
|
||||
overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id),
|
||||
PassArgs::ByValue, arg.to_ref(), vec![])
|
||||
} else {
|
||||
@ -470,10 +470,8 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
base: base.as_ref().map(|base| {
|
||||
FruInfo {
|
||||
base: base.to_ref(),
|
||||
field_types: cx.tcx.tables
|
||||
.borrow()
|
||||
.fru_field_types[&expr.id]
|
||||
.clone()
|
||||
field_types:
|
||||
cx.tcx.tables().fru_field_types[&expr.id].clone()
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -512,7 +510,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
}
|
||||
|
||||
hir::ExprClosure(..) => {
|
||||
let closure_ty = cx.tcx.expr_ty(expr);
|
||||
let closure_ty = cx.tcx.tables().expr_ty(expr);
|
||||
let (def_id, substs) = match closure_ty.sty {
|
||||
ty::TyClosure(def_id, substs) => (def_id, substs),
|
||||
_ => {
|
||||
@ -551,7 +549,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
hir::ExprRepeat(ref v, ref c) => ExprKind::Repeat {
|
||||
value: v.to_ref(),
|
||||
count: TypedConstVal {
|
||||
ty: cx.tcx.expr_ty(c),
|
||||
ty: cx.tcx.tables().expr_ty(c),
|
||||
span: c.span,
|
||||
value: match const_eval::eval_const_expr(cx.tcx.global_tcx(), c) {
|
||||
ConstVal::Integral(ConstInt::Usize(u)) => u,
|
||||
@ -579,7 +577,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
ExprKind::Loop { condition: None,
|
||||
body: block::to_expr_ref(cx, body) },
|
||||
hir::ExprField(ref source, name) => {
|
||||
let index = match cx.tcx.expr_ty_adjusted(source).sty {
|
||||
let index = match cx.tcx.tables().expr_ty_adjusted(source).sty {
|
||||
ty::TyAdt(adt_def, _) =>
|
||||
adt_def.variants[0].index_of_field_named(name.node),
|
||||
ref ty =>
|
||||
@ -631,8 +629,7 @@ fn method_callee<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
expr: &hir::Expr,
|
||||
method_call: ty::MethodCall)
|
||||
-> Expr<'tcx> {
|
||||
let tables = cx.tcx.tables.borrow();
|
||||
let callee = &tables.method_map[&method_call];
|
||||
let callee = cx.tcx.tables().method_map[&method_call];
|
||||
let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id);
|
||||
Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
@ -666,8 +663,8 @@ fn convert_arm<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
fn convert_path_expr<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
expr: &'tcx hir::Expr)
|
||||
-> ExprKind<'tcx> {
|
||||
let substs = cx.tcx.node_id_item_substs(expr.id).substs;
|
||||
// Otherwise there may be def_map borrow conflicts
|
||||
let substs = cx.tcx.tables().node_id_item_substs(expr.id)
|
||||
.unwrap_or_else(|| cx.tcx.intern_substs(&[]));
|
||||
let def = cx.tcx.expect_def(expr.id);
|
||||
let def_id = match def {
|
||||
// A regular function, constructor function or a constant.
|
||||
@ -677,18 +674,20 @@ fn convert_path_expr<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
Def::Const(def_id) | Def::AssociatedConst(def_id) => def_id,
|
||||
|
||||
Def::StructCtor(def_id, CtorKind::Const) |
|
||||
Def::VariantCtor(def_id, CtorKind::Const) => match cx.tcx.node_id_to_type(expr.id).sty {
|
||||
// A unit struct/variant which is used as a value.
|
||||
// We return a completely different ExprKind here to account for this special case.
|
||||
ty::TyAdt(adt_def, substs) => return ExprKind::Adt {
|
||||
adt_def: adt_def,
|
||||
variant_index: adt_def.variant_index_with_id(def_id),
|
||||
substs: substs,
|
||||
fields: vec![],
|
||||
base: None,
|
||||
},
|
||||
ref sty => bug!("unexpected sty: {:?}", sty)
|
||||
},
|
||||
Def::VariantCtor(def_id, CtorKind::Const) => {
|
||||
match cx.tcx.tables().node_id_to_type(expr.id).sty {
|
||||
// A unit struct/variant which is used as a value.
|
||||
// We return a completely different ExprKind here to account for this special case.
|
||||
ty::TyAdt(adt_def, substs) => return ExprKind::Adt {
|
||||
adt_def: adt_def,
|
||||
variant_index: adt_def.variant_index_with_id(def_id),
|
||||
substs: substs,
|
||||
fields: vec![],
|
||||
base: None,
|
||||
},
|
||||
ref sty => bug!("unexpected sty: {:?}", sty)
|
||||
}
|
||||
}
|
||||
|
||||
Def::Static(node_id, _) => return ExprKind::StaticRef {
|
||||
id: node_id,
|
||||
@ -720,7 +719,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
Def::Upvar(def_id, index, closure_expr_id) => {
|
||||
let id_var = cx.tcx.map.as_local_node_id(def_id).unwrap();
|
||||
debug!("convert_var(upvar({:?}, {:?}, {:?}))", id_var, index, closure_expr_id);
|
||||
let var_ty = cx.tcx.node_id_to_type(id_var);
|
||||
let var_ty = cx.tcx.tables().node_id_to_type(id_var);
|
||||
|
||||
let body_id = match cx.tcx.map.find(closure_expr_id) {
|
||||
Some(map::NodeExpr(expr)) => {
|
||||
@ -737,7 +736,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
};
|
||||
|
||||
// FIXME free regions in closures are not right
|
||||
let closure_ty = cx.tcx.node_id_to_type(closure_expr_id);
|
||||
let closure_ty = cx.tcx.tables().node_id_to_type(closure_expr_id);
|
||||
|
||||
// FIXME we're just hard-coding the idea that the
|
||||
// signature will be &self or &mut self and hence will
|
||||
@ -809,7 +808,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
var_id: id_var,
|
||||
closure_expr_id: closure_expr_id,
|
||||
};
|
||||
let upvar_capture = match cx.tcx.upvar_capture(upvar_id) {
|
||||
let upvar_capture = match cx.tcx.tables().upvar_capture(upvar_id) {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
span_bug!(
|
||||
@ -893,7 +892,7 @@ fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
argrefs.extend(
|
||||
args.iter()
|
||||
.map(|arg| {
|
||||
let arg_ty = cx.tcx.expr_ty_adjusted(arg);
|
||||
let arg_ty = cx.tcx.tables().expr_ty_adjusted(arg);
|
||||
let adjusted_ty =
|
||||
cx.tcx.mk_ref(region,
|
||||
ty::TypeAndMut { ty: arg_ty,
|
||||
@ -931,9 +930,7 @@ fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
// line up (this is because `*x` and `x[y]` represent lvalues):
|
||||
|
||||
// to find the type &T of the content returned by the method;
|
||||
let tables = cx.tcx.tables.borrow();
|
||||
let callee = &tables.method_map[&method_call];
|
||||
let ref_ty = callee.ty.fn_ret();
|
||||
let ref_ty = cx.tcx.tables().method_map[&method_call].ty.fn_ret();
|
||||
let ref_ty = cx.tcx.no_late_bound_regions(&ref_ty).unwrap();
|
||||
// callees always have all late-bound regions fully instantiated,
|
||||
|
||||
@ -962,9 +959,9 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
|
||||
var_id: id_var,
|
||||
closure_expr_id: closure_expr.id,
|
||||
};
|
||||
let upvar_capture = cx.tcx.upvar_capture(upvar_id).unwrap();
|
||||
let upvar_capture = cx.tcx.tables().upvar_capture(upvar_id).unwrap();
|
||||
let temp_lifetime = cx.tcx.region_maps.temporary_scope(closure_expr.id);
|
||||
let var_ty = cx.tcx.node_id_to_type(id_var);
|
||||
let var_ty = cx.tcx.tables().node_id_to_type(id_var);
|
||||
let captured_var = Expr {
|
||||
temp_lifetime: temp_lifetime,
|
||||
ty: var_ty,
|
||||
|
@ -214,7 +214,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BuildMir<'a, 'tcx> {
|
||||
id: ast::NodeId) {
|
||||
// fetch the fully liberated fn signature (that is, all bound
|
||||
// types/lifetimes replaced)
|
||||
let fn_sig = match self.tcx.tables.borrow().liberated_fn_sigs.get(&id) {
|
||||
let fn_sig = match self.tcx.tables().liberated_fn_sigs.get(&id) {
|
||||
Some(f) => f.clone(),
|
||||
None => {
|
||||
span_bug!(span, "no liberated fn sig for {:?}", id);
|
||||
@ -248,7 +248,7 @@ fn closure_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
closure_expr_id: ast::NodeId,
|
||||
body_id: ast::NodeId)
|
||||
-> Ty<'tcx> {
|
||||
let closure_ty = tcx.node_id_to_type(closure_expr_id);
|
||||
let closure_ty = tcx.tables().node_id_to_type(closure_expr_id);
|
||||
|
||||
// We're just hard-coding the idea that the signature will be
|
||||
// &self or &mut self and hence will have a bound region with
|
||||
|
@ -319,7 +319,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
|
||||
let mut outer = self.qualif;
|
||||
self.qualif = ConstQualif::empty();
|
||||
|
||||
let node_ty = self.tcx.node_id_to_type(ex.id);
|
||||
let node_ty = self.tcx.tables().node_id_to_type(ex.id);
|
||||
check_expr(self, ex, node_ty);
|
||||
check_adjustments(self, ex);
|
||||
|
||||
@ -449,14 +449,14 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
match e.node {
|
||||
hir::ExprUnary(..) |
|
||||
hir::ExprBinary(..) |
|
||||
hir::ExprIndex(..) if v.tcx.tables.borrow().method_map.contains_key(&method_call) => {
|
||||
hir::ExprIndex(..) if v.tcx.tables().method_map.contains_key(&method_call) => {
|
||||
v.add_qualif(ConstQualif::NOT_CONST);
|
||||
}
|
||||
hir::ExprBox(_) => {
|
||||
v.add_qualif(ConstQualif::NOT_CONST);
|
||||
}
|
||||
hir::ExprUnary(op, ref inner) => {
|
||||
match v.tcx.node_id_to_type(inner.id).sty {
|
||||
match v.tcx.tables().node_id_to_type(inner.id).sty {
|
||||
ty::TyRawPtr(_) => {
|
||||
assert!(op == hir::UnDeref);
|
||||
|
||||
@ -466,7 +466,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
}
|
||||
}
|
||||
hir::ExprBinary(op, ref lhs, _) => {
|
||||
match v.tcx.node_id_to_type(lhs.id).sty {
|
||||
match v.tcx.tables().node_id_to_type(lhs.id).sty {
|
||||
ty::TyRawPtr(_) => {
|
||||
assert!(op.node == hir::BiEq || op.node == hir::BiNe ||
|
||||
op.node == hir::BiLe || op.node == hir::BiLt ||
|
||||
@ -503,7 +503,8 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
}
|
||||
}
|
||||
Def::Const(did) | Def::AssociatedConst(did) => {
|
||||
let substs = Some(v.tcx.node_id_item_substs(e.id).substs);
|
||||
let substs = Some(v.tcx.tables().node_id_item_substs(e.id)
|
||||
.unwrap_or_else(|| v.tcx.intern_substs(&[])));
|
||||
if let Some((expr, _)) = lookup_const_by_id(v.tcx, did, substs) {
|
||||
let inner = v.global_expr(Mode::Const, expr);
|
||||
v.add_qualif(inner);
|
||||
@ -555,7 +556,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
}
|
||||
}
|
||||
hir::ExprMethodCall(..) => {
|
||||
let method = v.tcx.tables.borrow().method_map[&method_call];
|
||||
let method = v.tcx.tables().method_map[&method_call];
|
||||
let is_const = match v.tcx.impl_or_trait_item(method.def_id).container() {
|
||||
ty::ImplContainer(_) => v.handle_const_fn_call(e, method.def_id, node_ty),
|
||||
ty::TraitContainer(_) => false
|
||||
@ -565,7 +566,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
}
|
||||
}
|
||||
hir::ExprStruct(..) => {
|
||||
if let ty::TyAdt(adt, ..) = v.tcx.expr_ty(e).sty {
|
||||
if let ty::TyAdt(adt, ..) = v.tcx.tables().expr_ty(e).sty {
|
||||
// unsafe_cell_type doesn't necessarily exist with no_core
|
||||
if Some(adt.did) == v.tcx.lang_items.unsafe_cell_type() {
|
||||
v.add_qualif(ConstQualif::MUTABLE_MEM);
|
||||
@ -624,16 +625,18 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
|
||||
|
||||
/// Check the adjustments of an expression
|
||||
fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr) {
|
||||
match v.tcx.tables.borrow().adjustments.get(&e.id) {
|
||||
None |
|
||||
Some(&ty::adjustment::AdjustNeverToAny(..)) |
|
||||
Some(&ty::adjustment::AdjustReifyFnPointer) |
|
||||
Some(&ty::adjustment::AdjustUnsafeFnPointer) |
|
||||
Some(&ty::adjustment::AdjustMutToConstPointer) => {}
|
||||
use rustc::ty::adjustment::*;
|
||||
|
||||
Some(&ty::adjustment::AdjustDerefRef(ty::adjustment::AutoDerefRef { autoderefs, .. })) => {
|
||||
match v.tcx.tables().adjustments.get(&e.id).map(|adj| adj.kind) {
|
||||
None |
|
||||
Some(Adjust::NeverToAny) |
|
||||
Some(Adjust::ReifyFnPointer) |
|
||||
Some(Adjust::UnsafeFnPointer) |
|
||||
Some(Adjust::MutToConstPointer) => {}
|
||||
|
||||
Some(Adjust::DerefRef { autoderefs, .. }) => {
|
||||
if (0..autoderefs as u32)
|
||||
.any(|autoderef| v.tcx.is_overloaded_autoderef(e.id, autoderef)) {
|
||||
.any(|autoderef| v.tcx.tables().is_overloaded_autoderef(e.id, autoderef)) {
|
||||
v.add_qualif(ConstQualif::NOT_CONST);
|
||||
}
|
||||
}
|
||||
|
374
src/librustc_passes/hir_stats.rs
Normal file
374
src/librustc_passes/hir_stats.rs
Normal file
@ -0,0 +1,374 @@
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// The visitors in this module collect sizes and counts of the most important
|
||||
// pieces of AST and HIR. The resulting numbers are good approximations but not
|
||||
// completely accurate (some things might be counted twice, others missed).
|
||||
|
||||
use rustc::hir;
|
||||
use rustc::hir::intravisit as hir_visit;
|
||||
use rustc::util::common::to_readable_str;
|
||||
use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
|
||||
use syntax::ast::{self, NodeId, AttrId};
|
||||
use syntax::visit as ast_visit;
|
||||
use syntax_pos::Span;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
enum Id {
|
||||
Node(NodeId),
|
||||
Attr(AttrId),
|
||||
None,
|
||||
}
|
||||
|
||||
struct NodeData {
|
||||
count: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
struct StatCollector<'k> {
|
||||
krate: Option<&'k hir::Crate>,
|
||||
data: FnvHashMap<&'static str, NodeData>,
|
||||
seen: FnvHashSet<Id>,
|
||||
}
|
||||
|
||||
pub fn print_hir_stats(krate: &hir::Crate) {
|
||||
let mut collector = StatCollector {
|
||||
krate: Some(krate),
|
||||
data: FnvHashMap(),
|
||||
seen: FnvHashSet(),
|
||||
};
|
||||
hir_visit::walk_crate(&mut collector, krate);
|
||||
collector.print("HIR STATS");
|
||||
}
|
||||
|
||||
pub fn print_ast_stats(krate: &ast::Crate, title: &str) {
|
||||
let mut collector = StatCollector {
|
||||
krate: None,
|
||||
data: FnvHashMap(),
|
||||
seen: FnvHashSet(),
|
||||
};
|
||||
ast_visit::walk_crate(&mut collector, krate);
|
||||
collector.print(title);
|
||||
}
|
||||
|
||||
impl<'k> StatCollector<'k> {
|
||||
|
||||
fn record<T>(&mut self, label: &'static str, id: Id, node: &T) {
|
||||
if id != Id::None {
|
||||
if !self.seen.insert(id) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
let entry = self.data.entry(label).or_insert(NodeData {
|
||||
count: 0,
|
||||
size: 0,
|
||||
});
|
||||
|
||||
entry.count += 1;
|
||||
entry.size = ::std::mem::size_of_val(node);
|
||||
}
|
||||
|
||||
fn print(&self, title: &str) {
|
||||
let mut stats: Vec<_> = self.data.iter().collect();
|
||||
|
||||
stats.sort_by_key(|&(_, ref d)| d.count * d.size);
|
||||
|
||||
let mut total_size = 0;
|
||||
|
||||
println!("\n{}\n", title);
|
||||
|
||||
println!("{:<18}{:>18}{:>14}{:>14}",
|
||||
"Name", "Accumulated Size", "Count", "Item Size");
|
||||
println!("----------------------------------------------------------------");
|
||||
|
||||
for (label, data) in stats {
|
||||
println!("{:<18}{:>18}{:>14}{:>14}",
|
||||
label,
|
||||
to_readable_str(data.count * data.size),
|
||||
to_readable_str(data.count),
|
||||
to_readable_str(data.size));
|
||||
|
||||
total_size += data.count * data.size;
|
||||
}
|
||||
println!("----------------------------------------------------------------");
|
||||
println!("{:<18}{:>18}\n",
|
||||
"Total",
|
||||
to_readable_str(total_size));
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> hir_visit::Visitor<'v> for StatCollector<'v> {
|
||||
|
||||
fn visit_nested_item(&mut self, id: hir::ItemId) {
|
||||
let nested_item = self.krate.unwrap().item(id.id);
|
||||
self.visit_item(nested_item)
|
||||
}
|
||||
|
||||
fn visit_item(&mut self, i: &'v hir::Item) {
|
||||
self.record("Item", Id::Node(i.id), i);
|
||||
hir_visit::walk_item(self, i)
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn visit_mod(&mut self, m: &'v hir::Mod, _s: Span, n: NodeId) {
|
||||
self.record("Mod", Id::None, m);
|
||||
hir_visit::walk_mod(self, m, n)
|
||||
}
|
||||
fn visit_foreign_item(&mut self, i: &'v hir::ForeignItem) {
|
||||
self.record("ForeignItem", Id::Node(i.id), i);
|
||||
hir_visit::walk_foreign_item(self, i)
|
||||
}
|
||||
fn visit_local(&mut self, l: &'v hir::Local) {
|
||||
self.record("Local", Id::Node(l.id), l);
|
||||
hir_visit::walk_local(self, l)
|
||||
}
|
||||
fn visit_block(&mut self, b: &'v hir::Block) {
|
||||
self.record("Block", Id::Node(b.id), b);
|
||||
hir_visit::walk_block(self, b)
|
||||
}
|
||||
fn visit_stmt(&mut self, s: &'v hir::Stmt) {
|
||||
self.record("Stmt", Id::Node(s.node.id()), s);
|
||||
hir_visit::walk_stmt(self, s)
|
||||
}
|
||||
fn visit_arm(&mut self, a: &'v hir::Arm) {
|
||||
self.record("Arm", Id::None, a);
|
||||
hir_visit::walk_arm(self, a)
|
||||
}
|
||||
fn visit_pat(&mut self, p: &'v hir::Pat) {
|
||||
self.record("Pat", Id::Node(p.id), p);
|
||||
hir_visit::walk_pat(self, p)
|
||||
}
|
||||
fn visit_decl(&mut self, d: &'v hir::Decl) {
|
||||
self.record("Decl", Id::None, d);
|
||||
hir_visit::walk_decl(self, d)
|
||||
}
|
||||
fn visit_expr(&mut self, ex: &'v hir::Expr) {
|
||||
self.record("Expr", Id::Node(ex.id), ex);
|
||||
hir_visit::walk_expr(self, ex)
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, t: &'v hir::Ty) {
|
||||
self.record("Ty", Id::Node(t.id), t);
|
||||
hir_visit::walk_ty(self, t)
|
||||
}
|
||||
|
||||
fn visit_fn(&mut self,
|
||||
fk: hir_visit::FnKind<'v>,
|
||||
fd: &'v hir::FnDecl,
|
||||
b: &'v hir::Block,
|
||||
s: Span,
|
||||
id: NodeId) {
|
||||
self.record("FnDecl", Id::None, fd);
|
||||
hir_visit::walk_fn(self, fk, fd, b, s, id)
|
||||
}
|
||||
|
||||
fn visit_where_predicate(&mut self, predicate: &'v hir::WherePredicate) {
|
||||
self.record("WherePredicate", Id::None, predicate);
|
||||
hir_visit::walk_where_predicate(self, predicate)
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, ti: &'v hir::TraitItem) {
|
||||
self.record("TraitItem", Id::Node(ti.id), ti);
|
||||
hir_visit::walk_trait_item(self, ti)
|
||||
}
|
||||
fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
|
||||
self.record("ImplItem", Id::Node(ii.id), ii);
|
||||
hir_visit::walk_impl_item(self, ii)
|
||||
}
|
||||
|
||||
fn visit_ty_param_bound(&mut self, bounds: &'v hir::TyParamBound) {
|
||||
self.record("TyParamBound", Id::None, bounds);
|
||||
hir_visit::walk_ty_param_bound(self, bounds)
|
||||
}
|
||||
|
||||
fn visit_struct_field(&mut self, s: &'v hir::StructField) {
|
||||
self.record("StructField", Id::Node(s.id), s);
|
||||
hir_visit::walk_struct_field(self, s)
|
||||
}
|
||||
|
||||
fn visit_variant(&mut self,
|
||||
v: &'v hir::Variant,
|
||||
g: &'v hir::Generics,
|
||||
item_id: NodeId) {
|
||||
self.record("Variant", Id::None, v);
|
||||
hir_visit::walk_variant(self, v, g, item_id)
|
||||
}
|
||||
fn visit_lifetime(&mut self, lifetime: &'v hir::Lifetime) {
|
||||
self.record("Lifetime", Id::Node(lifetime.id), lifetime);
|
||||
hir_visit::walk_lifetime(self, lifetime)
|
||||
}
|
||||
fn visit_lifetime_def(&mut self, lifetime: &'v hir::LifetimeDef) {
|
||||
self.record("LifetimeDef", Id::None, lifetime);
|
||||
hir_visit::walk_lifetime_def(self, lifetime)
|
||||
}
|
||||
fn visit_path(&mut self, path: &'v hir::Path, _id: NodeId) {
|
||||
self.record("Path", Id::None, path);
|
||||
hir_visit::walk_path(self, path)
|
||||
}
|
||||
fn visit_path_list_item(&mut self,
|
||||
prefix: &'v hir::Path,
|
||||
item: &'v hir::PathListItem) {
|
||||
self.record("PathListItem", Id::Node(item.node.id), item);
|
||||
hir_visit::walk_path_list_item(self, prefix, item)
|
||||
}
|
||||
fn visit_path_segment(&mut self,
|
||||
path_span: Span,
|
||||
path_segment: &'v hir::PathSegment) {
|
||||
self.record("PathSegment", Id::None, path_segment);
|
||||
hir_visit::walk_path_segment(self, path_span, path_segment)
|
||||
}
|
||||
|
||||
fn visit_assoc_type_binding(&mut self, type_binding: &'v hir::TypeBinding) {
|
||||
self.record("TypeBinding", Id::Node(type_binding.id), type_binding);
|
||||
hir_visit::walk_assoc_type_binding(self, type_binding)
|
||||
}
|
||||
fn visit_attribute(&mut self, attr: &'v ast::Attribute) {
|
||||
self.record("Attribute", Id::Attr(attr.node.id), attr);
|
||||
}
|
||||
fn visit_macro_def(&mut self, macro_def: &'v hir::MacroDef) {
|
||||
self.record("MacroDef", Id::Node(macro_def.id), macro_def);
|
||||
hir_visit::walk_macro_def(self, macro_def)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'v> ast_visit::Visitor for StatCollector<'v> {
|
||||
|
||||
fn visit_mod(&mut self, m: &ast::Mod, _s: Span, _n: NodeId) {
|
||||
self.record("Mod", Id::None, m);
|
||||
ast_visit::walk_mod(self, m)
|
||||
}
|
||||
|
||||
fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
|
||||
self.record("ForeignItem", Id::None, i);
|
||||
ast_visit::walk_foreign_item(self, i)
|
||||
}
|
||||
|
||||
fn visit_item(&mut self, i: &ast::Item) {
|
||||
self.record("Item", Id::None, i);
|
||||
ast_visit::walk_item(self, i)
|
||||
}
|
||||
|
||||
fn visit_local(&mut self, l: &ast::Local) {
|
||||
self.record("Local", Id::None, l);
|
||||
ast_visit::walk_local(self, l)
|
||||
}
|
||||
|
||||
fn visit_block(&mut self, b: &ast::Block) {
|
||||
self.record("Block", Id::None, b);
|
||||
ast_visit::walk_block(self, b)
|
||||
}
|
||||
|
||||
fn visit_stmt(&mut self, s: &ast::Stmt) {
|
||||
self.record("Stmt", Id::None, s);
|
||||
ast_visit::walk_stmt(self, s)
|
||||
}
|
||||
|
||||
fn visit_arm(&mut self, a: &ast::Arm) {
|
||||
self.record("Arm", Id::None, a);
|
||||
ast_visit::walk_arm(self, a)
|
||||
}
|
||||
|
||||
fn visit_pat(&mut self, p: &ast::Pat) {
|
||||
self.record("Pat", Id::None, p);
|
||||
ast_visit::walk_pat(self, p)
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, ex: &ast::Expr) {
|
||||
self.record("Expr", Id::None, ex);
|
||||
ast_visit::walk_expr(self, ex)
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, t: &ast::Ty) {
|
||||
self.record("Ty", Id::None, t);
|
||||
ast_visit::walk_ty(self, t)
|
||||
}
|
||||
|
||||
fn visit_fn(&mut self,
|
||||
fk: ast_visit::FnKind,
|
||||
fd: &ast::FnDecl,
|
||||
b: &ast::Block,
|
||||
s: Span,
|
||||
_: NodeId) {
|
||||
self.record("FnDecl", Id::None, fd);
|
||||
ast_visit::walk_fn(self, fk, fd, b, s)
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
|
||||
self.record("TraitItem", Id::None, ti);
|
||||
ast_visit::walk_trait_item(self, ti)
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
|
||||
self.record("ImplItem", Id::None, ii);
|
||||
ast_visit::walk_impl_item(self, ii)
|
||||
}
|
||||
|
||||
fn visit_ty_param_bound(&mut self, bounds: &ast::TyParamBound) {
|
||||
self.record("TyParamBound", Id::None, bounds);
|
||||
ast_visit::walk_ty_param_bound(self, bounds)
|
||||
}
|
||||
|
||||
fn visit_struct_field(&mut self, s: &ast::StructField) {
|
||||
self.record("StructField", Id::None, s);
|
||||
ast_visit::walk_struct_field(self, s)
|
||||
}
|
||||
|
||||
fn visit_variant(&mut self,
|
||||
v: &ast::Variant,
|
||||
g: &ast::Generics,
|
||||
item_id: NodeId) {
|
||||
self.record("Variant", Id::None, v);
|
||||
ast_visit::walk_variant(self, v, g, item_id)
|
||||
}
|
||||
|
||||
fn visit_lifetime(&mut self, lifetime: &ast::Lifetime) {
|
||||
self.record("Lifetime", Id::None, lifetime);
|
||||
ast_visit::walk_lifetime(self, lifetime)
|
||||
}
|
||||
|
||||
fn visit_lifetime_def(&mut self, lifetime: &ast::LifetimeDef) {
|
||||
self.record("LifetimeDef", Id::None, lifetime);
|
||||
ast_visit::walk_lifetime_def(self, lifetime)
|
||||
}
|
||||
|
||||
fn visit_mac(&mut self, mac: &ast::Mac) {
|
||||
self.record("Mac", Id::None, mac);
|
||||
}
|
||||
|
||||
fn visit_path_list_item(&mut self,
|
||||
prefix: &ast::Path,
|
||||
item: &ast::PathListItem) {
|
||||
self.record("PathListItem", Id::None, item);
|
||||
ast_visit::walk_path_list_item(self, prefix, item)
|
||||
}
|
||||
|
||||
fn visit_path_segment(&mut self,
|
||||
path_span: Span,
|
||||
path_segment: &ast::PathSegment) {
|
||||
self.record("PathSegment", Id::None, path_segment);
|
||||
ast_visit::walk_path_segment(self, path_span, path_segment)
|
||||
}
|
||||
|
||||
fn visit_assoc_type_binding(&mut self, type_binding: &ast::TypeBinding) {
|
||||
self.record("TypeBinding", Id::None, type_binding);
|
||||
ast_visit::walk_assoc_type_binding(self, type_binding)
|
||||
}
|
||||
|
||||
fn visit_attribute(&mut self, attr: &ast::Attribute) {
|
||||
self.record("Attribute", Id::None, attr);
|
||||
}
|
||||
|
||||
fn visit_macro_def(&mut self, macro_def: &ast::MacroDef) {
|
||||
self.record("MacroDef", Id::None, macro_def);
|
||||
ast_visit::walk_macro_def(self, macro_def)
|
||||
}
|
||||
}
|
@ -45,6 +45,7 @@ pub mod diagnostics;
|
||||
|
||||
pub mod ast_validation;
|
||||
pub mod consts;
|
||||
pub mod hir_stats;
|
||||
pub mod loops;
|
||||
pub mod no_asm;
|
||||
pub mod rvalues;
|
||||
|
@ -430,11 +430,11 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
|
||||
match expr.node {
|
||||
hir::ExprMethodCall(..) => {
|
||||
let method_call = ty::MethodCall::expr(expr.id);
|
||||
let method = self.tcx.tables.borrow().method_map[&method_call];
|
||||
let method = self.tcx.tables().method_map[&method_call];
|
||||
self.check_method(expr.span, method.def_id);
|
||||
}
|
||||
hir::ExprStruct(_, ref expr_fields, _) => {
|
||||
let adt = self.tcx.expr_ty(expr).ty_adt_def().unwrap();
|
||||
let adt = self.tcx.tables().expr_ty(expr).ty_adt_def().unwrap();
|
||||
let variant = adt.variant_of_def(self.tcx.expect_def(expr.id));
|
||||
// RFC 736: ensure all unmentioned fields are visible.
|
||||
// Rather than computing the set of unmentioned fields
|
||||
@ -495,14 +495,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
|
||||
|
||||
match pattern.node {
|
||||
PatKind::Struct(_, ref fields, _) => {
|
||||
let adt = self.tcx.pat_ty(pattern).ty_adt_def().unwrap();
|
||||
let adt = self.tcx.tables().pat_ty(pattern).ty_adt_def().unwrap();
|
||||
let variant = adt.variant_of_def(self.tcx.expect_def(pattern.id));
|
||||
for field in fields {
|
||||
self.check_field(field.span, adt, variant.field_named(field.node.name));
|
||||
}
|
||||
}
|
||||
PatKind::TupleStruct(_, ref fields, ddpos) => {
|
||||
match self.tcx.pat_ty(pattern).sty {
|
||||
match self.tcx.tables().pat_ty(pattern).sty {
|
||||
// enum fields have no privacy at this time
|
||||
ty::TyAdt(def, _) if !def.is_enum() => {
|
||||
let expected_len = def.struct_variant().fields.len();
|
||||
|
@ -356,7 +356,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
|
||||
collector.visit_pat(&arg.pat);
|
||||
let span_utils = self.span.clone();
|
||||
for &(id, ref p, ..) in &collector.collected_paths {
|
||||
let typ = self.tcx.node_types().get(&id).unwrap().to_string();
|
||||
let typ = self.tcx.tables().node_types.get(&id).unwrap().to_string();
|
||||
// get the span only for the name of the variable (I hope the path is only ever a
|
||||
// variable name, but who knows?)
|
||||
let sub_span = span_utils.span_for_last_ident(p.span);
|
||||
@ -988,7 +988,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
|
||||
match p.node {
|
||||
PatKind::Struct(ref path, ref fields, _) => {
|
||||
visit::walk_path(self, path);
|
||||
let adt = self.tcx.node_id_to_type(p.id).ty_adt_def().unwrap();
|
||||
let adt = self.tcx.tables().node_id_to_type(p.id).ty_adt_def().unwrap();
|
||||
let variant = adt.variant_of_def(self.tcx.expect_def(p.id));
|
||||
|
||||
for &Spanned { node: ref field, span } in fields {
|
||||
@ -1023,8 +1023,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
|
||||
ast::Mutability::Immutable => value.to_string(),
|
||||
_ => String::new(),
|
||||
};
|
||||
let types = self.tcx.node_types();
|
||||
let typ = match types.get(&id) {
|
||||
let typ = match self.tcx.tables().node_types.get(&id) {
|
||||
Some(typ) => {
|
||||
let typ = typ.to_string();
|
||||
if !value.is_empty() {
|
||||
@ -1355,7 +1354,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D>
|
||||
}
|
||||
ast::ExprKind::Struct(ref path, ref fields, ref base) => {
|
||||
let hir_expr = self.save_ctxt.tcx.map.expect_expr(ex.id);
|
||||
let adt = self.tcx.expr_ty(&hir_expr).ty_adt_def().unwrap();
|
||||
let adt = self.tcx.tables().expr_ty(&hir_expr).ty_adt_def().unwrap();
|
||||
let def = self.tcx.expect_def(hir_expr.id);
|
||||
self.process_struct_lit(ex, path, fields, adt.variant_of_def(def), base)
|
||||
}
|
||||
@ -1381,7 +1380,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D>
|
||||
return;
|
||||
}
|
||||
};
|
||||
let ty = &self.tcx.expr_ty_adjusted(&hir_node).sty;
|
||||
let ty = &self.tcx.tables().expr_ty_adjusted(&hir_node).sty;
|
||||
match *ty {
|
||||
ty::TyAdt(def, _) => {
|
||||
let sub_span = self.span.sub_span_after_token(ex.span, token::Dot);
|
||||
@ -1468,7 +1467,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D>
|
||||
} else {
|
||||
"<mutable>".to_string()
|
||||
};
|
||||
let typ = self.tcx.node_types()
|
||||
let typ = self.tcx.tables().node_types
|
||||
.get(&id).map(|t| t.to_string()).unwrap_or(String::new());
|
||||
value.push_str(": ");
|
||||
value.push_str(&typ);
|
||||
|
@ -286,7 +286,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||
scope: NodeId) -> Option<VariableData> {
|
||||
if let Some(ident) = field.ident {
|
||||
let qualname = format!("::{}::{}", self.tcx.node_path_str(scope), ident);
|
||||
let typ = self.tcx.node_types().get(&field.id).unwrap().to_string();
|
||||
let typ = self.tcx.tables().node_types.get(&field.id).unwrap().to_string();
|
||||
let sub_span = self.span_utils.sub_span_before_token(field.span, token::Colon);
|
||||
filter!(self.span_utils, sub_span, field.span, None);
|
||||
Some(VariableData {
|
||||
@ -418,7 +418,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||
|
||||
pub fn get_expr_data(&self, expr: &ast::Expr) -> Option<Data> {
|
||||
let hir_node = self.tcx.map.expect_expr(expr.id);
|
||||
let ty = self.tcx.expr_ty_adjusted_opt(&hir_node);
|
||||
let ty = self.tcx.tables().expr_ty_adjusted_opt(&hir_node);
|
||||
if ty.is_none() || ty.unwrap().sty == ty::TyError {
|
||||
return None;
|
||||
}
|
||||
@ -432,7 +432,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
match self.tcx.expr_ty_adjusted(&hir_node).sty {
|
||||
match self.tcx.tables().expr_ty_adjusted(&hir_node).sty {
|
||||
ty::TyAdt(def, _) if !def.is_enum() => {
|
||||
let f = def.struct_variant().field_named(ident.node.name);
|
||||
let sub_span = self.span_utils.span_for_last_ident(expr.span);
|
||||
@ -451,7 +451,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||
}
|
||||
}
|
||||
ast::ExprKind::Struct(ref path, ..) => {
|
||||
match self.tcx.expr_ty_adjusted(&hir_node).sty {
|
||||
match self.tcx.tables().expr_ty_adjusted(&hir_node).sty {
|
||||
ty::TyAdt(def, _) if !def.is_enum() => {
|
||||
let sub_span = self.span_utils.span_for_last_ident(path.span);
|
||||
filter!(self.span_utils, sub_span, path.span, None);
|
||||
@ -472,7 +472,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||
}
|
||||
ast::ExprKind::MethodCall(..) => {
|
||||
let method_call = ty::MethodCall::expr(expr.id);
|
||||
let method_id = self.tcx.tables.borrow().method_map[&method_call].def_id;
|
||||
let method_id = self.tcx.tables().method_map[&method_call].def_id;
|
||||
let (def_id, decl_id) = match self.tcx.impl_or_trait_item(method_id).container() {
|
||||
ty::ImplContainer(_) => (Some(method_id), None),
|
||||
ty::TraitContainer(_) => (None, Some(method_id)),
|
||||
|
@ -636,7 +636,7 @@ fn link_natively(sess: &Session,
|
||||
{
|
||||
let mut linker = trans.linker_info.to_linker(&mut cmd, &sess);
|
||||
link_args(&mut *linker, sess, crate_type, tmpdir,
|
||||
objects, out_filename, outputs);
|
||||
objects, out_filename, outputs, trans);
|
||||
}
|
||||
cmd.args(&sess.target.target.options.late_link_args);
|
||||
for obj in &sess.target.target.options.post_link_objects {
|
||||
@ -711,7 +711,8 @@ fn link_args(cmd: &mut Linker,
|
||||
tmpdir: &Path,
|
||||
objects: &[PathBuf],
|
||||
out_filename: &Path,
|
||||
outputs: &OutputFilenames) {
|
||||
outputs: &OutputFilenames,
|
||||
trans: &CrateTranslation) {
|
||||
|
||||
// The default library location, we need this to find the runtime.
|
||||
// The location of crates will be determined as needed.
|
||||
@ -726,6 +727,13 @@ fn link_args(cmd: &mut Linker,
|
||||
}
|
||||
cmd.output_filename(out_filename);
|
||||
|
||||
if crate_type == config::CrateTypeExecutable &&
|
||||
sess.target.target.options.is_like_windows {
|
||||
if let Some(ref s) = trans.windows_subsystem {
|
||||
cmd.subsystem(s);
|
||||
}
|
||||
}
|
||||
|
||||
// If we're building a dynamic library then some platforms need to make sure
|
||||
// that all symbols are exported correctly from the dynamic library.
|
||||
if crate_type != config::CrateTypeExecutable {
|
||||
|
@ -92,6 +92,7 @@ pub trait Linker {
|
||||
fn whole_archives(&mut self);
|
||||
fn no_whole_archives(&mut self);
|
||||
fn export_symbols(&mut self, tmpdir: &Path, crate_type: CrateType);
|
||||
fn subsystem(&mut self, subsystem: &str);
|
||||
}
|
||||
|
||||
pub struct GnuLinker<'a> {
|
||||
@ -294,6 +295,10 @@ impl<'a> Linker for GnuLinker<'a> {
|
||||
|
||||
self.cmd.arg(arg);
|
||||
}
|
||||
|
||||
fn subsystem(&mut self, subsystem: &str) {
|
||||
self.cmd.arg(&format!("-Wl,--subsystem,{}", subsystem));
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MsvcLinker<'a> {
|
||||
@ -441,6 +446,30 @@ impl<'a> Linker for MsvcLinker<'a> {
|
||||
arg.push(path);
|
||||
self.cmd.arg(&arg);
|
||||
}
|
||||
|
||||
fn subsystem(&mut self, subsystem: &str) {
|
||||
// Note that previous passes of the compiler validated this subsystem,
|
||||
// so we just blindly pass it to the linker.
|
||||
self.cmd.arg(&format!("/SUBSYSTEM:{}", subsystem));
|
||||
|
||||
// Windows has two subsystems we're interested in right now, the console
|
||||
// and windows subsystems. These both implicitly have different entry
|
||||
// points (starting symbols). The console entry point starts with
|
||||
// `mainCRTStartup` and the windows entry point starts with
|
||||
// `WinMainCRTStartup`. These entry points, defined in system libraries,
|
||||
// will then later probe for either `main` or `WinMain`, respectively to
|
||||
// start the application.
|
||||
//
|
||||
// In Rust we just always generate a `main` function so we want control
|
||||
// to always start there, so we force the entry point on the windows
|
||||
// subsystem to be `mainCRTStartup` to get everything booted up
|
||||
// correctly.
|
||||
//
|
||||
// For more information see RFC #1665
|
||||
if subsystem == "windows" {
|
||||
self.cmd.arg("/ENTRY:mainCRTStartup");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn exported_symbols(scx: &SharedCrateContext,
|
||||
|
@ -1196,6 +1196,9 @@ pub fn maybe_create_entry_wrapper(ccx: &CrateContext) {
|
||||
}
|
||||
let llfn = declare::declare_cfn(ccx, "main", llfty);
|
||||
|
||||
// `main` should respect same config for frame pointer elimination as rest of code
|
||||
attributes::set_frame_pointer_elimination(ccx, llfn);
|
||||
|
||||
let llbb = unsafe {
|
||||
llvm::LLVMAppendBasicBlockInContext(ccx.llcx(), llfn, "top\0".as_ptr() as *const _)
|
||||
};
|
||||
@ -1611,7 +1614,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
metadata: metadata,
|
||||
reachable: vec![],
|
||||
no_builtins: no_builtins,
|
||||
linker_info: linker_info
|
||||
linker_info: linker_info,
|
||||
windows_subsystem: None,
|
||||
};
|
||||
}
|
||||
|
||||
@ -1747,6 +1751,17 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
let linker_info = LinkerInfo::new(&shared_ccx, &reachable_symbols);
|
||||
|
||||
let subsystem = attr::first_attr_value_str_by_name(&krate.attrs,
|
||||
"windows_subsystem");
|
||||
let windows_subsystem = subsystem.map(|subsystem| {
|
||||
if subsystem != "windows" && subsystem != "console" {
|
||||
tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
|
||||
`windows` and `console` are allowed",
|
||||
subsystem));
|
||||
}
|
||||
subsystem.to_string()
|
||||
});
|
||||
|
||||
CrateTranslation {
|
||||
modules: modules,
|
||||
metadata_module: metadata_module,
|
||||
@ -1754,7 +1769,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
metadata: metadata,
|
||||
reachable: reachable_symbols,
|
||||
no_builtins: no_builtins,
|
||||
linker_info: linker_info
|
||||
linker_info: linker_info,
|
||||
windows_subsystem: windows_subsystem,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -74,7 +74,7 @@ impl<'tcx> Callee<'tcx> {
|
||||
pub fn method_call<'blk>(bcx: Block<'blk, 'tcx>,
|
||||
method_call: ty::MethodCall)
|
||||
-> Callee<'tcx> {
|
||||
let method = bcx.tcx().tables.borrow().method_map[&method_call];
|
||||
let method = bcx.tcx().tables().method_map[&method_call];
|
||||
Callee::method(bcx, method)
|
||||
}
|
||||
|
||||
|
@ -1082,10 +1082,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> {
|
||||
hir::ItemStruct(_, ref generics) |
|
||||
hir::ItemUnion(_, ref generics) => {
|
||||
if !generics.is_parameterized() {
|
||||
let ty = {
|
||||
let tables = self.scx.tcx().tables.borrow();
|
||||
tables.node_types[&item.id]
|
||||
};
|
||||
let ty = self.scx.tcx().tables().node_types[&item.id];
|
||||
|
||||
if self.mode == TransItemCollectionMode::Eager {
|
||||
debug!("RootCollector: ADT drop-glue for {}",
|
||||
|
@ -1765,7 +1765,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
||||
};
|
||||
|
||||
let is_local_to_unit = is_node_local_to_unit(cx, node_id);
|
||||
let variable_type = tcx.erase_regions(&tcx.node_id_to_type(node_id));
|
||||
let variable_type = tcx.erase_regions(&tcx.tables().node_id_to_type(node_id));
|
||||
let type_metadata = type_metadata(cx, variable_type, span);
|
||||
let var_name = tcx.item_name(node_def_id).to_string();
|
||||
let linkage_name = mangled_name_of_item(cx, node_def_id, "");
|
||||
|
@ -169,6 +169,7 @@ pub struct CrateTranslation {
|
||||
pub metadata: Vec<u8>,
|
||||
pub reachable: Vec<String>,
|
||||
pub no_builtins: bool,
|
||||
pub windows_subsystem: Option<String>,
|
||||
pub linker_info: back::linker::LinkerInfo
|
||||
}
|
||||
|
||||
|
@ -1549,7 +1549,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
|
||||
|
||||
tcx.prohibit_type_params(base_segments);
|
||||
let impl_id = tcx.map.as_local_node_id(def_id).unwrap();
|
||||
let ty = tcx.node_id_to_type(impl_id);
|
||||
let ty = tcx.tables().node_id_to_type(impl_id);
|
||||
if let Some(free_substs) = self.get_free_substs() {
|
||||
ty.subst(tcx, free_substs)
|
||||
} else {
|
||||
|
@ -103,7 +103,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
// If the callee is a bare function or a closure, then we're all set.
|
||||
match self.structurally_resolved_type(callee_expr.span, adjusted_ty).sty {
|
||||
ty::TyFnDef(..) | ty::TyFnPtr(_) => {
|
||||
self.write_autoderef_adjustment(callee_expr.id, autoderefs);
|
||||
self.write_autoderef_adjustment(callee_expr.id, autoderefs, adjusted_ty);
|
||||
return Some(CallStep::Builtin);
|
||||
}
|
||||
|
||||
|
@ -65,10 +65,7 @@ use check::FnCtxt;
|
||||
use rustc::hir;
|
||||
use rustc::infer::{Coercion, InferOk, TypeOrigin, TypeTrace};
|
||||
use rustc::traits::{self, ObligationCause};
|
||||
use rustc::ty::adjustment::{AutoAdjustment, AutoDerefRef, AdjustDerefRef};
|
||||
use rustc::ty::adjustment::{AutoPtr, AutoUnsafe, AdjustReifyFnPointer};
|
||||
use rustc::ty::adjustment::{AdjustUnsafeFnPointer, AdjustMutToConstPointer};
|
||||
use rustc::ty::adjustment::AdjustNeverToAny;
|
||||
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
|
||||
use rustc::ty::{self, LvaluePreference, TypeAndMut, Ty};
|
||||
use rustc::ty::fold::TypeFoldable;
|
||||
use rustc::ty::error::TypeError;
|
||||
@ -93,7 +90,7 @@ impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, AutoAdjustment<'tcx>)>;
|
||||
type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, Adjust<'tcx>)>;
|
||||
|
||||
fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability,
|
||||
to_mutbl: hir::Mutability)
|
||||
@ -144,12 +141,11 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
|
||||
/// Synthesize an identity adjustment.
|
||||
fn identity(&self, ty: Ty<'tcx>) -> CoerceResult<'tcx> {
|
||||
Ok((ty,
|
||||
AdjustDerefRef(AutoDerefRef {
|
||||
autoderefs: 0,
|
||||
autoref: None,
|
||||
unsize: None,
|
||||
})))
|
||||
Ok((ty, Adjust::DerefRef {
|
||||
autoderefs: 0,
|
||||
autoref: None,
|
||||
unsize: false,
|
||||
}))
|
||||
}
|
||||
|
||||
fn coerce<'a, E, I>(&self, exprs: &E, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx>
|
||||
@ -166,7 +162,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
if a.is_never() {
|
||||
return Ok((b, AdjustNeverToAny(b)));
|
||||
return Ok((b, Adjust::NeverToAny));
|
||||
}
|
||||
|
||||
// Consider coercing the subtype to a DST
|
||||
@ -396,17 +392,16 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
ty::TyRef(r_borrow, _) => r_borrow,
|
||||
_ => span_bug!(span, "expected a ref type, got {:?}", ty),
|
||||
};
|
||||
let autoref = Some(AutoPtr(r_borrow, mt_b.mutbl));
|
||||
let autoref = Some(AutoBorrow::Ref(r_borrow, mt_b.mutbl));
|
||||
debug!("coerce_borrowed_pointer: succeeded ty={:?} autoderefs={:?} autoref={:?}",
|
||||
ty,
|
||||
autoderefs,
|
||||
autoref);
|
||||
Ok((ty,
|
||||
AdjustDerefRef(AutoDerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: autoref,
|
||||
unsize: None,
|
||||
})))
|
||||
Ok((ty, Adjust::DerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: autoref,
|
||||
unsize: false,
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
@ -437,11 +432,11 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
|
||||
let coercion = Coercion(self.origin.span());
|
||||
let r_borrow = self.next_region_var(coercion);
|
||||
(mt_a.ty, Some(AutoPtr(r_borrow, mt_b.mutbl)))
|
||||
(mt_a.ty, Some(AutoBorrow::Ref(r_borrow, mt_b.mutbl)))
|
||||
}
|
||||
(&ty::TyRef(_, mt_a), &ty::TyRawPtr(mt_b)) => {
|
||||
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
|
||||
(mt_a.ty, Some(AutoUnsafe(mt_b.mutbl)))
|
||||
(mt_a.ty, Some(AutoBorrow::RawPtr(mt_b.mutbl)))
|
||||
}
|
||||
_ => (source, None),
|
||||
};
|
||||
@ -497,13 +492,13 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
|
||||
*self.unsizing_obligations.borrow_mut() = leftover_predicates;
|
||||
|
||||
let adjustment = AutoDerefRef {
|
||||
let adjustment = Adjust::DerefRef {
|
||||
autoderefs: if reborrow.is_some() { 1 } else { 0 },
|
||||
autoref: reborrow,
|
||||
unsize: Some(target),
|
||||
unsize: true,
|
||||
};
|
||||
debug!("Success, coerced with {:?}", adjustment);
|
||||
Ok((target, AdjustDerefRef(adjustment)))
|
||||
Ok((target, adjustment))
|
||||
}
|
||||
|
||||
fn coerce_from_safe_fn(&self,
|
||||
@ -516,7 +511,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
(hir::Unsafety::Normal, hir::Unsafety::Unsafe) => {
|
||||
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
|
||||
return self.unify_and_identity(unsafe_a, b)
|
||||
.map(|(ty, _)| (ty, AdjustUnsafeFnPointer));
|
||||
.map(|(ty, _)| (ty, Adjust::UnsafeFnPointer));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -555,7 +550,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
ty::TyFnPtr(_) => {
|
||||
let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a);
|
||||
self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b)
|
||||
.map(|(ty, _)| (ty, AdjustReifyFnPointer))
|
||||
.map(|(ty, _)| (ty, Adjust::ReifyFnPointer))
|
||||
}
|
||||
_ => self.unify_and_identity(a, b),
|
||||
}
|
||||
@ -585,17 +580,17 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
|
||||
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
|
||||
|
||||
// Although references and unsafe ptrs have the same
|
||||
// representation, we still register an AutoDerefRef so that
|
||||
// representation, we still register an Adjust::DerefRef so that
|
||||
// regionck knows that the region for `a` must be valid here.
|
||||
Ok((ty,
|
||||
if is_ref {
|
||||
AdjustDerefRef(AutoDerefRef {
|
||||
Adjust::DerefRef {
|
||||
autoderefs: 1,
|
||||
autoref: Some(AutoUnsafe(mutbl_b)),
|
||||
unsize: None,
|
||||
})
|
||||
autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
|
||||
unsize: false,
|
||||
}
|
||||
} else if mt_a.mutbl != mutbl_b {
|
||||
AdjustMutToConstPointer
|
||||
Adjust::MutToConstPointer
|
||||
} else {
|
||||
noop
|
||||
}))
|
||||
@ -606,24 +601,25 @@ fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>,
|
||||
exprs: &E,
|
||||
a: Ty<'tcx>,
|
||||
b: Ty<'tcx>)
|
||||
-> CoerceResult<'tcx>
|
||||
-> RelateResult<'tcx, Adjustment<'tcx>>
|
||||
where E: Fn() -> I,
|
||||
I: IntoIterator<Item = &'b hir::Expr>
|
||||
{
|
||||
|
||||
let (ty, adjustment) = indent(|| coerce.coerce(exprs, a, b))?;
|
||||
let (ty, adjust) = indent(|| coerce.coerce(exprs, a, b))?;
|
||||
|
||||
let fcx = coerce.fcx;
|
||||
if let AdjustDerefRef(auto) = adjustment {
|
||||
if auto.unsize.is_some() {
|
||||
let mut obligations = coerce.unsizing_obligations.borrow_mut();
|
||||
for obligation in obligations.drain(..) {
|
||||
fcx.register_predicate(obligation);
|
||||
}
|
||||
if let Adjust::DerefRef { unsize: true, .. } = adjust {
|
||||
let mut obligations = coerce.unsizing_obligations.borrow_mut();
|
||||
for obligation in obligations.drain(..) {
|
||||
fcx.register_predicate(obligation);
|
||||
}
|
||||
}
|
||||
|
||||
Ok((ty, adjustment))
|
||||
Ok(Adjustment {
|
||||
kind: adjust,
|
||||
target: ty
|
||||
})
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
@ -641,17 +637,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
|
||||
let mut coerce = Coerce::new(self, TypeOrigin::ExprAssignable(expr.span));
|
||||
self.commit_if_ok(|_| {
|
||||
let (ty, adjustment) = apply(&mut coerce, &|| Some(expr), source, target)?;
|
||||
let adjustment = apply(&mut coerce, &|| Some(expr), source, target)?;
|
||||
if !adjustment.is_identity() {
|
||||
debug!("Success, coerced with {:?}", adjustment);
|
||||
match self.tables.borrow().adjustments.get(&expr.id) {
|
||||
None |
|
||||
Some(&AdjustNeverToAny(..)) => (),
|
||||
Some(&Adjustment { kind: Adjust::NeverToAny, .. }) => (),
|
||||
_ => bug!("expr already has an adjustment on it!"),
|
||||
};
|
||||
self.write_adjustment(expr.id, adjustment);
|
||||
}
|
||||
Ok(ty)
|
||||
Ok(adjustment.target)
|
||||
})
|
||||
}
|
||||
|
||||
@ -705,12 +701,16 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
// Reify both sides and return the reified fn pointer type.
|
||||
let fn_ptr = self.tcx.mk_fn_ptr(fty);
|
||||
for expr in exprs().into_iter().chain(Some(new)) {
|
||||
// No adjustments can produce a fn item, so this should never trip.
|
||||
assert!(!self.tables.borrow().adjustments.contains_key(&expr.id));
|
||||
self.write_adjustment(expr.id, AdjustReifyFnPointer);
|
||||
self.write_adjustment(expr.id, Adjustment {
|
||||
kind: Adjust::ReifyFnPointer,
|
||||
target: fn_ptr
|
||||
});
|
||||
}
|
||||
return Ok(self.tcx.mk_fn_ptr(fty));
|
||||
return Ok(fn_ptr);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -724,11 +724,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
if !self.tables.borrow().adjustments.contains_key(&new.id) {
|
||||
let result = self.commit_if_ok(|_| apply(&mut coerce, &|| Some(new), new_ty, prev_ty));
|
||||
match result {
|
||||
Ok((ty, adjustment)) => {
|
||||
Ok(adjustment) => {
|
||||
if !adjustment.is_identity() {
|
||||
self.write_adjustment(new.id, adjustment);
|
||||
}
|
||||
return Ok(ty);
|
||||
return Ok(adjustment.target);
|
||||
}
|
||||
Err(e) => first_error = Some(e),
|
||||
}
|
||||
@ -738,10 +738,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
// This requires ensuring there are no coercions applied to *any* of the
|
||||
// previous expressions, other than noop reborrows (ignoring lifetimes).
|
||||
for expr in exprs() {
|
||||
let noop = match self.tables.borrow().adjustments.get(&expr.id) {
|
||||
Some(&AdjustDerefRef(AutoDerefRef { autoderefs: 1,
|
||||
autoref: Some(AutoPtr(_, mutbl_adj)),
|
||||
unsize: None })) => {
|
||||
let noop = match self.tables.borrow().adjustments.get(&expr.id).map(|adj| adj.kind) {
|
||||
Some(Adjust::DerefRef {
|
||||
autoderefs: 1,
|
||||
autoref: Some(AutoBorrow::Ref(_, mutbl_adj)),
|
||||
unsize: false
|
||||
}) => {
|
||||
match self.node_ty(expr.id).sty {
|
||||
ty::TyRef(_, mt_orig) => {
|
||||
// Reborrow that we can safely ignore.
|
||||
@ -750,7 +752,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
Some(&AdjustNeverToAny(_)) => true,
|
||||
Some(Adjust::NeverToAny) => true,
|
||||
Some(_) => false,
|
||||
None => true,
|
||||
};
|
||||
@ -783,18 +785,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
})
|
||||
}
|
||||
}
|
||||
Ok((ty, adjustment)) => {
|
||||
Ok(adjustment) => {
|
||||
if !adjustment.is_identity() {
|
||||
let mut tables = self.tables.borrow_mut();
|
||||
for expr in exprs() {
|
||||
let previous = self.tables.borrow().adjustments.get(&expr.id).cloned();
|
||||
if let Some(AdjustNeverToAny(_)) = previous {
|
||||
self.write_adjustment(expr.id, AdjustNeverToAny(ty));
|
||||
} else {
|
||||
self.write_adjustment(expr.id, adjustment);
|
||||
if let Some(&mut Adjustment {
|
||||
kind: Adjust::NeverToAny,
|
||||
ref mut target
|
||||
}) = tables.adjustments.get_mut(&expr.id) {
|
||||
*target = adjustment.target;
|
||||
continue;
|
||||
}
|
||||
tables.adjustments.insert(expr.id, adjustment);
|
||||
}
|
||||
}
|
||||
Ok(ty)
|
||||
Ok(adjustment.target)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -15,7 +15,7 @@ use hir::def_id::DefId;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::traits;
|
||||
use rustc::ty::{self, LvaluePreference, NoPreference, PreferMutLvalue, Ty};
|
||||
use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr};
|
||||
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
|
||||
use rustc::ty::fold::TypeFoldable;
|
||||
use rustc::infer::{self, InferOk, TypeOrigin};
|
||||
use syntax_pos::Span;
|
||||
@ -140,20 +140,19 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> {
|
||||
unadjusted_self_ty: Ty<'tcx>,
|
||||
pick: &probe::Pick<'tcx>)
|
||||
-> Ty<'tcx> {
|
||||
let (autoref, unsize) = if let Some(mutbl) = pick.autoref {
|
||||
let autoref = if let Some(mutbl) = pick.autoref {
|
||||
let region = self.next_region_var(infer::Autoref(self.span));
|
||||
let autoref = AutoPtr(region, mutbl);
|
||||
(Some(autoref),
|
||||
pick.unsize.map(|target| target.adjust_for_autoref(self.tcx, Some(autoref))))
|
||||
Some(AutoBorrow::Ref(region, mutbl))
|
||||
} else {
|
||||
// No unsizing should be performed without autoref (at
|
||||
// least during method dispach). This is because we
|
||||
// currently only unsize `[T;N]` to `[T]`, and naturally
|
||||
// that must occur being a reference.
|
||||
assert!(pick.unsize.is_none());
|
||||
(None, None)
|
||||
None
|
||||
};
|
||||
|
||||
|
||||
// Commit the autoderefs by calling `autoderef` again, but this
|
||||
// time writing the results into the various tables.
|
||||
let mut autoderef = self.autoderef(self.span, unadjusted_self_ty);
|
||||
@ -163,19 +162,20 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> {
|
||||
autoderef.unambiguous_final_ty();
|
||||
autoderef.finalize(LvaluePreference::NoPreference, Some(self.self_expr));
|
||||
|
||||
// Write out the final adjustment.
|
||||
self.write_adjustment(self.self_expr.id,
|
||||
AdjustDerefRef(AutoDerefRef {
|
||||
autoderefs: pick.autoderefs,
|
||||
autoref: autoref,
|
||||
unsize: unsize,
|
||||
}));
|
||||
let target = pick.unsize.unwrap_or(autoderefd_ty);
|
||||
let target = target.adjust_for_autoref(self.tcx, autoref);
|
||||
|
||||
if let Some(target) = unsize {
|
||||
target
|
||||
} else {
|
||||
autoderefd_ty.adjust_for_autoref(self.tcx, autoref)
|
||||
}
|
||||
// Write out the final adjustment.
|
||||
self.write_adjustment(self.self_expr.id, Adjustment {
|
||||
kind: Adjust::DerefRef {
|
||||
autoderefs: pick.autoderefs,
|
||||
autoref: autoref,
|
||||
unsize: pick.unsize.is_some(),
|
||||
},
|
||||
target: target
|
||||
});
|
||||
|
||||
target
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
@ -463,29 +463,23 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> {
|
||||
|
||||
// Fix up autoderefs and derefs.
|
||||
for (i, &expr) in exprs.iter().rev().enumerate() {
|
||||
debug!("convert_lvalue_derefs_to_mutable: i={} expr={:?}", i, expr);
|
||||
|
||||
// Count autoderefs.
|
||||
let autoderef_count = match self.tables
|
||||
.borrow()
|
||||
.adjustments
|
||||
.get(&expr.id) {
|
||||
Some(&AdjustDerefRef(ref adj)) => adj.autoderefs,
|
||||
Some(_) | None => 0,
|
||||
};
|
||||
|
||||
debug!("convert_lvalue_derefs_to_mutable: i={} expr={:?} \
|
||||
autoderef_count={}",
|
||||
i,
|
||||
expr,
|
||||
autoderef_count);
|
||||
|
||||
if autoderef_count > 0 {
|
||||
let mut autoderef = self.autoderef(expr.span, self.node_ty(expr.id));
|
||||
autoderef.nth(autoderef_count).unwrap_or_else(|| {
|
||||
span_bug!(expr.span,
|
||||
"expr was deref-able {} times but now isn't?",
|
||||
autoderef_count);
|
||||
});
|
||||
autoderef.finalize(PreferMutLvalue, Some(expr));
|
||||
let adjustment = self.tables.borrow().adjustments.get(&expr.id).cloned();
|
||||
match adjustment {
|
||||
Some(Adjustment { kind: Adjust::DerefRef { autoderefs, .. }, .. }) => {
|
||||
if autoderefs > 0 {
|
||||
let mut autoderef = self.autoderef(expr.span, self.node_ty(expr.id));
|
||||
autoderef.nth(autoderefs).unwrap_or_else(|| {
|
||||
span_bug!(expr.span,
|
||||
"expr was deref-able {} times but now isn't?",
|
||||
autoderefs);
|
||||
});
|
||||
autoderef.finalize(PreferMutLvalue, Some(expr));
|
||||
}
|
||||
}
|
||||
Some(_) | None => {}
|
||||
}
|
||||
|
||||
// Don't retry the first one or we might infinite loop!
|
||||
@ -503,45 +497,55 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> {
|
||||
// ought to recode this routine so it doesn't
|
||||
// (ab)use the normal type checking paths.
|
||||
let adj = self.tables.borrow().adjustments.get(&base_expr.id).cloned();
|
||||
let (autoderefs, unsize) = match adj {
|
||||
Some(AdjustDerefRef(adr)) => {
|
||||
match adr.autoref {
|
||||
let (autoderefs, unsize, adjusted_base_ty) = match adj {
|
||||
Some(Adjustment {
|
||||
kind: Adjust::DerefRef { autoderefs, autoref, unsize },
|
||||
target
|
||||
}) => {
|
||||
match autoref {
|
||||
None => {
|
||||
assert!(adr.unsize.is_none());
|
||||
(adr.autoderefs, None)
|
||||
}
|
||||
Some(AutoPtr(..)) => {
|
||||
(adr.autoderefs,
|
||||
adr.unsize.map(|target| {
|
||||
target.builtin_deref(false, NoPreference)
|
||||
.expect("fixup: AutoPtr is not &T")
|
||||
.ty
|
||||
}))
|
||||
assert!(!unsize);
|
||||
}
|
||||
Some(AutoBorrow::Ref(..)) => {}
|
||||
Some(_) => {
|
||||
span_bug!(base_expr.span,
|
||||
"unexpected adjustment autoref {:?}",
|
||||
adr);
|
||||
adj);
|
||||
}
|
||||
}
|
||||
|
||||
(autoderefs, unsize, if unsize {
|
||||
target.builtin_deref(false, NoPreference)
|
||||
.expect("fixup: AutoBorrow::Ref is not &T")
|
||||
.ty
|
||||
} else {
|
||||
let ty = self.node_ty(base_expr.id);
|
||||
let mut ty = self.shallow_resolve(ty);
|
||||
let mut method_type = |method_call: ty::MethodCall| {
|
||||
self.tables.borrow().method_map.get(&method_call).map(|m| {
|
||||
self.resolve_type_vars_if_possible(&m.ty)
|
||||
})
|
||||
};
|
||||
|
||||
if !ty.references_error() {
|
||||
for i in 0..autoderefs {
|
||||
ty = ty.adjust_for_autoderef(self.tcx,
|
||||
base_expr.id,
|
||||
base_expr.span,
|
||||
i as u32,
|
||||
&mut method_type);
|
||||
}
|
||||
}
|
||||
|
||||
ty
|
||||
})
|
||||
}
|
||||
None => (0, None),
|
||||
None => (0, false, self.node_ty(base_expr.id)),
|
||||
Some(_) => {
|
||||
span_bug!(base_expr.span, "unexpected adjustment type");
|
||||
}
|
||||
};
|
||||
|
||||
let (adjusted_base_ty, unsize) = if let Some(target) = unsize {
|
||||
(target, true)
|
||||
} else {
|
||||
(self.adjust_expr_ty(base_expr,
|
||||
Some(&AdjustDerefRef(AutoDerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: None,
|
||||
unsize: None,
|
||||
}))),
|
||||
false)
|
||||
};
|
||||
let index_expr_ty = self.node_ty(index_expr.id);
|
||||
|
||||
let result = self.try_index_step(ty::MethodCall::expr(expr.id),
|
||||
|
@ -16,7 +16,7 @@ use hir::def_id::DefId;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::traits;
|
||||
use rustc::ty::{self, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable};
|
||||
use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr};
|
||||
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
|
||||
use rustc::infer;
|
||||
|
||||
use syntax::ast;
|
||||
@ -294,11 +294,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
unsize,
|
||||
method_ty.explicit_self);
|
||||
|
||||
match method_ty.explicit_self {
|
||||
let autoref = match method_ty.explicit_self {
|
||||
ty::ExplicitSelfCategory::ByValue => {
|
||||
// Trait method is fn(self), no transformation needed.
|
||||
assert!(!unsize);
|
||||
self.write_autoderef_adjustment(self_expr.id, autoderefs);
|
||||
None
|
||||
}
|
||||
|
||||
ty::ExplicitSelfCategory::ByReference(..) => {
|
||||
@ -306,16 +306,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
// autoref. Pull the region etc out of the type of first argument.
|
||||
match transformed_self_ty.sty {
|
||||
ty::TyRef(region, ty::TypeAndMut { mutbl, ty: _ }) => {
|
||||
self.write_adjustment(self_expr.id,
|
||||
AdjustDerefRef(AutoDerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: Some(AutoPtr(region, mutbl)),
|
||||
unsize: if unsize {
|
||||
Some(transformed_self_ty)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
}));
|
||||
Some(AutoBorrow::Ref(region, mutbl))
|
||||
}
|
||||
|
||||
_ => {
|
||||
@ -331,7 +322,16 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
"unexpected explicit self type in operator method: {:?}",
|
||||
method_ty.explicit_self);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
self.write_adjustment(self_expr.id, Adjustment {
|
||||
kind: Adjust::DerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: autoref,
|
||||
unsize: unsize
|
||||
},
|
||||
target: transformed_self_ty
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -777,7 +777,7 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
|
||||
check_union(ccx, it.id, it.span);
|
||||
}
|
||||
hir::ItemTy(_, ref generics) => {
|
||||
let pty_ty = ccx.tcx.node_id_to_type(it.id);
|
||||
let pty_ty = ccx.tcx.tables().node_id_to_type(it.id);
|
||||
check_bounds_are_used(ccx, generics, pty_ty);
|
||||
}
|
||||
hir::ItemForeignMod(ref m) => {
|
||||
@ -1205,7 +1205,7 @@ fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
sp: Span,
|
||||
item_id: ast::NodeId)
|
||||
-> bool {
|
||||
let rty = tcx.node_id_to_type(item_id);
|
||||
let rty = tcx.tables().node_id_to_type(item_id);
|
||||
|
||||
// Check that it is possible to represent this type. This call identifies
|
||||
// (1) types that contain themselves and (2) types that contain a different
|
||||
@ -1224,7 +1224,7 @@ fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
}
|
||||
|
||||
pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, id: ast::NodeId) {
|
||||
let t = tcx.node_id_to_type(id);
|
||||
let t = tcx.tables().node_id_to_type(id);
|
||||
match t.sty {
|
||||
ty::TyAdt(def, substs) if def.is_struct() => {
|
||||
let fields = &def.struct_variant().fields;
|
||||
@ -1581,20 +1581,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
|
||||
pub fn write_autoderef_adjustment(&self,
|
||||
node_id: ast::NodeId,
|
||||
derefs: usize) {
|
||||
self.write_adjustment(
|
||||
node_id,
|
||||
adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
|
||||
derefs: usize,
|
||||
adjusted_ty: Ty<'tcx>) {
|
||||
self.write_adjustment(node_id, adjustment::Adjustment {
|
||||
kind: adjustment::Adjust::DerefRef {
|
||||
autoderefs: derefs,
|
||||
autoref: None,
|
||||
unsize: None
|
||||
})
|
||||
);
|
||||
unsize: false
|
||||
},
|
||||
target: adjusted_ty
|
||||
});
|
||||
}
|
||||
|
||||
pub fn write_adjustment(&self,
|
||||
node_id: ast::NodeId,
|
||||
adj: adjustment::AutoAdjustment<'tcx>) {
|
||||
adj: adjustment::Adjustment<'tcx>) {
|
||||
debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
|
||||
|
||||
if adj.is_identity() {
|
||||
@ -1760,21 +1761,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
t
|
||||
}
|
||||
|
||||
/// Apply `adjustment` to the type of `expr`
|
||||
pub fn adjust_expr_ty(&self,
|
||||
expr: &hir::Expr,
|
||||
adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
let raw_ty = self.node_ty(expr.id);
|
||||
let raw_ty = self.shallow_resolve(raw_ty);
|
||||
let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty);
|
||||
raw_ty.adjust(self.tcx, expr.span, expr.id, adjustment, |method_call| {
|
||||
self.tables.borrow().method_map.get(&method_call)
|
||||
.map(|method| resolve_ty(method.ty))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
|
||||
match self.tables.borrow().node_types.get(&id) {
|
||||
Some(&t) => t,
|
||||
@ -2311,7 +2297,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
debug!("try_index_step: success, using built-in indexing");
|
||||
// If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
|
||||
assert!(!unsize);
|
||||
self.write_autoderef_adjustment(base_expr.id, autoderefs);
|
||||
self.write_autoderef_adjustment(base_expr.id, autoderefs, adjusted_ty);
|
||||
return Some((tcx.types.usize, ty));
|
||||
}
|
||||
_ => {}
|
||||
@ -2867,9 +2853,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
|
||||
// In case we did perform an adjustment, we have to update
|
||||
// the type of the block, because old trans still uses it.
|
||||
let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
|
||||
if res.is_ok() && adj.is_some() {
|
||||
self.write_ty(then_blk.id, self.adjust_expr_ty(then, adj.as_ref()));
|
||||
if res.is_ok() {
|
||||
let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
|
||||
if let Some(adj) = adj {
|
||||
self.write_ty(then_blk.id, adj.target);
|
||||
}
|
||||
}
|
||||
|
||||
res
|
||||
@ -2930,7 +2918,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
let field_ty = self.field_ty(expr.span, field, substs);
|
||||
if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
|
||||
autoderef.finalize(lvalue_pref, Some(base));
|
||||
self.write_autoderef_adjustment(base.id, autoderefs);
|
||||
self.write_autoderef_adjustment(base.id, autoderefs, base_t);
|
||||
return field_ty;
|
||||
}
|
||||
private_candidate = Some((base_def.did, field_ty));
|
||||
@ -3048,7 +3036,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
|
||||
if let Some(field_ty) = field {
|
||||
autoderef.finalize(lvalue_pref, Some(base));
|
||||
self.write_autoderef_adjustment(base.id, autoderefs);
|
||||
self.write_autoderef_adjustment(base.id, autoderefs, base_t);
|
||||
return field_ty;
|
||||
}
|
||||
}
|
||||
@ -3252,6 +3240,16 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
Def::Struct(..) | Def::Union(..) | Def::TyAlias(..) |
|
||||
Def::AssociatedTy(..) | Def::SelfTy(..) => {
|
||||
match def {
|
||||
Def::AssociatedTy(..) | Def::SelfTy(..)
|
||||
if !self.tcx.sess.features.borrow().more_struct_aliases => {
|
||||
emit_feature_err(&self.tcx.sess.parse_sess,
|
||||
"more_struct_aliases", path.span, GateIssue::Language,
|
||||
"`Self` and associated types in struct \
|
||||
expressions and patterns are unstable");
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
match ty.sty {
|
||||
ty::TyAdt(adt, substs) if !adt.is_enum() => {
|
||||
Some((adt.struct_variant(), adt.did, substs))
|
||||
@ -3358,8 +3356,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
if ty.is_never() {
|
||||
if let Some(hir::map::NodeExpr(_)) = self.tcx.map.find(expr.id) {
|
||||
let adj_ty = self.next_diverging_ty_var();
|
||||
let adj = adjustment::AdjustNeverToAny(adj_ty);
|
||||
self.write_adjustment(expr.id, adj);
|
||||
self.write_adjustment(expr.id, adjustment::Adjustment {
|
||||
kind: adjustment::Adjust::NeverToAny,
|
||||
target: adj_ty
|
||||
});
|
||||
return adj_ty;
|
||||
}
|
||||
}
|
||||
|
@ -259,23 +259,10 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
self.resolve_type(t)
|
||||
}
|
||||
|
||||
fn resolve_method_type(&self, method_call: MethodCall) -> Option<Ty<'tcx>> {
|
||||
let method_ty = self.tables.borrow().method_map
|
||||
.get(&method_call).map(|method| method.ty);
|
||||
method_ty.map(|method_ty| self.resolve_type(method_ty))
|
||||
}
|
||||
|
||||
/// Try to resolve the type for the given node.
|
||||
pub fn resolve_expr_type_adjusted(&mut self, expr: &hir::Expr) -> Ty<'tcx> {
|
||||
let ty_unadjusted = self.resolve_node_type(expr.id);
|
||||
if ty_unadjusted.references_error() {
|
||||
ty_unadjusted
|
||||
} else {
|
||||
ty_unadjusted.adjust(
|
||||
self.tcx, expr.span, expr.id,
|
||||
self.tables.borrow().adjustments.get(&expr.id),
|
||||
|method_call| self.resolve_method_type(method_call))
|
||||
}
|
||||
let ty = self.tables.borrow().expr_ty_adjusted(expr);
|
||||
self.resolve_type(ty)
|
||||
}
|
||||
|
||||
fn visit_fn_body(&mut self,
|
||||
@ -553,10 +540,8 @@ impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
let adjustment = self.tables.borrow().adjustments.get(&expr.id).map(|a| a.clone());
|
||||
if let Some(adjustment) = adjustment {
|
||||
debug!("adjustment={:?}", adjustment);
|
||||
match adjustment {
|
||||
adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
|
||||
autoderefs, ref autoref, ..
|
||||
}) => {
|
||||
match adjustment.kind {
|
||||
adjustment::Adjust::DerefRef { autoderefs, ref autoref, .. } => {
|
||||
let expr_ty = self.resolve_node_type(expr.id);
|
||||
self.constrain_autoderefs(expr, autoderefs, expr_ty);
|
||||
if let Some(ref autoref) = *autoref {
|
||||
@ -946,7 +931,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
let origin = infer::ParameterOrigin::OverloadedDeref;
|
||||
self.substs_wf_in_scope(origin, method.substs, deref_expr.span, r_deref_expr);
|
||||
|
||||
// Treat overloaded autoderefs as if an AutoRef adjustment
|
||||
// Treat overloaded autoderefs as if an AutoBorrow adjustment
|
||||
// was applied on the base type, as that is always the case.
|
||||
let fn_sig = method.ty.fn_sig();
|
||||
let fn_sig = // late-bound regions should have been instantiated
|
||||
@ -1060,15 +1045,12 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
id: ast::NodeId,
|
||||
minimum_lifetime: &'tcx ty::Region)
|
||||
{
|
||||
let tcx = self.tcx;
|
||||
|
||||
// Try to resolve the type. If we encounter an error, then typeck
|
||||
// is going to fail anyway, so just stop here and let typeck
|
||||
// report errors later on in the writeback phase.
|
||||
let ty0 = self.resolve_node_type(id);
|
||||
let ty = ty0.adjust(tcx, origin.span(), id,
|
||||
self.tables.borrow().adjustments.get(&id),
|
||||
|method_call| self.resolve_method_type(method_call));
|
||||
let ty = self.tables.borrow().adjustments.get(&id).map_or(ty0, |adj| adj.target);
|
||||
let ty = self.resolve_type(ty);
|
||||
debug!("constrain_regions_in_type_of_node(\
|
||||
ty={}, ty0={}, id={}, minimum_lifetime={:?})",
|
||||
ty, ty0,
|
||||
@ -1165,7 +1147,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
fn link_autoref(&self,
|
||||
expr: &hir::Expr,
|
||||
autoderefs: usize,
|
||||
autoref: &adjustment::AutoRef<'tcx>)
|
||||
autoref: &adjustment::AutoBorrow<'tcx>)
|
||||
{
|
||||
debug!("link_autoref(autoref={:?})", autoref);
|
||||
let mc = mc::MemCategorizationContext::new(self);
|
||||
@ -1173,12 +1155,12 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
|
||||
debug!("expr_cmt={:?}", expr_cmt);
|
||||
|
||||
match *autoref {
|
||||
adjustment::AutoPtr(r, m) => {
|
||||
adjustment::AutoBorrow::Ref(r, m) => {
|
||||
self.link_region(expr.span, r,
|
||||
ty::BorrowKind::from_mutbl(m), expr_cmt);
|
||||
}
|
||||
|
||||
adjustment::AutoUnsafe(m) => {
|
||||
adjustment::AutoBorrow::RawPtr(m) => {
|
||||
let r = self.tcx.node_scope_region(expr.id);
|
||||
self.link_region(expr.span, r, ty::BorrowKind::from_mutbl(m), expr_cmt);
|
||||
}
|
||||
|
@ -416,7 +416,7 @@ impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> {
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let self_ty = fcx.tcx.node_id_to_type(item.id);
|
||||
let self_ty = fcx.tcx.tables().node_id_to_type(item.id);
|
||||
let self_ty = fcx.instantiate_type_scheme(item.span, free_substs, &self_ty);
|
||||
fcx.register_wf_obligation(self_ty, ast_self_ty.span, this.code.clone());
|
||||
}
|
||||
@ -519,7 +519,7 @@ impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> {
|
||||
item: &hir::Item,
|
||||
ast_generics: &hir::Generics)
|
||||
{
|
||||
let ty = self.tcx().node_id_to_type(item.id);
|
||||
let ty = self.tcx().tables().node_id_to_type(item.id);
|
||||
if self.tcx().has_error_field(ty) {
|
||||
return;
|
||||
}
|
||||
@ -649,7 +649,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
let fields =
|
||||
struct_def.fields().iter()
|
||||
.map(|field| {
|
||||
let field_ty = self.tcx.node_id_to_type(field.id);
|
||||
let field_ty = self.tcx.tables().node_id_to_type(field.id);
|
||||
let field_ty = self.instantiate_type_scheme(field.span,
|
||||
&self.parameter_environment
|
||||
.free_substs,
|
||||
|
@ -229,7 +229,7 @@ impl<'cx, 'gcx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'gcx, 'tcx> {
|
||||
debug!("Type for pattern binding {} (id {}) resolved to {:?}",
|
||||
pat_to_string(p),
|
||||
p.id,
|
||||
self.tcx().node_id_to_type(p.id));
|
||||
self.tcx().tables().node_id_to_type(p.id));
|
||||
|
||||
intravisit::walk_pat(self, p);
|
||||
}
|
||||
@ -381,36 +381,40 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
Some(adjustment) => {
|
||||
let resolved_adjustment = match adjustment {
|
||||
adjustment::AdjustNeverToAny(ty) => {
|
||||
adjustment::AdjustNeverToAny(self.resolve(&ty, reason))
|
||||
let resolved_adjustment = match adjustment.kind {
|
||||
adjustment::Adjust::NeverToAny => {
|
||||
adjustment::Adjust::NeverToAny
|
||||
}
|
||||
|
||||
adjustment::AdjustReifyFnPointer => {
|
||||
adjustment::AdjustReifyFnPointer
|
||||
adjustment::Adjust::ReifyFnPointer => {
|
||||
adjustment::Adjust::ReifyFnPointer
|
||||
}
|
||||
|
||||
adjustment::AdjustMutToConstPointer => {
|
||||
adjustment::AdjustMutToConstPointer
|
||||
adjustment::Adjust::MutToConstPointer => {
|
||||
adjustment::Adjust::MutToConstPointer
|
||||
}
|
||||
|
||||
adjustment::AdjustUnsafeFnPointer => {
|
||||
adjustment::AdjustUnsafeFnPointer
|
||||
adjustment::Adjust::UnsafeFnPointer => {
|
||||
adjustment::Adjust::UnsafeFnPointer
|
||||
}
|
||||
|
||||
adjustment::AdjustDerefRef(adj) => {
|
||||
for autoderef in 0..adj.autoderefs {
|
||||
adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
|
||||
for autoderef in 0..autoderefs {
|
||||
let method_call = MethodCall::autoderef(id, autoderef as u32);
|
||||
self.visit_method_map_entry(reason, method_call);
|
||||
}
|
||||
|
||||
adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
|
||||
autoderefs: adj.autoderefs,
|
||||
autoref: self.resolve(&adj.autoref, reason),
|
||||
unsize: self.resolve(&adj.unsize, reason),
|
||||
})
|
||||
adjustment::Adjust::DerefRef {
|
||||
autoderefs: autoderefs,
|
||||
autoref: self.resolve(&autoref, reason),
|
||||
unsize: unsize,
|
||||
}
|
||||
}
|
||||
};
|
||||
let resolved_adjustment = adjustment::Adjustment {
|
||||
kind: resolved_adjustment,
|
||||
target: self.resolve(&adjustment.target, reason)
|
||||
};
|
||||
debug!("Adjustments for node {}: {:?}", id, resolved_adjustment);
|
||||
self.tcx().tables.borrow_mut().adjustments.insert(
|
||||
id, resolved_adjustment);
|
||||
|
@ -1724,16 +1724,15 @@ fn add_unsized_bound<'gcx: 'tcx, 'tcx>(astconv: &AstConv<'gcx, 'tcx>,
|
||||
match unbound {
|
||||
Some(ref tpb) => {
|
||||
// FIXME(#8559) currently requires the unbound to be built-in.
|
||||
let trait_def_id = tcx.expect_def(tpb.ref_id).def_id();
|
||||
match kind_id {
|
||||
Ok(kind_id) if trait_def_id != kind_id => {
|
||||
if let Ok(kind_id) = kind_id {
|
||||
let trait_def = tcx.expect_def(tpb.ref_id);
|
||||
if trait_def != Def::Trait(kind_id) {
|
||||
tcx.sess.span_warn(span,
|
||||
"default bound relaxed for a type parameter, but \
|
||||
this does nothing because the given bound is not \
|
||||
a default. Only `?Sized` is supported");
|
||||
tcx.try_add_builtin_trait(kind_id, bounds);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
_ if kind_id.is_ok() => {
|
||||
|
@ -211,7 +211,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt,
|
||||
main_id: ast::NodeId,
|
||||
main_span: Span) {
|
||||
let tcx = ccx.tcx;
|
||||
let main_t = tcx.node_id_to_type(main_id);
|
||||
let main_t = tcx.tables().node_id_to_type(main_id);
|
||||
match main_t.sty {
|
||||
ty::TyFnDef(..) => {
|
||||
match tcx.map.find(main_id) {
|
||||
@ -263,7 +263,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
|
||||
start_id: ast::NodeId,
|
||||
start_span: Span) {
|
||||
let tcx = ccx.tcx;
|
||||
let start_t = tcx.node_id_to_type(start_id);
|
||||
let start_t = tcx.tables().node_id_to_type(start_id);
|
||||
match start_t.sty {
|
||||
ty::TyFnDef(..) => {
|
||||
match tcx.map.find(start_id) {
|
||||
|
@ -126,23 +126,28 @@ pub enum ErrorKind {
|
||||
InvalidInput,
|
||||
/// Data not valid for the operation were encountered.
|
||||
///
|
||||
/// Unlike `InvalidInput`, this typically means that the operation
|
||||
/// Unlike [`InvalidInput`], this typically means that the operation
|
||||
/// parameters were valid, however the error was caused by malformed
|
||||
/// input data.
|
||||
///
|
||||
/// For example, a function that reads a file into a string will error with
|
||||
/// `InvalidData` if the file's contents are not valid UTF-8.
|
||||
///
|
||||
/// [`InvalidInput`]: #variant.InvalidInput
|
||||
#[stable(feature = "io_invalid_data", since = "1.2.0")]
|
||||
InvalidData,
|
||||
/// The I/O operation's timeout expired, causing it to be canceled.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
TimedOut,
|
||||
/// An error returned when an operation could not be completed because a
|
||||
/// call to `write` returned `Ok(0)`.
|
||||
/// call to [`write()`] returned [`Ok(0)`].
|
||||
///
|
||||
/// This typically means that an operation could only succeed if it wrote a
|
||||
/// particular number of bytes but only a smaller number of bytes could be
|
||||
/// written.
|
||||
///
|
||||
/// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
|
||||
/// [`Ok(0)`]: ../../std/io/type.Result.html
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
WriteZero,
|
||||
/// This operation was interrupted.
|
||||
|
@ -1173,6 +1173,13 @@ impl From<OsString> for PathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "from_path_buf_for_os_string", since = "1.14.0")]
|
||||
impl From<PathBuf> for OsString {
|
||||
fn from(path_buf : PathBuf) -> OsString {
|
||||
path_buf.inner
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl From<String> for PathBuf {
|
||||
fn from(s: String) -> PathBuf {
|
||||
@ -1283,13 +1290,6 @@ impl AsRef<OsStr> for PathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl Into<OsString> for PathBuf {
|
||||
fn into(self) -> OsString {
|
||||
self.inner
|
||||
}
|
||||
}
|
||||
|
||||
/// A slice of a path (akin to [`str`]).
|
||||
///
|
||||
/// This type supports a number of operations for inspecting a path, including
|
||||
|
@ -350,11 +350,19 @@ mod imp {
|
||||
|
||||
#[link(name = "magenta")]
|
||||
extern {
|
||||
fn mx_cprng_draw(buffer: *mut u8, len: usize) -> isize;
|
||||
fn mx_cprng_draw(buffer: *mut u8, len: usize, actual: *mut usize) -> i32;
|
||||
}
|
||||
|
||||
fn getrandom(buf: &mut [u8]) -> isize {
|
||||
unsafe { mx_cprng_draw(buf.as_mut_ptr(), buf.len()) }
|
||||
fn getrandom(buf: &mut [u8]) -> Result<usize, i32> {
|
||||
unsafe {
|
||||
let mut actual = 0;
|
||||
let status = mx_cprng_draw(buf.as_mut_ptr(), buf.len(), &mut actual);
|
||||
if status == 0 {
|
||||
Ok(actual)
|
||||
} else {
|
||||
Err(status)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct OsRng {
|
||||
@ -381,12 +389,16 @@ mod imp {
|
||||
let mut buf = v;
|
||||
while !buf.is_empty() {
|
||||
let ret = getrandom(buf);
|
||||
if ret < 0 {
|
||||
panic!("kernel mx_cprng_draw call failed! (returned {}, buf.len() {})",
|
||||
ret, buf.len());
|
||||
match ret {
|
||||
Err(err) => {
|
||||
panic!("kernel mx_cprng_draw call failed! (returned {}, buf.len() {})",
|
||||
err, buf.len())
|
||||
}
|
||||
Ok(actual) => {
|
||||
let move_buf = buf;
|
||||
buf = &mut move_buf[(actual as usize)..];
|
||||
}
|
||||
}
|
||||
let move_buf = buf;
|
||||
buf = &mut move_buf[(ret as usize)..];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1050,7 +1050,7 @@ pub enum ExprKind {
|
||||
Ret(Option<P<Expr>>),
|
||||
|
||||
/// Output of the `asm!()` macro
|
||||
InlineAsm(InlineAsm),
|
||||
InlineAsm(P<InlineAsm>),
|
||||
|
||||
/// A macro invocation; pre-expansion
|
||||
Mac(Mac),
|
||||
|
@ -615,7 +615,9 @@ impl<'a> ExtCtxt<'a> {
|
||||
|
||||
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
|
||||
-> parser::Parser<'a> {
|
||||
parse::tts_to_parser(self.parse_sess, tts.to_vec())
|
||||
let mut parser = parse::tts_to_parser(self.parse_sess, tts.to_vec());
|
||||
parser.allow_interpolated_tts = false; // FIXME(jseyfried) `quote!` can't handle these yet
|
||||
parser
|
||||
}
|
||||
pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
|
||||
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
|
||||
|
@ -80,67 +80,71 @@ pub mod rt {
|
||||
|
||||
impl ToTokens for ast::Path {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP,
|
||||
token::Interpolated(token::NtPath(Box::new(self.clone()))))]
|
||||
let nt = token::NtPath(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Ty {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
|
||||
let nt = token::NtTy(P(self.clone()));
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Block {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
|
||||
let nt = token::NtBlock(P(self.clone()));
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Generics {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtGenerics(self.clone())))]
|
||||
let nt = token::NtGenerics(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::WhereClause {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP,
|
||||
token::Interpolated(token::NtWhereClause(self.clone())))]
|
||||
let nt = token::NtWhereClause(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for P<ast::Item> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtItem(self.clone())))]
|
||||
let nt = token::NtItem(self.clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::ImplItem {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span,
|
||||
token::Interpolated(token::NtImplItem(P(self.clone()))))]
|
||||
let nt = token::NtImplItem(self.clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for P<ast::ImplItem> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
|
||||
let nt = token::NtImplItem((**self).clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::TraitItem {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span,
|
||||
token::Interpolated(token::NtTraitItem(P(self.clone()))))]
|
||||
let nt = token::NtTraitItem(self.clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Stmt {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
let mut tts = vec![
|
||||
TokenTree::Token(self.span, token::Interpolated(token::NtStmt(P(self.clone()))))
|
||||
];
|
||||
let nt = token::NtStmt(self.clone());
|
||||
let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];
|
||||
|
||||
// Some statements require a trailing semicolon.
|
||||
if classify::stmt_ends_with_semi(&self.node) {
|
||||
@ -153,31 +157,36 @@ pub mod rt {
|
||||
|
||||
impl ToTokens for P<ast::Expr> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtExpr(self.clone())))]
|
||||
let nt = token::NtExpr(self.clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for P<ast::Pat> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(token::NtPat(self.clone())))]
|
||||
let nt = token::NtPat(self.clone());
|
||||
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Arm {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
|
||||
let nt = token::NtArm(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ast::Arg {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtArg(self.clone())))]
|
||||
let nt = token::NtArg(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for P<ast::Block> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtBlock(self.clone())))]
|
||||
let nt = token::NtBlock(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
@ -204,7 +213,8 @@ pub mod rt {
|
||||
|
||||
impl ToTokens for P<ast::MetaItem> {
|
||||
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
|
||||
let nt = token::NtMeta(self.clone());
|
||||
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -89,7 +89,6 @@ use parse::token::{DocComment, MatchNt, SubstNt};
|
||||
use parse::token::{Token, Nonterminal};
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use tokenstream::{self, TokenTree};
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
@ -198,7 +197,7 @@ pub fn initial_matcher_pos(ms: Vec<TokenTree>, sep: Option<Token>, lo: BytePos)
|
||||
|
||||
pub enum NamedMatch {
|
||||
MatchedSeq(Vec<Rc<NamedMatch>>, syntax_pos::Span),
|
||||
MatchedNonterminal(Nonterminal)
|
||||
MatchedNonterminal(Rc<Nonterminal>)
|
||||
}
|
||||
|
||||
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
|
||||
@ -279,17 +278,16 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
|
||||
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(),
|
||||
None,
|
||||
rdr.peek().sp.lo));
|
||||
pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
|
||||
let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
|
||||
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));
|
||||
|
||||
loop {
|
||||
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
|
||||
let mut next_eis = Vec::new(); // or proceed normally
|
||||
let mut eof_eis = Vec::new();
|
||||
|
||||
let TokenAndSpan { tok, sp } = rdr.peek();
|
||||
let (sp, tok) = (parser.span, parser.token.clone());
|
||||
|
||||
/* we append new items to this while we go */
|
||||
loop {
|
||||
@ -474,23 +472,19 @@ pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedPars
|
||||
while !next_eis.is_empty() {
|
||||
cur_eis.push(next_eis.pop().unwrap());
|
||||
}
|
||||
rdr.next_token();
|
||||
parser.bump();
|
||||
} else /* bb_eis.len() == 1 */ {
|
||||
rdr.next_tok = {
|
||||
let mut rust_parser = Parser::new(sess, Box::new(&mut rdr));
|
||||
let mut ei = bb_eis.pop().unwrap();
|
||||
if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
|
||||
let match_cur = ei.match_cur;
|
||||
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
|
||||
parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
|
||||
ei.idx += 1;
|
||||
ei.match_cur += 1;
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
cur_eis.push(ei);
|
||||
Some(TokenAndSpan { tok: rust_parser.token, sp: rust_parser.span })
|
||||
};
|
||||
let mut ei = bb_eis.pop().unwrap();
|
||||
if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
|
||||
let match_cur = ei.match_cur;
|
||||
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
|
||||
Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
|
||||
ei.idx += 1;
|
||||
ei.match_cur += 1;
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
cur_eis.push(ei);
|
||||
}
|
||||
}
|
||||
|
||||
@ -502,10 +496,19 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
|
||||
match name {
|
||||
"tt" => {
|
||||
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
|
||||
let res: ::parse::PResult<'a, _> = p.parse_token_tree();
|
||||
let res = token::NtTT(P(panictry!(res)));
|
||||
let mut tt = panictry!(p.parse_token_tree());
|
||||
p.quote_depth -= 1;
|
||||
return res;
|
||||
loop {
|
||||
let nt = match tt {
|
||||
TokenTree::Token(_, token::Interpolated(ref nt)) => nt.clone(),
|
||||
_ => break,
|
||||
};
|
||||
match *nt {
|
||||
token::NtTT(ref sub_tt) => tt = sub_tt.clone(),
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
return token::NtTT(tt);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -521,7 +524,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
|
||||
},
|
||||
"block" => token::NtBlock(panictry!(p.parse_block())),
|
||||
"stmt" => match panictry!(p.parse_stmt()) {
|
||||
Some(s) => token::NtStmt(P(s)),
|
||||
Some(s) => token::NtStmt(s),
|
||||
None => {
|
||||
p.fatal("expected a statement").emit();
|
||||
panic!(FatalError);
|
||||
@ -534,7 +537,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
|
||||
"ident" => match p.token {
|
||||
token::Ident(sn) => {
|
||||
p.bump();
|
||||
token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
|
||||
token::NtIdent(Spanned::<Ident>{node: sn, span: p.span})
|
||||
}
|
||||
_ => {
|
||||
let token_str = pprust::token_to_string(&p.token);
|
||||
@ -544,7 +547,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
|
||||
}
|
||||
},
|
||||
"path" => {
|
||||
token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
|
||||
token::NtPath(panictry!(p.parse_path(PathStyle::Type)))
|
||||
},
|
||||
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
|
||||
// this is not supposed to happen, since it has been checked
|
||||
|
@ -236,12 +236,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
|
||||
// Extract the arguments:
|
||||
let lhses = match **argument_map.get(&lhs_nm).unwrap() {
|
||||
MatchedSeq(ref s, _) => {
|
||||
s.iter().map(|m| match **m {
|
||||
MatchedNonterminal(NtTT(ref tt)) => {
|
||||
valid &= check_lhs_nt_follows(sess, tt);
|
||||
(**tt).clone()
|
||||
s.iter().map(|m| {
|
||||
if let MatchedNonterminal(ref nt) = **m {
|
||||
if let NtTT(ref tt) = **nt {
|
||||
valid &= check_lhs_nt_follows(sess, tt);
|
||||
return (*tt).clone();
|
||||
}
|
||||
}
|
||||
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
|
||||
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
|
||||
}).collect::<Vec<TokenTree>>()
|
||||
}
|
||||
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
|
||||
@ -249,9 +251,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
|
||||
|
||||
let rhses = match **argument_map.get(&rhs_nm).unwrap() {
|
||||
MatchedSeq(ref s, _) => {
|
||||
s.iter().map(|m| match **m {
|
||||
MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
|
||||
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
|
||||
s.iter().map(|m| {
|
||||
if let MatchedNonterminal(ref nt) = **m {
|
||||
if let NtTT(ref tt) = **nt {
|
||||
return (*tt).clone();
|
||||
}
|
||||
}
|
||||
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
|
||||
}).collect()
|
||||
}
|
||||
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
|
||||
|
@ -12,9 +12,7 @@ use self::LockstepIterSize::*;
|
||||
use ast::Ident;
|
||||
use errors::{Handler, DiagnosticBuilder};
|
||||
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
|
||||
use parse::token::{DocComment, MatchNt, SubstNt};
|
||||
use parse::token::{Token, Interpolated, NtIdent, NtTT};
|
||||
use parse::token;
|
||||
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
|
||||
use parse::lexer::TokenAndSpan;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use tokenstream::{self, TokenTree};
|
||||
@ -46,9 +44,7 @@ pub struct TtReader<'a> {
|
||||
/* cached: */
|
||||
pub cur_tok: Token,
|
||||
pub cur_span: Span,
|
||||
pub next_tok: Option<TokenAndSpan>,
|
||||
/// Transform doc comments. Only useful in macro invocations
|
||||
pub desugar_doc_comments: bool,
|
||||
pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
|
||||
}
|
||||
|
||||
@ -59,20 +55,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
|
||||
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
|
||||
src: Vec<tokenstream::TokenTree>)
|
||||
-> TtReader {
|
||||
new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
|
||||
}
|
||||
|
||||
/// The extra `desugar_doc_comments` flag enables reading doc comments
|
||||
/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
|
||||
///
|
||||
/// This can do Macro-By-Example transcription. On the other hand, if
|
||||
/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
|
||||
/// (and should) be None.
|
||||
pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
|
||||
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
|
||||
src: Vec<tokenstream::TokenTree>,
|
||||
desugar_doc_comments: bool)
|
||||
-> TtReader {
|
||||
let mut r = TtReader {
|
||||
sp_diag: sp_diag,
|
||||
stack: SmallVector::one(TtFrame {
|
||||
@ -91,11 +73,9 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
|
||||
},
|
||||
repeat_idx: Vec::new(),
|
||||
repeat_len: Vec::new(),
|
||||
desugar_doc_comments: desugar_doc_comments,
|
||||
/* dummy values, never read: */
|
||||
cur_tok: token::Eof,
|
||||
cur_span: DUMMY_SP,
|
||||
next_tok: None,
|
||||
fatal_errs: Vec::new(),
|
||||
};
|
||||
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
|
||||
@ -174,9 +154,6 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
|
||||
/// Return the next token from the TtReader.
|
||||
/// EFFECT: advances the reader's token field
|
||||
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
if let Some(tok) = r.next_tok.take() {
|
||||
return tok;
|
||||
}
|
||||
// FIXME(pcwalton): Bad copy?
|
||||
let ret_val = TokenAndSpan {
|
||||
tok: r.cur_tok.clone(),
|
||||
@ -269,47 +246,35 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
}
|
||||
// FIXME #2887: think about span stuff here
|
||||
TokenTree::Token(sp, SubstNt(ident)) => {
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
match lookup_cur_matched(r, ident) {
|
||||
None => {
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = SubstNt(ident);
|
||||
return ret_val;
|
||||
// this can't be 0 length, just like TokenTree::Delimited
|
||||
}
|
||||
Some(cur_matched) => {
|
||||
match *cur_matched {
|
||||
Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
|
||||
match **nt {
|
||||
// sidestep the interpolation tricks for ident because
|
||||
// (a) idents can be in lots of places, so it'd be a pain
|
||||
// (b) we actually can, since it's a token.
|
||||
MatchedNonterminal(NtIdent(ref sn)) => {
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
NtIdent(ref sn) => {
|
||||
r.cur_span = sn.span;
|
||||
r.cur_tok = token::Ident(sn.node);
|
||||
return ret_val;
|
||||
}
|
||||
MatchedNonterminal(NtTT(ref tt)) => {
|
||||
r.stack.push(TtFrame {
|
||||
forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
|
||||
idx: 0,
|
||||
dotdotdoted: false,
|
||||
sep: None,
|
||||
});
|
||||
}
|
||||
MatchedNonterminal(ref other_whole_nt) => {
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
_ => {
|
||||
// FIXME(pcwalton): Bad copy.
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = Interpolated((*other_whole_nt).clone());
|
||||
r.cur_tok = token::Interpolated(nt.clone());
|
||||
return ret_val;
|
||||
}
|
||||
MatchedSeq(..) => {
|
||||
panic!(r.sp_diag.span_fatal(
|
||||
sp, /* blame the macro writer */
|
||||
&format!("variable '{}' is still repeating at this depth",
|
||||
ident)));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
panic!(r.sp_diag.span_fatal(
|
||||
sp, /* blame the macro writer */
|
||||
&format!("variable '{}' is still repeating at this depth", ident)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -324,14 +289,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
});
|
||||
// if this could be 0-length, we'd need to potentially recur here
|
||||
}
|
||||
TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
|
||||
r.stack.push(TtFrame {
|
||||
forest: TokenTree::Token(sp, DocComment(name)),
|
||||
idx: 0,
|
||||
dotdotdoted: false,
|
||||
sep: None
|
||||
});
|
||||
}
|
||||
TokenTree::Token(sp, tok) => {
|
||||
r.cur_span = sp;
|
||||
r.cur_tok = tok;
|
||||
|
@ -309,6 +309,12 @@ declare_features! (
|
||||
|
||||
// Allows field shorthands (`x` meaning `x: x`) in struct literal expressions.
|
||||
(active, field_init_shorthand, "1.14.0", Some(37340)),
|
||||
|
||||
// The #![windows_subsystem] attribute
|
||||
(active, windows_subsystem, "1.14.0", Some(37499)),
|
||||
|
||||
// Allows using `Self` and associated types in struct expressions and patterns.
|
||||
(active, more_struct_aliases, "1.14.0", Some(37544)),
|
||||
);
|
||||
|
||||
declare_features! (
|
||||
@ -713,6 +719,12 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat
|
||||
"defining reflective traits is still evolving",
|
||||
cfg_fn!(reflect))),
|
||||
|
||||
("windows_subsystem", Whitelisted, Gated(Stability::Unstable,
|
||||
"windows_subsystem",
|
||||
"the windows subsystem attribute \
|
||||
is currently unstable",
|
||||
cfg_fn!(windows_subsystem))),
|
||||
|
||||
// Crate level attributes
|
||||
("crate_name", CrateLevel, Ungated),
|
||||
("crate_type", CrateLevel, Ungated),
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user