Mirror of https://github.com/rust-lang/rust.git (synced 2025-04-28 02:57:37 +00:00)

Merge pull request #26 from rust-lang/master: Sync to rust-lang/rust master

This commit is contained in: commit cd88dae211

.gitmodules (vendored), 2 lines changed
@@ -43,7 +43,7 @@
[submodule "src/llvm-project"]
path = src/llvm-project
url = https://github.com/rust-lang/llvm-project.git
branch = rustc/9.0-2019-07-12
branch = rustc/9.0-2019-09-19
[submodule "src/doc/embedded-book"]
path = src/doc/embedded-book
url = https://github.com/rust-embedded/book.git
Cargo.lock, 30 lines changed
@@ -455,7 +455,7 @@ dependencies = [
"itertools 0.8.0",
"lazy_static 1.3.0",
"matches",
"pulldown-cmark",
"pulldown-cmark 0.6.0",
"quine-mc_cluskey",
"regex-syntax",
"semver",
@@ -1637,7 +1637,7 @@ dependencies = [
"num_cpus",
"tokio",
"tokio-codec",
"unicase 2.4.0",
"unicase 2.5.1",
]

[[package]]
@@ -1875,7 +1875,7 @@ dependencies = [
"log",
"memchr",
"open",
"pulldown-cmark",
"pulldown-cmark 0.5.3",
"regex",
"serde",
"serde_derive",
@@ -1897,7 +1897,7 @@ dependencies = [
"log",
"mdbook",
"memchr",
"pulldown-cmark",
"pulldown-cmark 0.5.3",
"rayon",
"regex",
"reqwest",
@@ -1957,7 +1957,7 @@ version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e27ca21f40a310bd06d9031785f4801710d566c184a6e15bad4f1d9b65f9425"
dependencies = [
"unicase 2.4.0",
"unicase 2.5.1",
]

[[package]]
@@ -2561,7 +2561,19 @@ dependencies = [
"bitflags",
"getopts",
"memchr",
"unicase 2.4.0",
"unicase 2.5.1",
]

[[package]]
name = "pulldown-cmark"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85b0ad0d4c1702965ee6bb5b4ff5e71f83850b497d497e9444302987bf9e26a4"
dependencies = [
"bitflags",
"getopts",
"memchr",
"unicase 2.5.1",
]

[[package]]
@@ -3732,7 +3744,7 @@ name = "rustdoc"
version = "0.0.0"
dependencies = [
"minifier",
"pulldown-cmark",
"pulldown-cmark 0.5.3",
"rustc-rayon",
"tempfile",
]
@@ -4739,9 +4751,9 @@ dependencies = [

[[package]]
name = "unicase"
version = "2.4.0"
version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6"
checksum = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
dependencies = [
"version_check",
]
@@ -68,6 +68,13 @@ Misc
- [`rustc` will now warn about some incorrect uses of
  `mem::{uninitialized, zeroed}` that are known to cause undefined behaviour.][63346]

Compatibility Notes
-------------------
- Unfortunately the [`x86_64-unknown-uefi` platform can not be built][62785]
  with rustc 1.39.0.
- The [`armv7-unknown-linux-gnueabihf` platform is also known to have
  issues][62896] for certain crates such as libc.

[60260]: https://github.com/rust-lang/rust/pull/60260/
[61457]: https://github.com/rust-lang/rust/pull/61457/
[61491]: https://github.com/rust-lang/rust/pull/61491/
@@ -79,7 +86,9 @@ Misc
[62735]: https://github.com/rust-lang/rust/pull/62735/
[62766]: https://github.com/rust-lang/rust/pull/62766/
[62784]: https://github.com/rust-lang/rust/pull/62784/
[62785]: https://github.com/rust-lang/rust/issues/62785/
[62814]: https://github.com/rust-lang/rust/pull/62814/
[62896]: https://github.com/rust-lang/rust/issues/62896/
[63000]: https://github.com/rust-lang/rust/pull/63000/
[63056]: https://github.com/rust-lang/rust/pull/63056/
[63107]: https://github.com/rust-lang/rust/pull/63107/
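For context, the `mem::{uninitialized, zeroed}` warning mentioned in the release notes above fires on code like the following. This is an illustrative sketch, not taken from the diff; it compiles but triggers the new warnings:

```rust
use std::mem;

fn main() {
    // A reference must never be null or dangling, so conjuring one out of
    // zeroed or uninitialized memory is undefined behaviour; rustc now warns here.
    let _x: &'static i32 = unsafe { mem::zeroed() };
    // Function pointers must also be non-null, so this is flagged as well
    // (and `mem::uninitialized` is deprecated in favour of `MaybeUninit`).
    let _y: fn() = unsafe { mem::uninitialized() };
}
```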
@@ -18,7 +18,7 @@ use build_helper::{output, t};

use crate::{Compiler, Mode, LLVM_TOOLS};
use crate::channel;
use crate::util::{is_dylib, exe};
use crate::util::{is_dylib, exe, timeit};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::compile;
use crate::tool::{self, Tool};
@@ -91,14 +91,15 @@ impl Step for Docs {

let name = pkgname(builder, "rust-docs");

builder.info(&format!("Dist docs ({})", host));
if !builder.config.docs {
builder.info("\tskipping - docs disabled");
return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
}

builder.default_doc(None);

builder.info(&format!("Dist docs ({})", host));
let _time = timeit(builder);

let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);

@@ -151,9 +152,7 @@ impl Step for RustcDocs {

let name = pkgname(builder, "rustc-docs");

builder.info(&format!("Dist compiler docs ({})", host));
if !builder.config.compiler_docs {
builder.info("\tskipping - compiler docs disabled");
return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
}

@@ -179,6 +178,9 @@ impl Step for RustcDocs {
.arg("--component-name=rustc-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--bulk-dirs=share/doc/rust/html");

builder.info(&format!("Dist compiler docs ({})", host));
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);

@@ -350,6 +352,7 @@ impl Step for Mingw {
}

builder.info(&format!("Dist mingw ({})", host));
let _time = timeit(builder);
let name = pkgname(builder, "rust-mingw");
let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
@@ -403,7 +406,6 @@ impl Step for Rustc {
let compiler = self.compiler;
let host = self.compiler.host;

builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, host));
let name = pkgname(builder, "rustc");
let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image);
@@ -460,6 +462,9 @@ impl Step for Rustc {
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");

builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, host));
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);
builder.remove_dir(&overlay);
@@ -662,8 +667,6 @@ impl Step for Std {
let target = self.target;

let name = pkgname(builder, "rust-std");
builder.info(&format!("Dist std stage{} ({} -> {})",
compiler.stage, &compiler.host, target));

// The only true set of target libraries came from the build triple, so
// let's reduce redundant work by only producing archives from that host.
@@ -714,6 +717,10 @@ impl Step for Std {
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-std-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");

builder.info(&format!("Dist std stage{} ({} -> {})",
compiler.stage, &compiler.host, target));
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);
distdir(builder).join(format!("{}-{}.tar.gz", name, target))
@@ -754,11 +761,9 @@ impl Step for Analysis {
let compiler = self.compiler;
let target = self.target;
assert!(builder.config.extended);
builder.info("Dist analysis");
let name = pkgname(builder, "rust-analysis");

if &compiler.host != builder.config.build {
builder.info("\tskipping, not a build host");
return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
}

@@ -786,6 +791,9 @@ impl Step for Analysis {
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");

builder.info("Dist analysis");
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);
distdir(builder).join(format!("{}-{}.tar.gz", name, target))
@@ -874,8 +882,6 @@ impl Step for Src {

/// Creates the `rust-src` installer component
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.info("Dist src");

let name = pkgname(builder, "rust-src");
let image = tmpdir(builder).join(format!("{}-image", name));
let _ = fs::remove_dir_all(&image);
@@ -930,6 +936,9 @@ impl Step for Src {
.arg(format!("--package-name={}", name))
.arg("--component-name=rust-src")
.arg("--legacy-manifest-dirs=rustlib,cargo");

builder.info("Dist src");
let _time = timeit(builder);
builder.run(&mut cmd);

builder.remove_dir(&image);
@@ -957,8 +966,6 @@ impl Step for PlainSourceTarball {

/// Creates the plain source tarball
fn run(self, builder: &Builder<'_>) -> PathBuf {
builder.info("Create plain source tarball");

// Make sure that the root folder of tarball has the correct name
let plain_name = format!("{}-src", pkgname(builder, "rustc"));
let plain_dst_src = tmpdir(builder).join(&plain_name);
@@ -1020,6 +1027,9 @@ impl Step for PlainSourceTarball {
.arg("--output").arg(&tarball)
.arg("--work-dir=.")
.current_dir(tmpdir(builder));

builder.info("Create plain source tarball");
let _time = timeit(builder);
builder.run(&mut cmd);
distdir(builder).join(&format!("{}.tar.gz", plain_name))
}
@@ -1073,7 +1083,6 @@ impl Step for Cargo {
let compiler = self.compiler;
let target = self.target;

builder.info(&format!("Dist cargo stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/cargo");
let etc = src.join("src/etc");
let release_num = builder.release_num("cargo");
@@ -1126,6 +1135,9 @@ impl Step for Cargo {
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");

builder.info(&format!("Dist cargo stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
distdir(builder).join(format!("{}-{}.tar.gz", name, target))
}
@@ -1161,7 +1173,6 @@ impl Step for Rls {
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist RLS stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/rls");
let release_num = builder.release_num("rls");
let name = pkgname(builder, "rls");
@@ -1210,6 +1221,8 @@ impl Step for Rls {
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rls-preview");

builder.info(&format!("Dist RLS stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
@@ -1245,7 +1258,6 @@ impl Step for Clippy {
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist clippy stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/clippy");
let release_num = builder.release_num("clippy");
let name = pkgname(builder, "clippy");
@@ -1299,6 +1311,8 @@ impl Step for Clippy {
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=clippy-preview");

builder.info(&format!("Dist clippy stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
@@ -1334,7 +1348,6 @@ impl Step for Miri {
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist miri stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/miri");
let release_num = builder.release_num("miri");
let name = pkgname(builder, "miri");
@@ -1389,6 +1402,8 @@ impl Step for Miri {
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=miri-preview");

builder.info(&format!("Dist miri stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
@@ -1423,7 +1438,6 @@ impl Step for Rustfmt {
let compiler = self.compiler;
let target = self.target;

builder.info(&format!("Dist Rustfmt stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/rustfmt");
let release_num = builder.release_num("rustfmt");
let name = pkgname(builder, "rustfmt");
@@ -1476,6 +1490,8 @@ impl Step for Rustfmt {
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rustfmt-preview");

builder.info(&format!("Dist Rustfmt stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
@@ -1576,6 +1592,7 @@ impl Step for Extended {
input_tarballs.push(tarball);
}

builder.info("building combined installer");
let mut cmd = rust_installer(builder);
cmd.arg("combine")
.arg("--product-name=Rust")
@@ -1587,7 +1604,9 @@ impl Step for Extended {
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--input-tarballs").arg(input_tarballs)
.arg("--non-installed-overlay").arg(&overlay);
let time = timeit(&builder);
builder.run(&mut cmd);
drop(time);

let mut license = String::new();
license += &builder.read(&builder.src.join("COPYRIGHT"));
@@ -1643,6 +1662,7 @@ impl Step for Extended {
};

if target.contains("apple-darwin") {
builder.info("building pkg installer");
let pkg = tmp.join("pkg");
let _ = fs::remove_dir_all(&pkg);

@@ -1692,6 +1712,7 @@ impl Step for Extended {
pkgname(builder, "rust"),
target)))
.arg("--package-path").arg(&pkg);
let _time = timeit(builder);
builder.run(&mut cmd);
}

@@ -1742,14 +1763,18 @@ impl Step for Extended {
builder.create(&exe.join("LICENSE.txt"), &license);

// Generate exe installer
builder.info("building `exe` installer with `iscc`");
let mut cmd = Command::new("iscc");
cmd.arg("rust.iss")
.arg("/Q")
.current_dir(&exe);
if target.contains("windows-gnu") {
cmd.arg("/dMINGW");
}
add_env(builder, &mut cmd, target);
let time = timeit(builder);
builder.run(&mut cmd);
drop(time);
builder.install(&exe.join(format!("{}-{}.exe", pkgname(builder, "rust"), target)),
&distdir(builder),
0o755);
@@ -1914,6 +1939,7 @@ impl Step for Extended {
builder.install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);

builder.info(&format!("building `msi` installer with {:?}", light));
let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target);
let mut cmd = Command::new(&light);
cmd.arg("-nologo")
@@ -1946,6 +1972,7 @@ impl Step for Extended {
// ICE57 wrongly complains about the shortcuts
cmd.arg("-sice:ICE57");

let _time = timeit(builder);
builder.run(&mut cmd);

if !builder.config.dry_run {
@@ -2114,6 +2141,7 @@ impl Step for LlvmTools {
}

builder.info(&format!("Dist LlvmTools ({})", target));
let _time = timeit(builder);
let src = builder.src.join("src/llvm-project/llvm");
let name = pkgname(builder, "llvm-tools");

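The bootstrap changes above repeatedly pair the `builder.info(...)` message with a new `let _time = timeit(builder);` guard so that each dist step reports how long it took. A minimal sketch of such an RAII timer is shown below; the names and output format are assumed for illustration and are not bootstrap's actual implementation:

```rust
use std::time::Instant;

/// Prints the elapsed wall-clock time for a scope when dropped.
/// Illustrative stand-in for bootstrap's `timeit` helper.
struct TimeIt(Instant);

fn timeit() -> TimeIt {
    TimeIt(Instant::now())
}

impl Drop for TimeIt {
    fn drop(&mut self) {
        println!("\tfinished in {:.3} seconds", self.0.elapsed().as_secs_f64());
    }
}

fn main() {
    let _time = timeit(); // dropped at the end of the scope, printing the elapsed time
    // ... the work being measured goes here ...
}
```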
@@ -1,27 +0,0 @@
# `param_attrs`

The tracking issue for this feature is: [#60406]

[#60406]: https://github.com/rust-lang/rust/issues/60406

Allow attributes in formal function parameter position so external tools and compiler internals can
take advantage of the additional information that the parameters provide.

Enables finer conditional compilation with `#[cfg(..)]` and linting control of variables. Moreover,
opens the path to richer DSLs created by users.

------------------------

Example:

```rust
#![feature(param_attrs)]

fn len(
#[cfg(windows)] slice: &[u16],
#[cfg(not(windows))] slice: &[u8],
) -> usize
{
slice.len()
}
```
@@ -14,6 +14,8 @@ TEST_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/derives/'))

TEMPLATE = """\
// ignore-x86
// ^ due to stderr output differences
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'

{error_deriving}
@@ -25,9 +25,9 @@ SourceDir=.\
OutputBaseFilename={#CFG_PACKAGE_NAME}-{#CFG_BUILD}
DefaultDirName={sd}\Rust

Compression=lzma2/ultra
InternalCompressLevel=ultra
SolidCompression=true
Compression=lzma2/normal
InternalCompressLevel=normal
SolidCompression=no

ChangesEnvironment=true
ChangesAssociations=no
@@ -152,7 +152,7 @@
</Upgrade>

<!-- Specifies a single cab file to be embedded in the installer's .msi. -->
<MediaTemplate EmbedCab="yes" CompressionLevel="high" />
<MediaTemplate EmbedCab="yes" CompressionLevel="mszip" />

<!-- Send a WM_SETTINGCHANGE message to tell processes like explorer to update their
environments so any new command prompts get the updated %PATH% -->
@@ -240,7 +240,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_allocator_nounwind]
pub fn handle_alloc_error(layout: Layout) -> ! {
#[allow(improper_ctypes)]
#[cfg_attr(bootstrap, allow(improper_ctypes))]
extern "Rust" {
#[lang = "oom"]
fn oom_impl(layout: Layout) -> !;
@@ -570,7 +570,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
#[inline]
fn max(self, other: Self) -> Self
where Self: Sized {
if other >= self { other } else { self }
max_by(self, other, Ord::cmp)
}

/// Compares and returns the minimum of two values.
@@ -587,7 +587,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
#[inline]
fn min(self, other: Self) -> Self
where Self: Sized {
if self <= other { self } else { other }
min_by(self, other, Ord::cmp)
}

/// Restrict a value to a certain interval.
@@ -898,6 +898,49 @@ pub fn min<T: Ord>(v1: T, v2: T) -> T {
v1.min(v2)
}

/// Returns the minimum of two values with respect to the specified comparison function.
///
/// Returns the first argument if the comparison determines them to be equal.
///
/// # Examples
///
/// ```
/// #![feature(cmp_min_max_by)]
///
/// use std::cmp;
///
/// assert_eq!(cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 1);
/// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
/// ```
#[inline]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
Ordering::Less | Ordering::Equal => v1,
Ordering::Greater => v2,
}
}

/// Returns the element that gives the minimum value from the specified function.
///
/// Returns the first argument if the comparison determines them to be equal.
///
/// # Examples
///
/// ```
/// #![feature(cmp_min_max_by)]
///
/// use std::cmp;
///
/// assert_eq!(cmp::min_by_key(-2, 1, |x: &i32| x.abs()), 1);
/// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2);
/// ```
#[inline]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
}

/// Compares and returns the maximum of two values.
///
/// Returns the second argument if the comparison determines them to be equal.
@@ -918,6 +961,49 @@ pub fn max<T: Ord>(v1: T, v2: T) -> T {
v1.max(v2)
}

/// Returns the maximum of two values with respect to the specified comparison function.
///
/// Returns the second argument if the comparison determines them to be equal.
///
/// # Examples
///
/// ```
/// #![feature(cmp_min_max_by)]
///
/// use std::cmp;
///
/// assert_eq!(cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
/// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2);
/// ```
#[inline]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
Ordering::Less | Ordering::Equal => v2,
Ordering::Greater => v1,
}
}

/// Returns the element that gives the maximum value from the specified function.
///
/// Returns the second argument if the comparison determines them to be equal.
///
/// # Examples
///
/// ```
/// #![feature(cmp_min_max_by)]
///
/// use std::cmp;
///
/// assert_eq!(cmp::max_by_key(-2, 1, |x: &i32| x.abs()), -2);
/// assert_eq!(cmp::max_by_key(-2, 2, |x: &i32| x.abs()), 2);
/// ```
#[inline]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
max_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
}

// Implementation of PartialEq, Eq, PartialOrd and Ord for primitive types
mod impls {
use crate::cmp::Ordering::{self, Less, Greater, Equal};
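The tie-breaking rule in the new functions is what lets `Ord::min`/`Ord::max` be rewritten in terms of them without changing behaviour: on `Ordering::Equal`, `min_by` keeps the first argument and `max_by` keeps the second. The sketch below restates that rule with local stand-in functions so it runs on stable without the `cmp_min_max_by` feature gate; it is illustrative only:

```rust
use std::cmp::Ordering;

// Local re-statement of the tie-breaking behaviour described above.
fn min_by<T>(v1: T, v2: T, compare: impl FnOnce(&T, &T) -> Ordering) -> T {
    match compare(&v1, &v2) { Ordering::Greater => v2, _ => v1 }
}
fn max_by<T>(v1: T, v2: T, compare: impl FnOnce(&T, &T) -> Ordering) -> T {
    match compare(&v1, &v2) { Ordering::Greater => v1, _ => v2 }
}

fn main() {
    let by_len = |a: &&str, b: &&str| a.len().cmp(&b.len());
    assert_eq!(min_by("ab", "cd", by_len), "ab"); // equal keys: first argument wins
    assert_eq!(max_by("ab", "cd", by_len), "cd"); // equal keys: second argument wins
}
```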
@@ -1,4 +1,4 @@
use crate::cmp::Ordering;
use crate::cmp::{self, Ordering};
use crate::ops::{Add, Try};

use super::super::LoopState;
@@ -2223,13 +2223,12 @@ pub trait Iterator {
move |x| (f(&x), x)
}

// switch to y even if it is only equal, to preserve stability.
#[inline]
fn select<T, B: Ord>((x_p, _): &(B, T), (y_p, _): &(B, T)) -> bool {
x_p <= y_p
fn compare<T, B: Ord>((x_p, _): &(B, T), (y_p, _): &(B, T)) -> Ordering {
x_p.cmp(y_p)
}

let (_, x) = select_fold1(self.map(key(f)), select)?;
let (_, x) = self.map(key(f)).max_by(compare)?;
Some(x)
}

@@ -2252,13 +2251,12 @@ pub trait Iterator {
fn max_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,
{
// switch to y even if it is only equal, to preserve stability.
#[inline]
fn select<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(&T, &T) -> bool {
move |x, y| compare(x, y) != Ordering::Greater
fn fold<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(T, T) -> T {
move |x, y| cmp::max_by(x, y, &mut compare)
}

select_fold1(self, select(compare))
fold1(self, fold(compare))
}

/// Returns the element that gives the minimum value from the
@@ -2285,13 +2283,12 @@ pub trait Iterator {
move |x| (f(&x), x)
}

// only switch to y if it is strictly smaller, to preserve stability.
#[inline]
fn select<T, B: Ord>((x_p, _): &(B, T), (y_p, _): &(B, T)) -> bool {
x_p > y_p
fn compare<T, B: Ord>((x_p, _): &(B, T), (y_p, _): &(B, T)) -> Ordering {
x_p.cmp(y_p)
}

let (_, x) = select_fold1(self.map(key(f)), select)?;
let (_, x) = self.map(key(f)).min_by(compare)?;
Some(x)
}

@@ -2314,13 +2311,12 @@ pub trait Iterator {
fn min_by<F>(self, compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,
{
// only switch to y if it is strictly smaller, to preserve stability.
#[inline]
fn select<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(&T, &T) -> bool {
move |x, y| compare(x, y) == Ordering::Greater
fn fold<T>(mut compare: impl FnMut(&T, &T) -> Ordering) -> impl FnMut(T, T) -> T {
move |x, y| cmp::min_by(x, y, &mut compare)
}

select_fold1(self, select(compare))
fold1(self, fold(compare))
}


@@ -2958,28 +2954,18 @@ pub trait Iterator {
}
}

/// Select an element from an iterator based on the given "comparison"
/// function.
///
/// This is an idiosyncratic helper to try to factor out the
/// commonalities of {max,min}{,_by}. In particular, this avoids
/// having to implement optimizations several times.
/// Fold an iterator without having to provide an initial value.
#[inline]
fn select_fold1<I, F>(mut it: I, f: F) -> Option<I::Item>
fn fold1<I, F>(mut it: I, f: F) -> Option<I::Item>
where
I: Iterator,
F: FnMut(&I::Item, &I::Item) -> bool,
F: FnMut(I::Item, I::Item) -> I::Item,
{
#[inline]
fn select<T>(mut f: impl FnMut(&T, &T) -> bool) -> impl FnMut(T, T) -> T {
move |sel, x| if f(&sel, &x) { x } else { sel }
}

// start with the first element as our selection. This avoids
// having to use `Option`s inside the loop, translating to a
// sizeable performance gain (6x in one case).
let first = it.next()?;
Some(it.fold(first, select(f)))
Some(it.fold(first, f))
}

#[stable(feature = "rust1", since = "1.0.0")]
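To make the refactor above concrete: `select_fold1` took a boolean "should I switch?" predicate, while the new `fold1` is a plain seeded reduction, and the min/max logic now lives in `cmp::{min_by, max_by}`. A standalone sketch of the `fold1` shape (assumed names, not the libcore source) is:

```rust
// Minimal version of a first-element-seeded fold, in the spirit of `fold1` above.
fn fold1<I, F>(mut it: I, f: F) -> Option<I::Item>
where
    I: Iterator,
    F: FnMut(I::Item, I::Item) -> I::Item,
{
    let first = it.next()?;      // seed the accumulator with the first element
    Some(it.fold(first, f))      // then reduce the remaining elements
}

fn main() {
    // A max computed as a pairwise reduction, mirroring how `Iterator::max_by`
    // is now expressed as `fold1` plus a two-value comparison.
    let max = fold1(vec![3, 9, 4].into_iter(), |a, b| if b > a { b } else { a });
    assert_eq!(max, Some(9));
}
```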
@@ -71,7 +71,7 @@ pub fn panic_fmt(fmt: fmt::Arguments<'_>, file_line_col: &(&'static str, u32, u32)) -> ! {
}

// NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
#[allow(improper_ctypes)] // PanicInfo contains a trait object which is not FFI safe
#[cfg_attr(boostrap_stdarch_ignore_this, allow(improper_ctypes))]
extern "Rust" {
#[lang = "panic_impl"]
fn panic_impl(pi: &PanicInfo<'_>) -> !;
@@ -1,4 +1,4 @@
use core::cmp::Ordering::{Less, Greater, Equal};
use core::cmp::{self, Ordering::*};

#[test]
fn test_int_totalord() {
@@ -28,6 +28,28 @@ fn test_ord_max_min() {
assert_eq!(1.min(1), 1);
}

#[test]
fn test_ord_min_max_by() {
let f = |x: &i32, y: &i32| x.abs().cmp(&y.abs());
assert_eq!(cmp::min_by(1, -1, f), 1);
assert_eq!(cmp::min_by(1, -2, f), 1);
assert_eq!(cmp::min_by(2, -1, f), -1);
assert_eq!(cmp::max_by(1, -1, f), -1);
assert_eq!(cmp::max_by(1, -2, f), -2);
assert_eq!(cmp::max_by(2, -1, f), 2);
}

#[test]
fn test_ord_min_max_by_key() {
let f = |x: &i32| x.abs();
assert_eq!(cmp::min_by_key(1, -1, f), 1);
assert_eq!(cmp::min_by_key(1, -2, f), 1);
assert_eq!(cmp::min_by_key(2, -1, f), -1);
assert_eq!(cmp::max_by_key(1, -1, f), -1);
assert_eq!(cmp::max_by_key(1, -2, f), -2);
assert_eq!(cmp::max_by_key(2, -1, f), 2);
}

#[test]
fn test_ordering_reverse() {
assert_eq!(Less.reverse(), Greater);
@@ -34,6 +34,7 @@
#![feature(iter_partition_in_place)]
#![feature(iter_is_partitioned)]
#![feature(iter_order_by)]
#![feature(cmp_min_max_by)]

extern crate test;

@@ -1,4 +1,4 @@
use errors::{Diagnostic, DiagnosticBuilder};
use errors::Diagnostic;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
@@ -819,7 +819,7 @@ impl DepGraph {
let handle = tcx.sess.diagnostic();

for diagnostic in diagnostics {
DiagnosticBuilder::new_diagnostic(handle, diagnostic).emit();
handle.emit_diagnostic(&diagnostic);
}

// Mark the node as green now that diagnostics are emitted
@@ -1347,6 +1347,39 @@ struct Foo<T: 'static> {
```
"##,

E0312: r##"
Reference's lifetime of borrowed content doesn't match the expected lifetime.

Erroneous code example:

```compile_fail,E0312
pub fn opt_str<'a>(maybestr: &'a Option<String>) -> &'static str {
if maybestr.is_none() {
"(none)"
} else {
let s: &'a str = maybestr.as_ref().unwrap();
s // Invalid lifetime!
}
}
```

To fix this error, either lessen the expected lifetime or find a way to not have
to use this reference outside of its current scope (by running the code directly
in the same block for example?):

```
// In this case, we can fix the issue by switching from "static" lifetime to 'a
pub fn opt_str<'a>(maybestr: &'a Option<String>) -> &'a str {
if maybestr.is_none() {
"(none)"
} else {
let s: &'a str = maybestr.as_ref().unwrap();
s // Ok!
}
}
```
"##,

E0317: r##"
This error occurs when an `if` expression without an `else` block is used in a
context where a type other than `()` is expected, for example a `let`
@@ -2202,7 +2235,6 @@ static X: u32 = 42;
// E0304, // expected signed integer constant
// E0305, // expected constant
E0311, // thing may not live long enough
E0312, // lifetime of reference outlives lifetime of borrowed content
E0313, // lifetime of borrowed pointer outlives lifetime of captured
// variable
E0314, // closure outlives stack frame
@@ -1893,10 +1893,13 @@ impl<'a> LoweringContext<'a> {
if let Ok(snippet) = self.sess.source_map().span_to_snippet(data.span) {
// Do not suggest going from `Trait()` to `Trait<>`
if data.inputs.len() > 0 {
let split = snippet.find('(').unwrap();
let trait_name = &snippet[0..split];
let args = &snippet[split + 1 .. snippet.len() - 1];
err.span_suggestion(
data.span,
"use angle brackets instead",
format!("<{}>", &snippet[1..snippet.len() - 1]),
format!("{}<{}>", trait_name, args),
Applicability::MaybeIncorrect,
);
}
@@ -2750,3 +2750,15 @@ pub enum Node<'hir> {

Crate,
}

impl Node<'_> {
pub fn ident(&self) -> Option<Ident> {
match self {
Node::TraitItem(TraitItem { ident, .. }) |
Node::ImplItem(ImplItem { ident, .. }) |
Node::ForeignItem(ForeignItem { ident, .. }) |
Node::Item(Item { ident, .. }) => Some(*ident),
_ => None,
}
}
}
@@ -30,6 +30,7 @@ use super::sub::Sub;
use super::type_variable::TypeVariableValue;
use super::unify_key::{ConstVarValue, ConstVariableValue};
use super::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use super::unify_key::replace_if_possible;

use crate::hir::def_id::DefId;
use crate::mir::interpret::ConstValue;
@@ -127,6 +128,12 @@ impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> {
where
R: TypeRelation<'tcx>,
{
debug!("{}.consts({:?}, {:?})", relation.tag(), a, b);
if a == b { return Ok(a); }

let a = replace_if_possible(self.const_unification_table.borrow_mut(), a);
let b = replace_if_possible(self.const_unification_table.borrow_mut(), b);

let a_is_expected = relation.a_is_expected();

match (a.val, b.val) {
@@ -1,14 +1,12 @@
use super::combine::{CombineFields, RelationDir, const_unification_error};
use super::combine::{CombineFields, RelationDir};
use super::Subtype;

use crate::hir::def_id::DefId;

use crate::ty::{self, Ty, TyCtxt, InferConst};
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::TyVar;
use crate::ty::subst::SubstsRef;
use crate::ty::relate::{self, Relate, RelateResult, TypeRelation};
use crate::mir::interpret::ConstValue;
use crate::infer::unify_key::replace_if_possible;

/// Ensures `a` is made equal to `b`. Returns `a` on success.
pub struct Equate<'combine, 'infcx, 'tcx> {
@@ -108,39 +106,7 @@ impl TypeRelation<'tcx> for Equate<'combine, 'infcx, 'tcx> {
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("{}.consts({:?}, {:?})", self.tag(), a, b);
if a == b { return Ok(a); }

let infcx = self.fields.infcx;
let a = replace_if_possible(infcx.const_unification_table.borrow_mut(), a);
let b = replace_if_possible(infcx.const_unification_table.borrow_mut(), b);
let a_is_expected = self.a_is_expected();

match (a.val, b.val) {
(ConstValue::Infer(InferConst::Var(a_vid)),
ConstValue::Infer(InferConst::Var(b_vid))) => {
infcx.const_unification_table
.borrow_mut()
.unify_var_var(a_vid, b_vid)
.map_err(|e| const_unification_error(a_is_expected, e))?;
return Ok(a);
}

(ConstValue::Infer(InferConst::Var(a_id)), _) => {
self.fields.infcx.unify_const_variable(a_is_expected, a_id, b)?;
return Ok(a);
}

(_, ConstValue::Infer(InferConst::Var(b_id))) => {
self.fields.infcx.unify_const_variable(!a_is_expected, b_id, a)?;
return Ok(a);
}

_ => {}
}

self.fields.infcx.super_combine_consts(self, a, b)?;
Ok(a)
self.fields.infcx.super_combine_consts(self, a, b)
}

fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
@@ -1119,7 +1119,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
Some((expected, found)) => Some((expected, found)),
None => {
// Derived error. Cancel the emitter.
self.tcx.sess.diagnostic().cancel(diag);
diag.cancel();
return;
}
};
@@ -66,11 +66,6 @@ impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("{}.consts({:?}, {:?})", self.tag(), a, b);
if a == b {
return Ok(a);
}

self.fields.infcx.super_combine_consts(self, a, b)
}

@@ -66,11 +66,6 @@ impl TypeRelation<'tcx> for Lub<'combine, 'infcx, 'tcx> {
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("{}.consts({:?}, {:?})", self.tag(), a, b);
if a == b {
return Ok(a);
}

self.fields.infcx.super_combine_consts(self, a, b)
}

@@ -1,13 +1,11 @@
use super::SubregionOrigin;
use super::combine::{CombineFields, RelationDir, const_unification_error};
use super::combine::{CombineFields, RelationDir};

use crate::traits::Obligation;
use crate::ty::{self, Ty, TyCtxt, InferConst};
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::TyVar;
use crate::ty::fold::TypeFoldable;
use crate::ty::relate::{Cause, Relate, RelateResult, TypeRelation};
use crate::infer::unify_key::replace_if_possible;
use crate::mir::interpret::ConstValue;
use std::mem;

/// Ensures `a` is made a subtype of `b`. Returns `a` on success.
@@ -142,41 +140,7 @@ impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("{}.consts({:?}, {:?})", self.tag(), a, b);
if a == b { return Ok(a); }

let infcx = self.fields.infcx;
let a = replace_if_possible(infcx.const_unification_table.borrow_mut(), a);
let b = replace_if_possible(infcx.const_unification_table.borrow_mut(), b);

// Consts can only be equal or unequal to each other: there's no subtyping
// relation, so we're just going to perform equating here instead.
let a_is_expected = self.a_is_expected();
match (a.val, b.val) {
(ConstValue::Infer(InferConst::Var(a_vid)),
ConstValue::Infer(InferConst::Var(b_vid))) => {
infcx.const_unification_table
.borrow_mut()
.unify_var_var(a_vid, b_vid)
.map_err(|e| const_unification_error(a_is_expected, e))?;
return Ok(a);
}

(ConstValue::Infer(InferConst::Var(a_id)), _) => {
self.fields.infcx.unify_const_variable(a_is_expected, a_id, b)?;
return Ok(a);
}

(_, ConstValue::Infer(InferConst::Var(b_id))) => {
self.fields.infcx.unify_const_variable(!a_is_expected, b_id, a)?;
return Ok(a);
}

_ => {}
}

self.fields.infcx.super_combine_consts(self, a, b)?;
Ok(a)
self.fields.infcx.super_combine_consts(self, a, b)
}

fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
@@ -7,7 +7,7 @@
use crate::lint::{LintPass, LateLintPass, LintArray};
use crate::middle::stability;
use crate::session::Session;
use errors::{Applicability, DiagnosticBuilder};
use errors::{Applicability, DiagnosticBuilder, pluralise};
use syntax::ast;
use syntax::source_map::Span;
use syntax::symbol::Symbol;
@@ -524,7 +524,7 @@ pub(crate) fn add_elided_lifetime_in_path_suggestion(
};
db.span_suggestion(
replace_span,
&format!("indicate the anonymous lifetime{}", if n >= 2 { "s" } else { "" }),
&format!("indicate the anonymous lifetime{}", pluralise!(n)),
suggestion,
Applicability::MachineApplicable
);
@@ -17,7 +17,7 @@ use crate::ty::{self, DefIdTree, GenericParamDefKind, TyCtxt};
use crate::rustc::lint;
use crate::session::Session;
use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, HirIdMap, HirIdSet};
use errors::{Applicability, DiagnosticBuilder};
use errors::{Applicability, DiagnosticBuilder, pluralise};
use rustc_macros::HashStable;
use std::borrow::Cow;
use std::cell::Cell;
@@ -3047,7 +3047,7 @@ pub fn report_missing_lifetime_specifiers(
span,
E0106,
"missing lifetime specifier{}",
if count > 1 { "s" } else { "" }
pluralise!(count)
)
}

@@ -1917,6 +1917,15 @@ impl<'tcx> Place<'tcx> {
}
}

/// If this place represents a local variable like `_X` with no
/// projections, return `Some(_X)`.
pub fn as_local(&self) -> Option<Local> {
match self {
Place { projection: box [], base: PlaceBase::Local(l) } => Some(*l),
_ => None,
}
}

pub fn as_ref(&self) -> PlaceRef<'_, 'tcx> {
PlaceRef {
base: &self.base,
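To illustrate the accessor pattern added above outside of rustc, the sketch below uses simplified stand-in types (not the real `Place`/`Local` definitions): a place counts as "just a local" only when its projection list is empty.

```rust
// Illustrative only; types are simplified stand-ins for MIR's Place and Local.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Local(u32);

#[allow(dead_code)]
enum PlaceBase { Local(Local), Static }

struct Place { base: PlaceBase, projection: Vec<u32> }

impl Place {
    /// Returns `Some(local)` only for a bare local with no projections.
    fn as_local(&self) -> Option<Local> {
        match self {
            Place { projection, base: PlaceBase::Local(l) } if projection.is_empty() => Some(*l),
            _ => None,
        }
    }
}

fn main() {
    let p = Place { base: PlaceBase::Local(Local(0)), projection: vec![] };
    assert_eq!(p.as_local(), Some(Local(0)));
}
```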
@@ -1855,7 +1855,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
struct NullEmitter;

impl errors::emitter::Emitter for NullEmitter {
fn emit_diagnostic(&mut self, _: &errors::DiagnosticBuilder<'_>) {}
fn emit_diagnostic(&mut self, _: &errors::Diagnostic) {}
}

// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
@@ -87,7 +87,7 @@ fn test_can_print_warnings() {
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(!sess.diagnostic().flags.can_emit_warnings);
assert!(!sess.diagnostic().can_emit_warnings());
});

syntax::with_default_globals(|| {
@@ -97,7 +97,7 @@ fn test_can_print_warnings() {
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings);
assert!(sess.diagnostic().can_emit_warnings());
});

syntax::with_default_globals(|| {
@@ -105,7 +105,7 @@ fn test_can_print_warnings() {
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
let sess = build_session(sessopts, None, registry);
assert!(sess.diagnostic().flags.can_emit_warnings);
assert!(sess.diagnostic().can_emit_warnings());
});
}

@@ -365,12 +365,6 @@ impl Session {
pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.diagnostic().span_note_without_error(sp, msg)
}
pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.diagnostic().span_unimpl(sp, msg)
}
pub fn unimpl(&self, msg: &str) -> ! {
self.diagnostic().unimpl(msg)
}

pub fn buffer_lint<S: Into<MultiSpan>>(
&self,
@@ -1040,6 +1034,7 @@ fn default_emitter(
source_map: &Lrc<source_map::SourceMap>,
emitter_dest: Option<Box<dyn Write + Send>>,
) -> Box<dyn Emitter + sync::Send> {
let external_macro_backtrace = sopts.debugging_opts.external_macro_backtrace;
match (sopts.error_format, emitter_dest) {
(config::ErrorOutputType::HumanReadable(kind), dst) => {
let (short, color_config) = kind.unzip();
@@ -1048,6 +1043,7 @@ fn default_emitter(
let emitter = AnnotateSnippetEmitterWriter::new(
Some(source_map.clone()),
short,
external_macro_backtrace,
);
Box::new(emitter.ui_testing(sopts.debugging_opts.ui_testing))
} else {
@@ -1058,6 +1054,7 @@ fn default_emitter(
short,
sopts.debugging_opts.teach,
sopts.debugging_opts.terminal_width,
external_macro_backtrace,
),
Some(dst) => EmitterWriter::new(
dst,
@@ -1066,6 +1063,7 @@ fn default_emitter(
false, // no teach messages when writing to a buffer
false, // no colors when writing to a buffer
None, // no terminal width
external_macro_backtrace,
),
};
Box::new(emitter.ui_testing(sopts.debugging_opts.ui_testing))
@@ -1077,6 +1075,7 @@ fn default_emitter(
source_map.clone(),
pretty,
json_rendered,
external_macro_backtrace,
).ui_testing(sopts.debugging_opts.ui_testing),
),
(config::ErrorOutputType::Json { pretty, json_rendered }, Some(dst)) => Box::new(
@@ -1086,6 +1085,7 @@ fn default_emitter(
source_map.clone(),
pretty,
json_rendered,
external_macro_backtrace,
).ui_testing(sopts.debugging_opts.ui_testing),
),
}
@@ -1382,13 +1382,13 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
let emitter: Box<dyn Emitter + sync::Send> = match output {
config::ErrorOutputType::HumanReadable(kind) => {
let (short, color_config) = kind.unzip();
Box::new(EmitterWriter::stderr(color_config, None, short, false, None))
Box::new(EmitterWriter::stderr(color_config, None, short, false, None, false))
}
config::ErrorOutputType::Json { pretty, json_rendered } =>
Box::new(JsonEmitter::basic(pretty, json_rendered)),
Box::new(JsonEmitter::basic(pretty, json_rendered, false)),
};
let handler = errors::Handler::with_emitter(true, None, emitter);
handler.emit(&MultiSpan::new(), msg, errors::Level::Fatal);
handler.struct_fatal(msg).emit();
errors::FatalError.raise();
}

@@ -1396,13 +1396,13 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
let emitter: Box<dyn Emitter + sync::Send> = match output {
config::ErrorOutputType::HumanReadable(kind) => {
let (short, color_config) = kind.unzip();
Box::new(EmitterWriter::stderr(color_config, None, short, false, None))
Box::new(EmitterWriter::stderr(color_config, None, short, false, None, false))
}
config::ErrorOutputType::Json { pretty, json_rendered } =>
Box::new(JsonEmitter::basic(pretty, json_rendered)),
Box::new(JsonEmitter::basic(pretty, json_rendered, false)),
};
let handler = errors::Handler::with_emitter(true, None, emitter);
handler.emit(&MultiSpan::new(), msg, errors::Level::Warning);
handler.struct_warn(msg).emit();
}

pub type CompileResult = Result<(), ErrorReported>;
@@ -33,17 +33,19 @@ use crate::ty::subst::Subst;
use crate::ty::SubtypePredicate;
use crate::util::nodemap::{FxHashMap, FxHashSet};

use errors::{Applicability, DiagnosticBuilder};
use errors::{Applicability, DiagnosticBuilder, pluralise};
use std::fmt;
use syntax::ast;
use syntax::symbol::{sym, kw};
use syntax_pos::{DUMMY_SP, Span, ExpnKind};

impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_fulfillment_errors(&self,
errors: &[FulfillmentError<'tcx>],
body_id: Option<hir::BodyId>,
fallback_has_occurred: bool) {
pub fn report_fulfillment_errors(
&self,
errors: &[FulfillmentError<'tcx>],
body_id: Option<hir::BodyId>,
fallback_has_occurred: bool,
) {
#[derive(Debug)]
struct ErrorDescriptor<'tcx> {
predicate: ty::Predicate<'tcx>,
@@ -1053,6 +1055,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
.filter(|c| !c.is_whitespace())
.take_while(|c| *c == '&')
.count();
if let Some('\'') = snippet.chars()
.filter(|c| !c.is_whitespace())
.skip(refs_number)
.next()
{ // Do not suggest removal of borrow from type arguments.
return;
}

let mut trait_type = trait_ref.self_ty();

@@ -1214,7 +1223,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
_ => format!("{} {}argument{}",
arg_length,
if distinct && arg_length > 1 { "distinct " } else { "" },
if arg_length == 1 { "" } else { "s" }),
pluralise!(arg_length))
}
};

@@ -1651,6 +1660,18 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
err.note(&msg);
}
}
ObligationCauseCode::BindingObligation(item_def_id, span) => {
let item_name = tcx.def_path_str(item_def_id);
let msg = format!("required by this bound in `{}`", item_name);
if let Some(ident) = tcx.opt_item_name(item_def_id) {
err.span_label(ident.span, "");
}
if span != DUMMY_SP {
err.span_label(span, &msg);
} else {
err.note(&msg);
}
}
ObligationCauseCode::ObjectCastObligation(object_ty) => {
err.note(&format!("required for the cast to the object type `{}`",
self.ty_to_string(object_ty)));
@@ -176,6 +176,9 @@ pub enum ObligationCauseCode<'tcx> {
/// also implement all supertraits of `X`.
ItemObligation(DefId),

/// Like `ItemObligation`, but with extra detail on the source of the obligation.
BindingObligation(DefId, Span),

/// A type like `&'a T` is WF only if `T: 'a`.
ReferenceOutlivesReferent(Ty<'tcx>),

@@ -659,11 +662,11 @@ pub struct VtableTraitAliasData<'tcx, N> {
}

/// Creates predicate obligations from the generic bounds.
pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>)
-> PredicateObligations<'tcx>
{
pub fn predicates_for_generics<'tcx>(
cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>,
) -> PredicateObligations<'tcx> {
util::predicates_for_generics(cause, 0, param_env, generic_bounds)
}

@@ -472,6 +472,7 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
super::TupleElem => Some(super::TupleElem),
super::ProjectionWf(proj) => tcx.lift(&proj).map(super::ProjectionWf),
super::ItemObligation(def_id) => Some(super::ItemObligation(def_id)),
super::BindingObligation(def_id, span) => Some(super::BindingObligation(def_id, span)),
super::ReferenceOutlivesReferent(ty) => {
tcx.lift(&ty).map(super::ReferenceOutlivesReferent)
}
@@ -513,20 +513,19 @@ pub fn impl_trait_ref_and_oblig<'a, 'tcx>(
}

/// See [`super::obligations_for_generics`].
pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
recursion_depth: usize,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>)
-> Vec<PredicateObligation<'tcx>>
{
debug!("predicates_for_generics(generic_bounds={:?})",
generic_bounds);
pub fn predicates_for_generics<'tcx>(
cause: ObligationCause<'tcx>,
recursion_depth: usize,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>,
) -> Vec<PredicateObligation<'tcx>> {
debug!("predicates_for_generics(generic_bounds={:?})", generic_bounds);

generic_bounds.predicates.iter().map(|predicate| {
Obligation { cause: cause.clone(),
recursion_depth,
param_env,
predicate: predicate.clone() }
generic_bounds.predicates.iter().map(|predicate| Obligation {
cause: cause.clone(),
recursion_depth,
param_env,
predicate: predicate.clone(),
}).collect()
}

@@ -196,7 +196,7 @@ impl<'tcx> ty::TyS<'tcx> {
let n = tcx.lift_to_global(&n).unwrap();
match n.try_eval_usize(tcx, ty::ParamEnv::empty()) {
Some(n) => {
format!("array of {} element{}", n, if n != 1 { "s" } else { "" }).into()
format!("array of {} element{}", n, pluralise!(n)).into()
}
None => "array".into(),
}
@@ -275,10 +275,10 @@ impl<'tcx> TyCtxt<'tcx> {
`.await`ing on both of them");
}
}
if let (ty::Infer(ty::IntVar(_)), ty::Float(_)) =
(&values.found.sty, &values.expected.sty) // Issue #53280
{
if let Ok(snippet) = self.sess.source_map().span_to_snippet(sp) {
match (&values.expected.sty, &values.found.sty) {
(ty::Float(_), ty::Infer(ty::IntVar(_))) => if let Ok( // Issue #53280
snippet,
) = self.sess.source_map().span_to_snippet(sp) {
if snippet.chars().all(|c| c.is_digit(10) || c == '-' || c == '_') {
db.span_suggestion(
sp,
@@ -287,8 +287,96 @@ impl<'tcx> TyCtxt<'tcx> {
Applicability::MachineApplicable
);
}
},
(ty::Param(_), ty::Param(_)) => {
db.note("a type parameter was expected, but a different one was found; \
you might be missing a type parameter or trait bound");
db.note("for more information, visit \
https://doc.rust-lang.org/book/ch10-02-traits.html\
#traits-as-parameters");
}
(ty::Projection(_), ty::Projection(_)) => {
db.note("an associated type was expected, but a different one was found");
}
(ty::Param(_), ty::Projection(_)) | (ty::Projection(_), ty::Param(_)) => {
db.note("you might be missing a type parameter or trait bound");
}
(ty::Param(_), _) | (_, ty::Param(_)) => {
db.help("type parameters must be constrained to match other types");
if self.sess.teach(&db.get_code().unwrap()) {
db.help("given a type parameter `T` and a method `foo`:
```
trait Trait<T> { fn foo(&self) -> T; }
```
the only ways to implement method `foo` are:
- constrain `T` with an explicit type:
```
impl Trait<String> for X {
fn foo(&self) -> String { String::new() }
}
```
- add a trait bound to `T` and call a method on that trait that returns `Self`:
```
impl<T: std::default::Default> Trait<T> for X {
fn foo(&self) -> T { <T as std::default::Default>::default() }
}
```
- change `foo` to return an argument of type `T`:
```
impl<T> Trait<T> for X {
fn foo(&self, x: T) -> T { x }
}
```");
}
db.note("for more information, visit \
https://doc.rust-lang.org/book/ch10-02-traits.html\
#traits-as-parameters");
}
(ty::Projection(_), _) => {
db.note(&format!(
"consider constraining the associated type `{}` to `{}` or calling a \
method that returns `{}`",
values.expected,
values.found,
values.expected,
));
if self.sess.teach(&db.get_code().unwrap()) {
db.help("given an associated type `T` and a method `foo`:
```
trait Trait {
type T;
fn foo(&self) -> Self::T;
}
```
the only way of implementing method `foo` is to constrain `T` with an explicit associated type:
```
impl Trait for X {
type T = String;
fn foo(&self) -> Self::T { String::new() }
}
```");
}
db.note("for more information, visit \
https://doc.rust-lang.org/book/ch19-03-advanced-traits.html");
}
(_, ty::Projection(_)) => {
db.note(&format!(
"consider constraining the associated type `{}` to `{}`",
values.found,
values.expected,
));
db.note("for more information, visit \
https://doc.rust-lang.org/book/ch19-03-advanced-traits.html");
}
_ => {}
}
debug!(
"note_and_explain_type_err expected={:?} ({:?}) found={:?} ({:?})",
values.expected,
values.expected.sty,
values.found,
values.found.sty,
);
},
CyclicTy(ty) => {
// Watch out for various cases of cyclic types and try to explain.
@@ -2797,6 +2797,10 @@ impl<'tcx> TyCtxt<'tcx> {
})
}

pub fn opt_item_name(self, def_id: DefId) -> Option<Ident> {
self.hir().as_local_hir_id(def_id).and_then(|hir_id| self.hir().get(hir_id).ident())
}

pub fn opt_associated_item(self, def_id: DefId) -> Option<AssocItem> {
let is_associated_item = if let Some(hir_id) = self.hir().as_local_hir_id(def_id) {
match self.hir().get(hir_id) {
@@ -330,14 +330,13 @@ impl<'tcx> TyCtxt<'tcx> {
let mut i = 0;

while let Some(query) = current_query {
let mut db = DiagnosticBuilder::new(icx.tcx.sess.diagnostic(),
Level::FailureNote,
let mut diag = Diagnostic::new(Level::FailureNote,
&format!("#{} [{}] {}",
i,
query.info.query.name(),
query.info.query.describe(icx.tcx)));
db.set_span(icx.tcx.sess.source_map().def_span(query.info.span));
icx.tcx.sess.diagnostic().force_print_db(db);
diag.span = icx.tcx.sess.source_map().def_span(query.info.span).into();
icx.tcx.sess.diagnostic().force_print_diagnostic(diag);

current_query = query.parent.clone();
i += 1;
@ -22,12 +22,11 @@ use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
|
||||
use rustc::util::profiling::SelfProfiler;
|
||||
use rustc_fs_util::link_or_copy;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_errors::{Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
|
||||
use rustc_errors::{Handler, Level, FatalError, DiagnosticId};
|
||||
use rustc_errors::emitter::{Emitter};
|
||||
use rustc_target::spec::MergeFunctions;
|
||||
use syntax::attr;
|
||||
use syntax::ext::hygiene::ExpnId;
|
||||
use syntax_pos::MultiSpan;
|
||||
use syntax_pos::symbol::{Symbol, sym};
|
||||
use jobserver::{Client, Acquired};
|
||||
|
||||
@ -1725,7 +1724,7 @@ impl SharedEmitter {
|
||||
}
|
||||
|
||||
impl Emitter for SharedEmitter {
|
||||
fn emit_diagnostic(&mut self, db: &DiagnosticBuilder<'_>) {
|
||||
fn emit_diagnostic(&mut self, db: &rustc_errors::Diagnostic) {
|
||||
drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
|
||||
msg: db.message(),
|
||||
code: db.code.clone(),
|
||||
@ -1760,19 +1759,12 @@ impl SharedEmitterMain {
|
||||
match message {
|
||||
Ok(SharedEmitterMessage::Diagnostic(diag)) => {
|
||||
let handler = sess.diagnostic();
|
||||
match diag.code {
|
||||
Some(ref code) => {
|
||||
handler.emit_with_code(&MultiSpan::new(),
|
||||
&diag.msg,
|
||||
code.clone(),
|
||||
diag.lvl);
|
||||
}
|
||||
None => {
|
||||
handler.emit(&MultiSpan::new(),
|
||||
&diag.msg,
|
||||
diag.lvl);
|
||||
}
|
||||
let mut d = rustc_errors::Diagnostic::new(diag.lvl, &diag.msg);
|
||||
if let Some(code) = diag.code {
|
||||
d.code(code);
|
||||
}
|
||||
handler.emit_diagnostic(&d);
|
||||
handler.abort_if_errors_and_should_abort();
|
||||
}
|
||||
Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => {
|
||||
sess.span_err(ExpnId::from_u32(cookie).expn_data().call_site, &msg)
|
||||
|
@ -66,7 +66,7 @@ use syntax::source_map::FileLoader;
use syntax::feature_gate::{GatedCfg, UnstableFeatures};
use syntax::parse::{self, PResult};
use syntax::symbol::sym;
use syntax_pos::{DUMMY_SP, MultiSpan, FileName};
use syntax_pos::{DUMMY_SP, FileName};

pub mod pretty;
mod args;

@ -1196,15 +1196,16 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
false,
false,
None,
false,
));
let handler = errors::Handler::with_emitter(true, None, emitter);

// a .span_bug or .bug call has already printed what
// it wants to print.
if !info.payload().is::<errors::ExplicitBug>() {
handler.emit(&MultiSpan::new(),
"unexpected panic",
errors::Level::Bug);
let d = errors::Diagnostic::new(errors::Level::Bug, "unexpected panic");
handler.emit_diagnostic(&d);
handler.abort_if_errors_and_should_abort();
}

let mut xs: Vec<Cow<'static, str>> = vec![

@ -1224,9 +1225,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
}

for note in &xs {
handler.emit(&MultiSpan::new(),
note,
errors::Level::Note);
handler.note_without_error(&note);
}

// If backtraces are enabled, also print the query stack
@ -7,7 +7,7 @@

use syntax_pos::{SourceFile, MultiSpan, Loc};
use crate::{
Level, CodeSuggestion, DiagnosticBuilder, Emitter,
Level, CodeSuggestion, Diagnostic, Emitter,
SourceMapperDyn, SubDiagnostic, DiagnosticId
};
use crate::emitter::FileWithAnnotatedLines;

@ -25,11 +25,13 @@ pub struct AnnotateSnippetEmitterWriter {
short_message: bool,
/// If true, will normalize line numbers with `LL` to prevent noise in UI test diffs.
ui_testing: bool,

external_macro_backtrace: bool,
}

impl Emitter for AnnotateSnippetEmitterWriter {
/// The entry point for the diagnostics generation
fn emit_diagnostic(&mut self, db: &DiagnosticBuilder<'_>) {
fn emit_diagnostic(&mut self, db: &Diagnostic) {
let mut children = db.children.clone();
let (mut primary_span, suggestions) = self.primary_span_formatted(&db);

@ -37,7 +39,7 @@ impl Emitter for AnnotateSnippetEmitterWriter {
&mut primary_span,
&mut children,
&db.level,
db.handler().flags.external_macro_backtrace);
self.external_macro_backtrace);

self.emit_messages_default(&db.level,
db.message(),

@ -163,12 +165,14 @@ impl<'a> DiagnosticConverter<'a> {
impl AnnotateSnippetEmitterWriter {
pub fn new(
source_map: Option<Lrc<SourceMapperDyn>>,
short_message: bool
short_message: bool,
external_macro_backtrace: bool,
) -> Self {
Self {
source_map,
short_message,
ui_testing: false,
external_macro_backtrace,
}
}
@ -99,17 +99,9 @@ impl<'a> DerefMut for DiagnosticBuilder<'a> {
}

impl<'a> DiagnosticBuilder<'a> {
pub fn handler(&self) -> &'a Handler{
self.0.handler
}

/// Emit the diagnostic.
pub fn emit(&mut self) {
if self.cancelled() {
return;
}

self.0.handler.emit_db(&self);
self.0.handler.emit_diagnostic(&self);
self.cancel();
}

@ -354,7 +346,7 @@ impl<'a> DiagnosticBuilder<'a> {

/// Convenience function for internal use, clients should use one of the
/// struct_* methods on Handler.
pub fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> {
crate fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new_with_code(handler, level, None, message)
}

@ -371,7 +363,8 @@ impl<'a> DiagnosticBuilder<'a> {

/// Creates a new `DiagnosticBuilder` with an already constructed
/// diagnostic.
pub fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) -> DiagnosticBuilder<'a> {
crate fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic)
-> DiagnosticBuilder<'a> {
DiagnosticBuilder(Box::new(DiagnosticBuilderInner {
handler,
diagnostic,
@ -12,7 +12,7 @@ use Destination::*;
use syntax_pos::{SourceFile, Span, MultiSpan};

use crate::{
Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic,
Level, CodeSuggestion, Diagnostic, SubDiagnostic,
SuggestionStyle, SourceMapperDyn, DiagnosticId,
};
use crate::Level::Error;

@ -52,10 +52,12 @@ impl HumanReadableErrorType {
source_map: Option<Lrc<SourceMapperDyn>>,
teach: bool,
terminal_width: Option<usize>,
external_macro_backtrace: bool,
) -> EmitterWriter {
let (short, color_config) = self.unzip();
let color = color_config.suggests_using_colors();
EmitterWriter::new(dst, source_map, short, teach, color, terminal_width)
EmitterWriter::new(dst, source_map, short, teach, color, terminal_width,
external_macro_backtrace)
}
}

@ -180,7 +182,7 @@ const ANONYMIZED_LINE_NUM: &str = "LL";
/// Emitter trait for emitting errors.
pub trait Emitter {
/// Emit a structured diagnostic.
fn emit_diagnostic(&mut self, db: &DiagnosticBuilder<'_>);
fn emit_diagnostic(&mut self, db: &Diagnostic);

/// Emit a notification that an artifact has been output.
/// This is currently only supported for the JSON format,

@ -204,7 +206,7 @@ pub trait Emitter {
/// we return the original `primary_span` and the original suggestions.
fn primary_span_formatted<'a>(
&mut self,
db: &'a DiagnosticBuilder<'_>
db: &'a Diagnostic
) -> (MultiSpan, &'a [CodeSuggestion]) {
let mut primary_span = db.span.clone();
if let Some((sugg, rest)) = db.suggestions.split_first() {

@ -377,7 +379,7 @@ pub trait Emitter {
}

impl Emitter for EmitterWriter {
fn emit_diagnostic(&mut self, db: &DiagnosticBuilder<'_>) {
fn emit_diagnostic(&mut self, db: &Diagnostic) {
let mut children = db.children.clone();
let (mut primary_span, suggestions) = self.primary_span_formatted(&db);

@ -385,7 +387,7 @@ impl Emitter for EmitterWriter {
&mut primary_span,
&mut children,
&db.level,
db.handler().flags.external_macro_backtrace);
self.external_macro_backtrace);

self.emit_messages_default(&db.level,
&db.styled_message(),

@ -449,6 +451,8 @@ pub struct EmitterWriter {
teach: bool,
ui_testing: bool,
terminal_width: Option<usize>,

external_macro_backtrace: bool,
}

#[derive(Debug)]

@ -465,6 +469,7 @@ impl EmitterWriter {
short_message: bool,
teach: bool,
terminal_width: Option<usize>,
external_macro_backtrace: bool,
) -> EmitterWriter {
let dst = Destination::from_stderr(color_config);
EmitterWriter {

@ -474,6 +479,7 @@ impl EmitterWriter {
teach,
ui_testing: false,
terminal_width,
external_macro_backtrace,
}
}

@ -484,6 +490,7 @@ impl EmitterWriter {
teach: bool,
colored: bool,
terminal_width: Option<usize>,
external_macro_backtrace: bool,
) -> EmitterWriter {
EmitterWriter {
dst: Raw(dst, colored),

@ -492,6 +499,7 @@ impl EmitterWriter {
teach,
ui_testing: false,
terminal_width,
external_macro_backtrace,
}
}
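The hunks above change every `Emitter` to consume a finished `Diagnostic` value instead of a `DiagnosticBuilder` that borrows the `Handler` which created it. A minimal, self-contained sketch of that trait shape (the `Diagnostic` and `StderrEmitter` types here are illustrative stand-ins, not the compiler's own definitions):

```rust
#[derive(Debug, Clone)]
struct Diagnostic {
    level: String,
    message: String,
}

trait Emitter {
    // The emitter only needs the finished diagnostic data, not a builder
    // tied to the handler's lifetime.
    fn emit_diagnostic(&mut self, diag: &Diagnostic);
}

struct StderrEmitter;

impl Emitter for StderrEmitter {
    fn emit_diagnostic(&mut self, diag: &Diagnostic) {
        eprintln!("{}: {}", diag.level, diag.message);
    }
}

fn main() {
    let mut emitter = StderrEmitter;
    emitter.emit_diagnostic(&Diagnostic {
        level: "error".to_string(),
        message: "example diagnostic".to_string(),
    });
}
```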
@ -16,7 +16,7 @@ use Level::*;
use emitter::{Emitter, EmitterWriter};
use registry::Registry;

use rustc_data_structures::sync::{self, Lrc, Lock, AtomicUsize, AtomicBool, SeqCst};
use rustc_data_structures::sync::{self, Lrc, Lock};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::stable_hasher::StableHasher;

@ -298,30 +298,34 @@ pub use diagnostic_builder::DiagnosticBuilder;
/// Certain errors (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
pub flags: HandlerFlags,
flags: HandlerFlags,
inner: Lock<HandlerInner>,
}

struct HandlerInner {
flags: HandlerFlags,
/// The number of errors that have been emitted, including duplicates.
///
/// This is not necessarily the count that's reported to the user once
/// compilation ends.
err_count: AtomicUsize,
deduplicated_err_count: AtomicUsize,
emitter: Lock<Box<dyn Emitter + sync::Send>>,
continue_after_error: AtomicBool,
delayed_span_bugs: Lock<Vec<Diagnostic>>,
err_count: usize,
deduplicated_err_count: usize,
emitter: Box<dyn Emitter + sync::Send>,
continue_after_error: bool,
delayed_span_bugs: Vec<Diagnostic>,

/// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
/// emitting the same diagnostic with extended help (`--teach`) twice, which
/// would be uneccessary repetition.
taught_diagnostics: Lock<FxHashSet<DiagnosticId>>,
taught_diagnostics: FxHashSet<DiagnosticId>,

/// Used to suggest rustc --explain <error code>
emitted_diagnostic_codes: Lock<FxHashSet<DiagnosticId>>,
emitted_diagnostic_codes: FxHashSet<DiagnosticId>,

/// This set contains a hash of every diagnostic that has been emitted by
/// this handler. These hashes is used to avoid emitting the same error
/// twice.
emitted_diagnostics: Lock<FxHashSet<u128>>,
emitted_diagnostics: FxHashSet<u128>,
}

fn default_track_diagnostic(_: &Diagnostic) {}

@ -329,7 +333,7 @@ fn default_track_diagnostic(_: &Diagnostic) {}
thread_local!(pub static TRACK_DIAGNOSTICS: Cell<fn(&Diagnostic)> =
Cell::new(default_track_diagnostic));

#[derive(Default)]
#[derive(Copy, Clone, Default)]
pub struct HandlerFlags {
/// If false, warning-level lints are suppressed.
/// (rustc: see `--allow warnings` and `--cap-lints`)

@ -348,13 +352,13 @@ pub struct HandlerFlags {
pub external_macro_backtrace: bool,
}

impl Drop for Handler {
impl Drop for HandlerInner {
fn drop(&mut self) {
if !self.has_errors() {
let mut bugs = self.delayed_span_bugs.borrow_mut();
if self.err_count == 0 {
let bugs = std::mem::replace(&mut self.delayed_span_bugs, Vec::new());
let has_bugs = !bugs.is_empty();
for bug in bugs.drain(..) {
DiagnosticBuilder::new_diagnostic(self, bug).emit();
for bug in bugs {
self.emit_diagnostic(&bug);
}
if has_bugs {
panic!("no errors encountered even though `delay_span_bug` issued");

@ -383,7 +387,8 @@ impl Handler {
cm: Option<Lrc<SourceMapperDyn>>,
flags: HandlerFlags)
-> Handler {
let emitter = Box::new(EmitterWriter::stderr(color_config, cm, false, false, None));
let emitter = Box::new(EmitterWriter::stderr(
color_config, cm, false, false, None, flags.external_macro_backtrace));
Handler::with_emitter_and_flags(emitter, flags)
}

@ -404,19 +409,28 @@ impl Handler {
{
Handler {
flags,
err_count: AtomicUsize::new(0),
deduplicated_err_count: AtomicUsize::new(0),
emitter: Lock::new(e),
continue_after_error: AtomicBool::new(true),
delayed_span_bugs: Lock::new(Vec::new()),
taught_diagnostics: Default::default(),
emitted_diagnostic_codes: Default::default(),
emitted_diagnostics: Default::default(),
inner: Lock::new(HandlerInner {
flags,
err_count: 0,
deduplicated_err_count: 0,
emitter: e,
continue_after_error: true,
delayed_span_bugs: Vec::new(),
taught_diagnostics: Default::default(),
emitted_diagnostic_codes: Default::default(),
emitted_diagnostics: Default::default(),
}),
}
}

pub fn set_continue_after_error(&self, continue_after_error: bool) {
self.continue_after_error.store(continue_after_error, SeqCst);
self.inner.borrow_mut().continue_after_error = continue_after_error;
}

// This is here to not allow mutation of flags;
// as of this writing it's only used in tests in librustc.
pub fn can_emit_warnings(&self) -> bool {
self.flags.can_emit_warnings
}

/// Resets the diagnostic error count as well as the cached emitted diagnostics.

@ -424,11 +438,13 @@ impl Handler {
/// NOTE: *do not* call this function from rustc. It is only meant to be called from external
/// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as
/// the overall count of emitted error diagnostics.
// FIXME: this does not clear inner entirely
pub fn reset_err_count(&self) {
let mut inner = self.inner.borrow_mut();
// actually frees the underlying memory (which `clear` would not do)
*self.emitted_diagnostics.borrow_mut() = Default::default();
self.deduplicated_err_count.store(0, SeqCst);
self.err_count.store(0, SeqCst);
inner.emitted_diagnostics = Default::default();
inner.deduplicated_err_count = 0;
inner.err_count = 0;
}

pub fn struct_dummy(&self) -> DiagnosticBuilder<'_> {
@ -519,30 +535,9 @@ impl Handler {
DiagnosticBuilder::new(self, Level::Fatal, msg)
}

pub fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
err.cancel();
}

fn panic_if_treat_err_as_bug(&self) {
if self.treat_err_as_bug() {
let s = match (self.err_count(), self.flags.treat_err_as_bug.unwrap_or(0)) {
(0, _) => return,
(1, 1) => "aborting due to `-Z treat-err-as-bug=1`".to_string(),
(1, _) => return,
(count, as_bug) => {
format!(
"aborting after {} errors due to `-Z treat-err-as-bug={}`",
count,
as_bug,
)
}
};
panic!(s);
}
}

pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> FatalError {
self.emit(&sp.into(), msg, Fatal);
self.emit_diagnostic(Diagnostic::new(Fatal, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
FatalError
}
pub fn span_fatal_with_code<S: Into<MultiSpan>>(&self,

@ -550,11 +545,13 @@ impl Handler {
msg: &str,
code: DiagnosticId)
-> FatalError {
self.emit_with_code(&sp.into(), msg, code, Fatal);
self.emit_diagnostic(Diagnostic::new_with_code(Fatal, Some(code), msg).set_span(sp));
self.abort_if_errors_and_should_abort();
FatalError
}
pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Error);
self.emit_diagnostic(Diagnostic::new(Error, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn mut_span_err<S: Into<MultiSpan>>(&self,
sp: S,

@ -565,38 +562,30 @@ impl Handler {
result
}
pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
self.emit_with_code(&sp.into(), msg, code, Error);
self.emit_diagnostic(Diagnostic::new_with_code(Error, Some(code), msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Warning);
self.emit_diagnostic(Diagnostic::new(Warning, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
self.emit_with_code(&sp.into(), msg, code, Warning);
self.emit_diagnostic(Diagnostic::new_with_code(Warning, Some(code), msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.emit(&sp.into(), msg, Bug);
panic!(ExplicitBug);
self.inner.borrow_mut().span_bug(sp, msg)
}
pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
if self.treat_err_as_bug() {
// FIXME: don't abort here if report_delayed_bugs is off
self.span_bug(sp, msg);
}
let mut diagnostic = Diagnostic::new(Level::Bug, msg);
diagnostic.set_span(sp.into());
self.delay_as_bug(diagnostic);
}
fn delay_as_bug(&self, diagnostic: Diagnostic) {
if self.flags.report_delayed_bugs {
DiagnosticBuilder::new_diagnostic(self, diagnostic.clone()).emit();
}
self.delayed_span_bugs.borrow_mut().push(diagnostic);
self.inner.borrow_mut().delay_span_bug(sp, msg)
}
pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Bug);
self.emit_diagnostic(Diagnostic::new(Bug, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Note);
self.emit_diagnostic(Diagnostic::new(Note, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
}
pub fn span_note_diag(&self,
sp: Span,

@ -606,53 +595,29 @@ impl Handler {
db.set_span(sp);
db
}
pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn failure(&self, msg: &str) {
DiagnosticBuilder::new(self, FailureNote, msg).emit()
self.inner.borrow_mut().failure(msg);
}
pub fn fatal(&self, msg: &str) -> FatalError {
if self.treat_err_as_bug() {
self.bug(msg);
}
DiagnosticBuilder::new(self, Fatal, msg).emit();
FatalError
self.inner.borrow_mut().fatal(msg)
}
pub fn err(&self, msg: &str) {
if self.treat_err_as_bug() {
self.bug(msg);
}
let mut db = DiagnosticBuilder::new(self, Error, msg);
db.emit();
self.inner.borrow_mut().err(msg);
}
pub fn warn(&self, msg: &str) {
let mut db = DiagnosticBuilder::new(self, Warning, msg);
db.emit();
}
fn treat_err_as_bug(&self) -> bool {
self.flags.treat_err_as_bug.map(|c| self.err_count() >= c).unwrap_or(false)
}
pub fn note_without_error(&self, msg: &str) {
let mut db = DiagnosticBuilder::new(self, Note, msg);
db.emit();
}
pub fn bug(&self, msg: &str) -> ! {
let mut db = DiagnosticBuilder::new(self, Bug, msg);
db.emit();
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}

fn bump_err_count(&self) {
self.err_count.fetch_add(1, SeqCst);
self.panic_if_treat_err_as_bug();
self.inner.borrow_mut().bug(msg)
}

pub fn err_count(&self) -> usize {
self.err_count.load(SeqCst)
self.inner.borrow().err_count
}

pub fn has_errors(&self) -> bool {
@ -660,7 +625,99 @@ impl Handler {
}

pub fn print_error_count(&self, registry: &Registry) {
let s = match self.deduplicated_err_count.load(SeqCst) {
self.inner.borrow_mut().print_error_count(registry)
}

pub fn abort_if_errors(&self) {
self.inner.borrow().abort_if_errors()
}

pub fn abort_if_errors_and_should_abort(&self) {
self.inner.borrow().abort_if_errors_and_should_abort()
}

pub fn must_teach(&self, code: &DiagnosticId) -> bool {
self.inner.borrow_mut().must_teach(code)
}

pub fn force_print_diagnostic(&self, db: Diagnostic) {
self.inner.borrow_mut().force_print_diagnostic(db)
}

pub fn emit_diagnostic(&self, diagnostic: &Diagnostic) {
self.inner.borrow_mut().emit_diagnostic(diagnostic)
}

pub fn emit_artifact_notification(&self, path: &Path, artifact_type: &str) {
self.inner.borrow_mut().emit_artifact_notification(path, artifact_type)
}

pub fn delay_as_bug(&self, diagnostic: Diagnostic) {
self.inner.borrow_mut().delay_as_bug(diagnostic)
}
}

impl HandlerInner {
/// `true` if we haven't taught a diagnostic with this code already.
/// The caller must then teach the user about such a diagnostic.
///
/// Used to suppress emitting the same error multiple times with extended explanation when
/// calling `-Zteach`.
fn must_teach(&mut self, code: &DiagnosticId) -> bool {
self.taught_diagnostics.insert(code.clone())
}

fn force_print_diagnostic(&mut self, db: Diagnostic) {
self.emitter.emit_diagnostic(&db);
}

fn emit_diagnostic(&mut self, diagnostic: &Diagnostic) {
if diagnostic.cancelled() {
return;
}

if diagnostic.level == Warning && !self.flags.can_emit_warnings {
return;
}

TRACK_DIAGNOSTICS.with(|track_diagnostics| {
track_diagnostics.get()(diagnostic);
});

if let Some(ref code) = diagnostic.code {
self.emitted_diagnostic_codes.insert(code.clone());
}

let diagnostic_hash = {
use std::hash::Hash;
let mut hasher = StableHasher::new();
diagnostic.hash(&mut hasher);
hasher.finish()
};

// Only emit the diagnostic if we haven't already emitted an equivalent
// one:
if self.emitted_diagnostics.insert(diagnostic_hash) {
self.emitter.emit_diagnostic(diagnostic);
if diagnostic.is_error() {
self.deduplicated_err_count += 1;
}
}
if diagnostic.is_error() {
self.bump_err_count();
}
}

fn emit_artifact_notification(&mut self, path: &Path, artifact_type: &str) {
self.emitter.emit_artifact_notification(path, artifact_type);
}

fn treat_err_as_bug(&self) -> bool {
self.flags.treat_err_as_bug.map(|c| self.err_count >= c).unwrap_or(false)
}

fn print_error_count(&mut self, registry: &Registry) {
let s = match self.deduplicated_err_count {
0 => return,
1 => "aborting due to previous error".to_string(),
count => format!("aborting due to {} previous errors", count)

@ -671,12 +728,11 @@ impl Handler {

let _ = self.fatal(&s);

let can_show_explain = self.emitter.borrow().should_show_explain();
let are_there_diagnostics = !self.emitted_diagnostic_codes.borrow().is_empty();
let can_show_explain = self.emitter.should_show_explain();
let are_there_diagnostics = !self.emitted_diagnostic_codes.is_empty();
if can_show_explain && are_there_diagnostics {
let mut error_codes = self
.emitted_diagnostic_codes
.borrow()
.iter()
.filter_map(|x| match &x {
DiagnosticId::Error(s) if registry.find_description(s).is_some() => {
@ -704,81 +760,86 @@ impl Handler {
}
}

pub fn abort_if_errors(&self) {
if self.has_errors() {
fn abort_if_errors_and_should_abort(&self) {
if self.err_count > 0 && !self.continue_after_error {
FatalError.raise();
}
}
pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.flags.can_emit_warnings {
return;
}
let mut db = DiagnosticBuilder::new(self, lvl, msg);
db.set_span(msp.clone());
db.emit();
if !self.continue_after_error.load(SeqCst) {
self.abort_if_errors();
}
}
pub fn emit_with_code(&self, msp: &MultiSpan, msg: &str, code: DiagnosticId, lvl: Level) {
if lvl == Warning && !self.flags.can_emit_warnings {
return;
}
let mut db = DiagnosticBuilder::new_with_code(self, lvl, Some(code), msg);
db.set_span(msp.clone());
db.emit();
if !self.continue_after_error.load(SeqCst) {
self.abort_if_errors();

fn abort_if_errors(&self) {
if self.err_count > 0 {
FatalError.raise();
}
}

/// `true` if we haven't taught a diagnostic with this code already.
/// The caller must then teach the user about such a diagnostic.
///
/// Used to suppress emitting the same error multiple times with extended explanation when
/// calling `-Zteach`.
pub fn must_teach(&self, code: &DiagnosticId) -> bool {
self.taught_diagnostics.borrow_mut().insert(code.clone())
fn span_bug<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> ! {
self.emit_diagnostic(Diagnostic::new(Bug, msg).set_span(sp));
self.abort_if_errors_and_should_abort();
panic!(ExplicitBug);
}

pub fn force_print_db(&self, mut db: DiagnosticBuilder<'_>) {
self.emitter.borrow_mut().emit_diagnostic(&db);
db.cancel();
fn delay_span_bug<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) {
if self.treat_err_as_bug() {
// FIXME: don't abort here if report_delayed_bugs is off
self.span_bug(sp, msg);
}
let mut diagnostic = Diagnostic::new(Level::Bug, msg);
diagnostic.set_span(sp.into());
self.delay_as_bug(diagnostic)
}

fn emit_db(&self, db: &DiagnosticBuilder<'_>) {
let diagnostic = &**db;

TRACK_DIAGNOSTICS.with(|track_diagnostics| {
track_diagnostics.get()(diagnostic);
});

if let Some(ref code) = diagnostic.code {
self.emitted_diagnostic_codes.borrow_mut().insert(code.clone());
}

let diagnostic_hash = {
use std::hash::Hash;
let mut hasher = StableHasher::new();
diagnostic.hash(&mut hasher);
hasher.finish()
};

// Only emit the diagnostic if we haven't already emitted an equivalent
// one:
if self.emitted_diagnostics.borrow_mut().insert(diagnostic_hash) {
self.emitter.borrow_mut().emit_diagnostic(db);
if db.is_error() {
self.deduplicated_err_count.fetch_add(1, SeqCst);
}
}
if db.is_error() {
self.bump_err_count();
}
fn failure(&mut self, msg: &str) {
self.emit_diagnostic(&Diagnostic::new(FailureNote, msg));
}

pub fn emit_artifact_notification(&self, path: &Path, artifact_type: &str) {
self.emitter.borrow_mut().emit_artifact_notification(path, artifact_type);
fn fatal(&mut self, msg: &str) -> FatalError {
if self.treat_err_as_bug() {
self.bug(msg);
}
self.emit_diagnostic(&Diagnostic::new(Fatal, msg));
FatalError
}

fn err(&mut self, msg: &str) {
if self.treat_err_as_bug() {
self.bug(msg);
}
self.emit_diagnostic(&Diagnostic::new(Error, msg));
}

fn bug(&mut self, msg: &str) -> ! {
self.emit_diagnostic(&Diagnostic::new(Bug, msg));
panic!(ExplicitBug);
}

fn delay_as_bug(&mut self, diagnostic: Diagnostic) {
if self.flags.report_delayed_bugs {
self.emit_diagnostic(&diagnostic);
}
self.delayed_span_bugs.push(diagnostic);
}

fn bump_err_count(&mut self) {
self.err_count += 1;
self.panic_if_treat_err_as_bug();
}

fn panic_if_treat_err_as_bug(&self) {
if self.treat_err_as_bug() {
let s = match (self.err_count, self.flags.treat_err_as_bug.unwrap_or(0)) {
(0, _) => return,
(1, 1) => "aborting due to `-Z treat-err-as-bug=1`".to_string(),
(1, _) => return,
(count, as_bug) => {
format!(
"aborting after {} errors due to `-Z treat-err-as-bug={}`",
count,
as_bug,
)
}
};
panic!(s);
}
}
}
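The `Handler` refactor above replaces a bundle of independently synchronized fields with a single `Lock<HandlerInner>` behind a thin public facade: `&self` methods forward into the lock, and the inner type holds plain (non-atomic) state. A small, self-contained sketch of that pattern using `std::sync::Mutex` in place of the compiler's `Lock` (all names here are illustrative, not the real API):

```rust
use std::sync::Mutex;

struct HandlerInner {
    err_count: usize,
}

impl HandlerInner {
    fn err(&mut self, msg: &str) {
        eprintln!("error: {}", msg);
        self.err_count += 1;
    }
}

struct Handler {
    inner: Mutex<HandlerInner>,
}

impl Handler {
    fn new() -> Handler {
        Handler { inner: Mutex::new(HandlerInner { err_count: 0 }) }
    }

    // Public methods take `&self` and forward into the single lock,
    // mirroring `self.inner.borrow_mut().err(msg)` in the diff.
    fn err(&self, msg: &str) {
        self.inner.lock().unwrap().err(msg);
    }

    fn err_count(&self) -> usize {
        self.inner.lock().unwrap().err_count
    }
}

fn main() {
    let handler = Handler::new();
    handler.err("something went wrong");
    assert_eq!(handler.err_count(), 1);
}
```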
@ -975,7 +975,9 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImproperCTypes {
fn check_foreign_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::ForeignItem) {
let mut vis = ImproperCTypesVisitor { cx };
let abi = cx.tcx.hir().get_foreign_abi(it.hir_id);
if abi != Abi::RustIntrinsic && abi != Abi::PlatformIntrinsic {
if let Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic = abi {
// Don't worry about types in internal ABIs.
} else {
match it.node {
hir::ForeignItemKind::Fn(ref decl, _, _) => {
vis.check_foreign_fn(it.hir_id, decl);
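The hunk above rewrites a pair of `!=` checks into an `if let` with an or-pattern that names every internal ABI explicitly, so a newly added internal ABI forces this spot to be revisited. A tiny stand-alone illustration of the same shape (with a made-up `Abi` enum, not the compiler's):

```rust
#[allow(dead_code)]
#[derive(Debug)]
enum Abi { Rust, RustCall, RustIntrinsic, PlatformIntrinsic, C }

fn check(abi: Abi) {
    if let Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic = abi {
        // Internal ABIs: nothing to check.
    } else {
        println!("checking foreign types for {:?} ABI", abi);
    }
}

fn main() {
    check(Abi::C);    // checked
    check(Abi::Rust); // skipped
}
```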
@ -9,7 +9,7 @@ use lint::{LintPass, EarlyLintPass, LateLintPass};

use syntax::ast;
use syntax::attr;
use syntax::errors::Applicability;
use syntax::errors::{Applicability, pluralise};
use syntax::feature_gate::{AttributeType, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
use syntax::print::pprust;
use syntax::symbol::{kw, sym};

@ -48,7 +48,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
}

let ty = cx.tables.expr_ty(&expr);
let type_permits_lack_of_use = check_must_use_ty(cx, ty, &expr, s.span, "", "", false);
let type_permits_lack_of_use = check_must_use_ty(cx, ty, &expr, s.span, "", "", 1);

let mut fn_warned = false;
let mut op_warned = false;

@ -135,7 +135,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
span: Span,
descr_pre: &str,
descr_post: &str,
plural: bool,
plural_len: usize,
) -> bool {
if ty.is_unit() || cx.tcx.is_ty_uninhabited_from(
cx.tcx.hir().get_module_parent(expr.hir_id), ty)

@ -143,13 +143,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
return true;
}

let plural_suffix = if plural { "s" } else { "" };
let plural_suffix = pluralise!(plural_len);

match ty.sty {
ty::Adt(..) if ty.is_box() => {
let boxed_ty = ty.boxed_ty();
let descr_pre = &format!("{}boxed ", descr_pre);
check_must_use_ty(cx, boxed_ty, expr, span, descr_pre, descr_post, plural)
check_must_use_ty(cx, boxed_ty, expr, span, descr_pre, descr_post, plural_len)
}
ty::Adt(def, _) => {
check_must_use_def(cx, def.did, span, descr_pre, descr_post)

@ -202,7 +202,15 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
for (i, ty) in tys.iter().map(|k| k.expect_ty()).enumerate() {
let descr_post = &format!(" in tuple element {}", i);
let span = *spans.get(i).unwrap_or(&span);
if check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, plural) {
if check_must_use_ty(
cx,
ty,
expr,
span,
descr_pre,
descr_post,
plural_len
) {
has_emitted = true;
}
}

@ -216,7 +224,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults {
descr_pre,
plural_suffix,
);
check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, true)
check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, n as usize + 1)
}
// Otherwise, we don't lint, to avoid false positives.
_ => false,
@ -180,7 +180,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
}
if Some(DesugaringKind::ForLoop) == move_span.desugaring_kind() {
if let Ok(snippet) = self.infcx.tcx.sess.source_map().span_to_snippet(span) {
let sess = self.infcx.tcx.sess;
if let Ok(snippet) = sess.source_map().span_to_snippet(move_span) {
err.span_suggestion(
move_span,
"consider borrowing to avoid moving into the for loop",

@ -23,7 +23,6 @@ use std::rc::Rc;

crate type PoloniusOutput = Output<RegionVid, BorrowIndex, LocationIndex, Local, MovePathIndex>;

// (forced to be `pub` due to its use as an associated type below.)
crate struct Flows<'b, 'tcx> {
borrows: FlowAtLocation<'tcx, Borrows<'b, 'tcx>>,
pub uninits: FlowAtLocation<'tcx, MaybeUninitializedPlaces<'b, 'tcx>>,

@ -402,7 +402,7 @@ fn do_mir_borrowck<'a, 'tcx>(
}

for diag in mbcx.errors_buffer.drain(..) {
DiagnosticBuilder::new_diagnostic(mbcx.infcx.tcx.sess.diagnostic(), diag).emit();
mbcx.infcx.tcx.sess.diagnostic().emit_diagnostic(&diag);
}
}
@ -244,6 +244,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

let success = this.cfg.start_new_block();
let cleanup = this.diverge_cleanup();

this.record_operands_moved(&args);

this.cfg.terminate(
block,
source_info,

@ -104,25 +104,14 @@ struct Scope {
/// the span of that region_scope
region_scope_span: Span,

/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
/// but not StorageDead statements, which don't get emitted at all
/// for unwinding, for several reasons:
/// * clang doesn't emit llvm.lifetime.end for C++ unwinding
/// * LLVM's memory dependency analysis can't handle it atm
/// * polluting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
/// Note that for generators we do emit StorageDeads, for the
/// use of optimizations in the MIR generator transform.
needs_cleanup: bool,

/// set of places to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
/// building process. This is a stack, so we always drop from the
/// end of the vector (top of the stack) first.
drops: Vec<DropData>,

moved_locals: Vec<Local>,

/// The cache for drop chain on “normal” exit into a particular BasicBlock.
cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>,

@ -172,7 +161,7 @@ struct CachedBlock {
generator_drop: Option<BasicBlock>,
}

#[derive(Debug)]
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum DropKind {
Value,
Storage,

@ -202,8 +191,7 @@ pub enum BreakableTarget {

impl CachedBlock {
fn invalidate(&mut self) {
self.generator_drop = None;
self.unwind = None;
*self = CachedBlock::default();
}

fn get(&self, generator_drop: bool) -> Option<BasicBlock> {

@ -261,6 +249,25 @@ impl Scope {
scope: self.source_scope
}
}

/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
/// but not StorageDead statements, which don't get emitted at all
/// for unwinding, for several reasons:
/// * clang doesn't emit llvm.lifetime.end for C++ unwinding
/// * LLVM's memory dependency analysis can't handle it atm
/// * polluting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
/// Note that for generators we do emit StorageDeads, for the
/// use of optimizations in the MIR generator transform.
fn needs_cleanup(&self) -> bool {
self.drops.iter().any(|drop| match drop.kind {
DropKind::Value => true,
DropKind::Storage => false,
})
}
}
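The hunks above drop the cached `needs_cleanup: bool` flag on `Scope` and replace it with a method that derives the answer from the scheduled drops, so the flag can no longer drift out of sync when drops are suppressed. A self-contained sketch of the idea (simplified types, not the real MIR builder):

```rust
#[derive(PartialEq, Eq)]
enum DropKind { Value, Storage }

struct DropData { kind: DropKind }

struct Scope { drops: Vec<DropData> }

impl Scope {
    // Computed on demand: only value drops require a cleanup (unwind) block;
    // StorageDead entries never do.
    fn needs_cleanup(&self) -> bool {
        self.drops.iter().any(|drop| match drop.kind {
            DropKind::Value => true,
            DropKind::Storage => false,
        })
    }
}

fn main() {
    let scope = Scope {
        drops: vec![DropData { kind: DropKind::Storage }, DropData { kind: DropKind::Value }],
    };
    assert!(scope.needs_cleanup());
}
```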
impl<'tcx> Scopes<'tcx> {

@ -274,8 +281,8 @@ impl<'tcx> Scopes<'tcx> {
source_scope: vis_scope,
region_scope: region_scope.0,
region_scope_span: region_scope.1.span,
needs_cleanup: false,
drops: vec![],
moved_locals: vec![],
cached_generator_drop: None,
cached_exits: Default::default(),
cached_unwind: CachedBlock::default(),

@ -295,7 +302,7 @@ impl<'tcx> Scopes<'tcx> {

fn may_panic(&self, scope_count: usize) -> bool {
let len = self.len();
self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup)
self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup())
}

/// Finds the breakable scope for a given label. This is used for

@ -480,7 +487,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block,
unwind_to,
self.arg_count,
false,
false, // not generator
false, // not unwind path
));

block.unit()

@ -572,7 +580,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block,
unwind_to,
self.arg_count,
false,
false, // not generator
false, // not unwind path
));

scope = next_scope;

@ -622,7 +631,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block,
unwind_to,
self.arg_count,
true,
true, // is generator
true, // is cached path
));
}

@ -801,10 +811,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// cache of outer scope stays intact.
scope.invalidate_cache(!needs_drop, self.is_generator, this_scope);
if this_scope {
if let DropKind::Value = drop_kind {
scope.needs_cleanup = true;
}

let region_scope_span = region_scope.span(self.hir.tcx(),
&self.hir.region_scope_tree);
// Attribute scope exit drops to scope's closing brace.

@ -822,6 +828,75 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local);
}

/// Indicates that the "local operand" stored in `local` is
/// *moved* at some point during execution (see `local_scope` for
/// more information about what a "local operand" is -- in short,
/// it's an intermediate operand created as part of preparing some
/// MIR instruction). We use this information to suppress
/// redundant drops on the non-unwind paths. This results in less
/// MIR, but also avoids spurious borrow check errors
/// (c.f. #64391).
///
/// Example: when compiling the call to `foo` here:
///
/// ```rust
/// foo(bar(), ...)
/// ```
///
/// we would evaluate `bar()` to an operand `_X`. We would also
/// schedule `_X` to be dropped when the expression scope for
/// `foo(bar())` is exited. This is relevant, for example, if the
/// later arguments should unwind (it would ensure that `_X` gets
/// dropped). However, if no unwind occurs, then `_X` will be
/// unconditionally consumed by the `call`:
///
/// ```
/// bb {
/// ...
/// _R = CALL(foo, _X, ...)
/// }
/// ```
///
/// However, `_X` is still registered to be dropped, and so if we
/// do nothing else, we would generate a `DROP(_X)` that occurs
/// after the call. This will later be optimized out by the
/// drop-elaboation code, but in the meantime it can lead to
/// spurious borrow-check errors -- the problem, ironically, is
/// not the `DROP(_X)` itself, but the (spurious) unwind pathways
/// that it creates. See #64391 for an example.
pub fn record_operands_moved(
&mut self,
operands: &[Operand<'tcx>],
) {
let scope = match self.local_scope() {
None => {
// if there is no local scope, operands won't be dropped anyway
return;
}

Some(local_scope) => {
self.scopes.iter_mut().find(|scope| scope.region_scope == local_scope)
.unwrap_or_else(|| bug!("scope {:?} not found in scope list!", local_scope))
}
};

// look for moves of a local variable, like `MOVE(_X)`
let locals_moved = operands.iter().flat_map(|operand| match operand {
Operand::Copy(_) | Operand::Constant(_) => None,
Operand::Move(place) => place.as_local(),
});

for local in locals_moved {
// check if we have a Drop for this operand and -- if so
// -- add it to the list of moved operands. Note that this
// local might not have been an operand created for this
// call, it could come from other places too.
if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) {
scope.moved_locals.push(local);
}
}
}
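The new doc comment above describes the core idea: remember which locals were moved into a call so that, on the normal (non-unwind) exit path, their scheduled drops can be skipped. A stripped-down sketch of that filtering step, with hypothetical `Local`/`DropData` types standing in for the real MIR data structures:

```rust
#[derive(Clone, Copy, PartialEq, Eq)]
struct Local(u32);

#[allow(dead_code)]
#[derive(PartialEq, Eq)]
enum DropKind { Value, Storage }

struct DropData { local: Local, kind: DropKind }

// Drops to actually emit when leaving the scope on the normal path:
// value drops for moved operands are skipped, mirroring the check
// added to `build_scope_drops` below.
fn drops_to_emit<'a>(
    drops: &'a [DropData],
    moved_locals: &'a [Local],
    is_unwind_path: bool,
) -> impl Iterator<Item = &'a DropData> {
    drops.iter().filter(move |drop| {
        !(drop.kind == DropKind::Value
            && !is_unwind_path
            && moved_locals.contains(&drop.local))
    })
}

fn main() {
    let drops = vec![
        DropData { local: Local(1), kind: DropKind::Value },
        DropData { local: Local(2), kind: DropKind::Value },
    ];
    let moved = vec![Local(1)];
    // Only `_2` still needs a drop on the normal exit path.
    assert_eq!(drops_to_emit(&drops, &moved, false).count(), 1);
}
```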
// Other
// =====
/// Branch based on a boolean condition.

@ -1020,6 +1095,7 @@ fn build_scope_drops<'tcx>(
last_unwind_to: BasicBlock,
arg_count: usize,
generator_drop: bool,
is_cached_path: bool,
) -> BlockAnd<()> {
debug!("build_scope_drops({:?} -> {:?})", block, scope);

@ -1046,8 +1122,17 @@ fn build_scope_drops<'tcx>(
let drop_data = &scope.drops[drop_idx];
let source_info = scope.source_info(drop_data.span);
let local = drop_data.local;

match drop_data.kind {
DropKind::Value => {
// If the operand has been moved, and we are not on an unwind
// path, then don't generate the drop. (We only take this into
// account for non-unwind paths so as not to disturb the
// caching mechanism.)
if !is_cached_path && scope.moved_locals.iter().any(|&o| o == local) {
continue;
}

let unwind_to = get_unwind_to(scope, is_generator, drop_idx, generator_drop)
.unwrap_or(last_unwind_to);

@ -1407,10 +1407,17 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
}
}
ty::FnPtr(_) => {
if self.mode.requires_const_checking() {
let unleash_miri = self
.tcx
.sess
.opts
.debugging_opts
.unleash_the_miri_inside_of_you;
if self.mode.requires_const_checking() && !unleash_miri {
let mut err = self.tcx.sess.struct_span_err(
self.span,
&format!("function pointers are not allowed in const fn"));
"function pointers are not allowed in const fn"
);
err.emit();
}
}
@ -26,6 +26,8 @@
use crate::Resolver;
use crate::resolve_imports::ImportDirectiveSubclass;

use errors::pluralise;

use rustc::util::nodemap::NodeMap;
use rustc::{lint, ty};
use rustc_data_structures::fx::FxHashSet;

@ -295,7 +297,7 @@ impl Resolver<'_> {
}).collect::<Vec<String>>();
span_snippets.sort();
let msg = format!("unused import{}{}",
if len > 1 { "s" } else { "" },
pluralise!(len),
if !span_snippets.is_empty() {
format!(": {}", span_snippets.join(", "))
} else {

@ -424,7 +424,7 @@ impl<'a> LateResolutionVisitor<'a, '_> {
} else {
err.note("did you mean to use one of the enum's variants?");
}
},
}
(Res::Def(DefKind::Struct, def_id), _) if ns == ValueNS => {
if let Some((ctor_def, ctor_vis))
= self.r.struct_constructors.get(&def_id).cloned() {

@ -445,6 +445,12 @@ impl<'a> LateResolutionVisitor<'a, '_> {
(Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _), _) if ns == ValueNS => {
bad_struct_syntax_suggestion();
}
(Res::Def(DefKind::Ctor(_, CtorKind::Fn), _), _) if ns == ValueNS => {
err.span_label(
span,
format!("did you mean `{} ( /* fields */ )`?", path_str),
);
}
(Res::SelfTy(..), _) if ns == ValueNS => {
err.span_label(span, fallback_label);
err.note("can't use `Self` as a constructor, you must use the implemented struct");

@ -11,7 +11,7 @@ use crate::{Resolver, ResolutionError, Segment, ModuleKind};
use crate::{names_to_string, module_to_string};
use crate::diagnostics::Suggestion;

use errors::Applicability;
use errors::{Applicability, pluralise};

use rustc_data_structures::ptr_key::PtrKey;
use rustc::ty;

@ -728,7 +728,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {

let msg = format!(
"unresolved import{} {}",
if paths.len() > 1 { "s" } else { "" },
pluralise!(paths.len()),
paths.join(", "),
);
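Several of the hunks above and below swap hand-written `if len == 1 { "" } else { "s" }` expressions for the shared `pluralise!` macro imported from `errors`. Its observable behaviour is just the suffix choice; a minimal macro with the same effect (a sketch of the behaviour only, not the compiler's actual definition) looks like:

```rust
// Returns "s" unless the count is exactly one.
macro_rules! pluralise {
    ($x:expr) => {
        if $x == 1 { "" } else { "s" }
    };
}

fn main() {
    let paths = vec!["std::io", "std::fmt"];
    println!("unresolved import{} {}", pluralise!(paths.len()), paths.join(", "));
    assert_eq!(pluralise!(1), "");
    assert_eq!(pluralise!(3), "s");
}
```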
@ -1346,7 +1346,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
span,
E0191,
"the value of the associated type{} {} must be specified",
if associated_types.len() == 1 { "" } else { "s" },
pluralise!(associated_types.len()),
names,
);
let (suggest, potential_assoc_types_spans) =

@ -263,7 +263,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

fn confirm_builtin_call(
&self,
call_expr: &hir::Expr,
call_expr: &'tcx hir::Expr,
callee_ty: Ty<'tcx>,
arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,

@ -425,7 +425,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
self.check_argument_types(
call_expr.span,
call_expr.span,
call_expr,
inputs,
&expected_arg_tys[..],
arg_exprs,

@ -439,7 +439,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

fn confirm_deferred_closure_call(
&self,
call_expr: &hir::Expr,
call_expr: &'tcx hir::Expr,
arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
fn_sig: ty::FnSig<'tcx>,

@ -458,7 +458,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

self.check_argument_types(
call_expr.span,
call_expr.span,
call_expr,
fn_sig.inputs(),
&expected_arg_tys,
arg_exprs,

@ -472,14 +472,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

fn confirm_overloaded_call(
&self,
call_expr: &hir::Expr,
call_expr: &'tcx hir::Expr,
arg_exprs: &'tcx [hir::Expr],
expected: Expectation<'tcx>,
method_callee: MethodCallee<'tcx>,
) -> Ty<'tcx> {
let output_type = self.check_method_argument_types(
call_expr.span,
call_expr.span,
call_expr,
Ok(method_callee),
arg_exprs,
TupleArgumentsFlag::TupleArguments,

@ -163,7 +163,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {

// Just ignore error types.
if a.references_error() || b.references_error() {
return success(vec![], b, vec![]);
return success(vec![], self.fcx.tcx.types.err, vec![]);
}

if a.is_never() {

@ -821,7 +821,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

let (adjustments, _) = self.register_infer_ok_obligations(ok);
self.apply_adjustments(expr, adjustments);
Ok(target)
Ok(if expr_ty.references_error() {
self.tcx.types.err
} else {
target
})
}

/// Same as `try_coerce()`, but without side-effects.
@ -17,7 +17,7 @@ use crate::util::common::ErrorReported;
use crate::util::nodemap::FxHashMap;
use crate::astconv::AstConv as _;

use errors::{Applicability, DiagnosticBuilder};
use errors::{Applicability, DiagnosticBuilder, pluralise};
use syntax::ast;
use syntax::symbol::{Symbol, kw, sym};
use syntax::source_map::Span;

@ -796,7 +796,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Call the generic checker.
self.check_method_argument_types(
span,
expr.span,
expr,
method,
&args[1..],
DontTupleArguments,

@ -1178,7 +1178,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

struct_span_err!(tcx.sess, span, E0063,
"missing field{} {}{} in initializer of `{}`",
if remaining_fields.len() == 1 { "" } else { "s" },
pluralise!(remaining_fields.len()),
remaining_fields_names,
truncated_fields_error,
adt_ty)

@ -181,13 +181,34 @@ impl<'a, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'tcx> {

let scope = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);

// Record the unadjusted type
// If there are adjustments, then record the final type --
// this is the actual value that is being produced.
if let Some(adjusted_ty) = self.fcx.tables.borrow().expr_ty_adjusted_opt(expr) {
self.record(adjusted_ty, scope, Some(expr), expr.span);
}

// Also record the unadjusted type (which is the only type if
// there are no adjustments). The reason for this is that the
// unadjusted value is sometimes a "temporary" that would wind
// up in a MIR temporary.
//
// As an example, consider an expression like `vec![].push()`.
// Here, the `vec![]` would wind up MIR stored into a
// temporary variable `t` which we can borrow to invoke
// `<Vec<_>>::push(&mut t)`.
//
// Note that an expression can have many adjustments, and we
// are just ignoring those intermediate types. This is because
// those intermediate values are always linearly "consumed" by
// the other adjustments, and hence would never be directly
// captured in the MIR.
//
// (Note that this partly relies on the fact that the `Deref`
// traits always return references, which means their content
// can be reborrowed without needing to spill to a temporary.
// If this were not the case, then we could conceivably have
// to create intermediate temporaries.)
let ty = self.fcx.tables.borrow().expr_ty(expr);
self.record(ty, scope, Some(expr), expr.span);

// Also include the adjusted types, since these can result in MIR locals
for adjustment in self.fcx.tables.borrow().expr_adjustments(expr) {
self.record(adjustment.target, scope, Some(expr), expr.span);
}
}
}
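The new comment above is about expressions like `vec![].push(x)`, where the unadjusted value (`Vec<_>`) lives in a temporary that a later borrow adjusts to `&mut Vec<_>`; both types have to be recorded so the temporary is kept alive across any yield point inside the enclosing generator. Written out without the method-call sugar, the shape it is worried about is roughly:

```rust
fn main() {
    // `Vec::new()` produces a temporary; the `&mut` adjustment borrows it
    // so that `push` can be called. The unadjusted type (Vec<u32>) is what
    // would end up stored in a MIR temporary.
    <Vec<u32>>::push(&mut Vec::new(), 1);

    // The usual sugar for the same thing:
    let mut v: Vec<u32> = Vec::new();
    v.push(1);
    assert_eq!(v, vec![1]);
}
```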
@ -5,7 +5,7 @@ use crate::check::FnCtxt;
|
||||
use crate::middle::lang_items::FnOnceTraitLangItem;
|
||||
use crate::namespace::Namespace;
|
||||
use crate::util::nodemap::FxHashSet;
|
||||
use errors::{Applicability, DiagnosticBuilder};
|
||||
use errors::{Applicability, DiagnosticBuilder, pluralise};
|
||||
use rustc::hir::{self, ExprKind, Node, QPath};
|
||||
use rustc::hir::def::{Res, DefKind};
|
||||
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
|
||||
@ -560,7 +560,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
let help = format!("{an}other candidate{s} {were} found in the following \
|
||||
trait{s}, perhaps add a `use` for {one_of_them}:",
|
||||
an = if candidates.len() == 1 {"an" } else { "" },
|
||||
s = if candidates.len() == 1 { "" } else { "s" },
|
||||
s = pluralise!(candidates.len()),
|
||||
were = if candidates.len() == 1 { "was" } else { "were" },
|
||||
one_of_them = if candidates.len() == 1 {
|
||||
"it"
|
||||
|
@ -88,7 +88,7 @@ pub mod intrinsic;
|
||||
mod op;
|
||||
|
||||
use crate::astconv::{AstConv, PathSeg};
|
||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
|
||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise};
|
||||
use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath};
|
||||
use rustc::hir::def::{CtorOf, Res, DefKind};
|
||||
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
||||
@ -153,7 +153,7 @@ use self::method::{MethodCallee, SelfSource};
|
||||
use self::TupleArgumentsFlag::*;
|
||||
|
||||
/// The type of a local binding, including the revealed type for anon types.
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct LocalTy<'tcx> {
|
||||
decl_ty: Ty<'tcx>,
|
||||
revealed_ty: Ty<'tcx>
|
||||
@ -2340,16 +2340,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
// If span arose from a desugaring of `if` or `while`, then it is the condition itself,
|
||||
// which diverges, that we are about to lint on. This gives suboptimal diagnostics.
|
||||
// Instead, stop here so that the `if`- or `while`-expression's block is linted instead.
|
||||
if !span.is_desugaring(DesugaringKind::CondTemporary) {
|
||||
if !span.is_desugaring(DesugaringKind::CondTemporary) &&
|
||||
!span.is_desugaring(DesugaringKind::Async)
|
||||
{
|
||||
self.diverges.set(Diverges::WarnedAlways);
|
||||
|
||||
debug!("warn_if_unreachable: id={:?} span={:?} kind={}", id, span, kind);
|
||||
|
||||
let msg = format!("unreachable {}", kind);
|
||||
self.tcx().struct_span_lint_hir(lint::builtin::UNREACHABLE_CODE, id, span, &msg)
|
||||
.span_note(
|
||||
.span_label(span, &msg)
|
||||
.span_label(
|
||||
orig_span,
|
||||
custom_note.unwrap_or("any code following this expression is unreachable")
|
||||
custom_note.unwrap_or("any code following this expression is unreachable"),
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
@ -2614,16 +2617,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

    /// As `instantiate_type_scheme`, but for the bounds found in a
    /// generic type scheme.
    fn instantiate_bounds(&self, span: Span, def_id: DefId, substs: SubstsRef<'tcx>)
                          -> ty::InstantiatedPredicates<'tcx> {
    fn instantiate_bounds(
        &self,
        span: Span,
        def_id: DefId,
        substs: SubstsRef<'tcx>,
    ) -> (ty::InstantiatedPredicates<'tcx>, Vec<Span>) {
        let bounds = self.tcx.predicates_of(def_id);
        let spans: Vec<Span> = bounds.predicates.iter().map(|(_, span)| *span).collect();
        let result = bounds.instantiate(self.tcx, substs);
        let result = self.normalize_associated_types_in(span, &result);
        debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
        debug!(
            "instantiate_bounds(bounds={:?}, substs={:?}) = {:?}, {:?}",
            bounds,
            substs,
            result);
        result
            result,
            spans,
        );
        (result, spans)
    }

    /// Replaces the opaque types from the given value with type variables,
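
`instantiate_bounds` now returns the declaration span of each predicate alongside the instantiated bounds. A hedged toy sketch of the pairing idea, with strings and byte offsets standing in for real predicates and `Span`s: callers such as `add_required_obligations` further down zip the two lists so an obligation's error can be re-pointed at the `where` clause that produced it.

    // Toy model only: each "predicate" carries the offset at which it was declared.
    fn instantiate_bounds(predicates: &[(&str, u32)]) -> (Vec<String>, Vec<u32>) {
        let spans = predicates.iter().map(|&(_, span)| span).collect();
        let bounds = predicates.iter().map(|&(pred, _)| pred.to_string()).collect();
        (bounds, spans)
    }

    fn main() {
        let decls = [("T: Clone", 120), ("T: Send", 134)];
        let (bounds, spans) = instantiate_bounds(&decls);
        for (bound, span) in bounds.iter().zip(&spans) {
            println!("obligation `{}` points back at offset {}", bound, span);
        }
    }
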
@ -3059,12 +3070,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    fn check_method_argument_types(
        &self,
        sp: Span,
        expr_sp: Span,
        expr: &'tcx hir::Expr,
        method: Result<MethodCallee<'tcx>, ()>,
        args_no_rcvr: &'tcx [hir::Expr],
        tuple_arguments: TupleArgumentsFlag,
        expected: Expectation<'tcx>,
    ) -> Ty<'tcx> {

        let has_error = match method {
            Ok(method) => {
                method.substs.references_error() || method.sig.references_error()

@ -3079,8 +3091,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            TupleArguments => vec![self.tcx.intern_tup(&err_inputs[..])],
        };

        self.check_argument_types(sp, expr_sp, &err_inputs[..], &[], args_no_rcvr,
                                  false, tuple_arguments, None);
        self.check_argument_types(
            sp,
            expr,
            &err_inputs[..],
            &[],
            args_no_rcvr,
            false,
            tuple_arguments,
            None,
        );
        return self.tcx.types.err;
    }

@ -3092,9 +3112,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            method.sig.output(),
            &method.sig.inputs()[1..]
        );
        self.check_argument_types(sp, expr_sp, &method.sig.inputs()[1..], &expected_arg_tys[..],
                                  args_no_rcvr, method.sig.c_variadic, tuple_arguments,
                                  self.tcx.hir().span_if_local(method.def_id));
        self.check_argument_types(
            sp,
            expr,
            &method.sig.inputs()[1..],
            &expected_arg_tys[..],
            args_no_rcvr,
            method.sig.c_variadic,
            tuple_arguments,
            self.tcx.hir().span_if_local(method.def_id),
        );
        method.sig.output()
    }

@ -3171,7 +3198,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    fn check_argument_types(
        &self,
        sp: Span,
        expr_sp: Span,
        expr: &'tcx hir::Expr,
        fn_inputs: &[Ty<'tcx>],
        expected_arg_tys: &[Ty<'tcx>],
        args: &'tcx [hir::Expr],

@ -3180,7 +3207,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        def_span: Option<Span>,
    ) {
        let tcx = self.tcx;

        // Grab the argument types, supplying fresh type variables
        // if the wrong number of arguments were supplied
        let supplied_arg_count = if tuple_arguments == DontTupleArguments {

@ -3191,8 +3217,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

        // All the input types from the fn signature must outlive the call
        // so as to validate implied bounds.
        for &fn_input_ty in fn_inputs {
            self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
        for (fn_input_ty, arg_expr) in fn_inputs.iter().zip(args.iter()) {
            self.register_wf_obligation(fn_input_ty, arg_expr.span, traits::MiscObligation);
        }

        let expected_arg_count = fn_inputs.len();

@ -3214,7 +3240,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            err.span_label(def_s, "defined here");
        }
        if sugg_unit {
            let sugg_span = tcx.sess.source_map().end_point(expr_sp);
            let sugg_span = tcx.sess.source_map().end_point(expr.span);
            // remove closing `)` from the span
            let sugg_span = sugg_span.shrink_to_lo();
            err.span_suggestion(

@ -3308,6 +3334,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        // the call. This helps coercions.
        if check_closures {
            self.select_obligations_where_possible(false, |errors| {
                self.point_at_type_arg_instead_of_call_if_possible(errors, expr);
                self.point_at_arg_instead_of_call_if_possible(
                    errors,
                    &final_arg_types[..],
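
Registering the well-formedness obligation against each argument's own span (rather than the span of the whole call) feeds the `point_at_arg_instead_of_call_if_possible` helper invoked above. An illustrative, intentionally non-compiling call of the kind this affects; the unsatisfied `Copy` bound can then be reported on the offending argument rather than on the call as a whole (exact rendering depends on the compiler version):

    fn duplicate<T: Copy>(value: T) -> (T, T) {
        (value, value)
    }

    fn main() {
        let s = String::from("not Copy");
        // error[E0277]: the trait bound `String: Copy` is not satisfied
        let _ = duplicate(s);
    }
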
@ -3445,6 +3472,50 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        }
    }

    /// Given a vec of evaluated `FullfillmentError`s and an `fn` call expression, we walk the
    /// `PathSegment`s and resolve their type parameters to see if any of the `FullfillmentError`s
    /// were caused by them. If they were, we point at the corresponding type argument's span
    /// instead of the `fn` call path span.
    fn point_at_type_arg_instead_of_call_if_possible(
        &self,
        errors: &mut Vec<traits::FulfillmentError<'_>>,
        call_expr: &'tcx hir::Expr,
    ) {
        if let hir::ExprKind::Call(path, _) = &call_expr.node {
            if let hir::ExprKind::Path(qpath) = &path.node {
                if let hir::QPath::Resolved(_, path) = &qpath {
                    for error in errors {
                        if let ty::Predicate::Trait(predicate) = error.obligation.predicate {
                            // If any of the type arguments in this path segment caused the
                            // `FullfillmentError`, point at its span (#61860).
                            for arg in path.segments.iter()
                                .filter_map(|seg| seg.args.as_ref())
                                .flat_map(|a| a.args.iter())
                            {
                                if let hir::GenericArg::Type(hir_ty) = &arg {
                                    if let hir::TyKind::Path(
                                        hir::QPath::TypeRelative(..),
                                    ) = &hir_ty.node {
                                        // Avoid ICE with associated types. As this is best
                                        // effort only, it's ok to ignore the case. It
                                        // would trigger in `is_send::<T::AssocType>();`
                                        // from `typeck-default-trait-impl-assoc-type.rs`.
                                    } else {
                                        let ty = AstConv::ast_ty_to_ty(self, hir_ty);
                                        let ty = self.resolve_vars_if_possible(&ty);
                                        if ty == predicate.skip_binder().self_ty() {
                                            error.obligation.cause.span = hir_ty.span;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // AST fragment checking
    fn check_lit(&self,
                 lit: &hir::Lit,
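
`point_at_type_arg_instead_of_call_if_possible` covers the case referenced as #61860 in the comment above: when a turbofished type argument is what fails the bound, the error span is moved onto that argument. An intentionally non-compiling illustration (the error wording is the compiler's, not fixed by this diff):

    use std::rc::Rc;

    fn is_send<T: Send>() {}

    fn main() {
        // error[E0277]: `Rc<i32>` cannot be sent between threads safely
        // With this change the diagnostic can point at `Rc<i32>` itself
        // rather than at the whole `is_send::<Rc<i32>>()` call.
        is_send::<Rc<i32>>();
    }
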
@ -3601,7 +3672,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        self.write_user_type_annotation_from_substs(hir_id, did, substs, None);

        // Check bounds on type arguments used in the path.
        let bounds = self.instantiate_bounds(path_span, did, substs);
        let (bounds, _) = self.instantiate_bounds(path_span, did, substs);
        let cause = traits::ObligationCause::new(
            path_span,
            self.body_id,
@ -3751,15 +3822,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

        if let Some(ref init) = local.init {
            let init_ty = self.check_decl_initializer(local, &init);
            if init_ty.references_error() {
                self.write_ty(local.hir_id, init_ty);
            }
            self.overwrite_local_ty_if_err(local, t, init_ty);
        }

        self.check_pat_top(&local.pat, t, None);
        let pat_ty = self.node_ty(local.pat.hir_id);
        if pat_ty.references_error() {
            self.write_ty(local.hir_id, pat_ty);
        self.overwrite_local_ty_if_err(local, t, pat_ty);
        }

    fn overwrite_local_ty_if_err(&self, local: &'tcx hir::Local, decl_ty: Ty<'tcx>, ty: Ty<'tcx>) {
        if ty.references_error() {
            // Override the types everywhere with `types.err` to avoid knock down errors.
            self.write_ty(local.hir_id, ty);
            self.write_ty(local.pat.hir_id, ty);
            let local_ty = LocalTy {
                decl_ty,
                revealed_ty: ty,
            };
            self.locals.borrow_mut().insert(local.hir_id, local_ty);
            self.locals.borrow_mut().insert(local.pat.hir_id, local_ty);
        }
    }

@ -4725,13 +4806,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        // First, store the "user substs" for later.
        self.write_user_type_annotation_from_substs(hir_id, def_id, substs, user_self_ty);

        // Add all the obligations that are required, substituting and
        // normalized appropriately.
        let bounds = self.instantiate_bounds(span, def_id, &substs);
        self.add_obligations_for_parameters(
            traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def_id)),
            &bounds,
        );
        self.add_required_obligations(span, def_id, &substs);

        // Substitute the values for the type parameters into the type of
        // the referenced item.

@ -4768,6 +4843,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        (ty_substituted, res)
    }

    /// Add all the obligations that are required, substituting and normalized appropriately.
    fn add_required_obligations(&self, span: Span, def_id: DefId, substs: &SubstsRef<'tcx>) {
        let (bounds, spans) = self.instantiate_bounds(span, def_id, &substs);

        for (i, mut obligation) in traits::predicates_for_generics(
            traits::ObligationCause::new(
                span,
                self.body_id,
                traits::ItemObligation(def_id),
            ),
            self.param_env,
            &bounds,
        ).into_iter().enumerate() {
            // This makes the error point at the bound, but we want to point at the argument
            if let Some(span) = spans.get(i) {
                obligation.cause.code = traits::BindingObligation(def_id, *span);
            }
            self.register_predicate(obligation);
        }
    }

    fn check_rustc_args_require_const(&self,
                                      def_id: DefId,
                                      hir_id: hir::HirId,

@ -4935,5 +5031,5 @@ fn fatally_break_rust(sess: &Session) {
}

fn potentially_plural_count(count: usize, word: &str) -> String {
    format!("{} {}{}", count, word, if count == 1 { "" } else { "s" })
    format!("{} {}{}", count, word, pluralise!(count))
}

@ -1,6 +1,6 @@
use crate::check::FnCtxt;
use crate::util::nodemap::FxHashMap;
use errors::{Applicability, DiagnosticBuilder};
use errors::{Applicability, DiagnosticBuilder, pluralise};
use rustc::hir::{self, PatKind, Pat, HirId};
use rustc::hir::def::{Res, DefKind, CtorKind};
use rustc::hir::pat_util::EnumerateAndAdjustIterator;

@ -676,18 +676,35 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            }
        } else {
            // Pattern has wrong number of fields.
            self.e0023(pat.span, res, &subpats, &variant.fields);
            self.e0023(pat.span, res, &subpats, &variant.fields, expected);
            on_error();
            return tcx.types.err;
        }
        pat_ty
    }

    fn e0023(&self, pat_span: Span, res: Res, subpats: &'tcx [P<Pat>], fields: &[ty::FieldDef]) {
        let subpats_ending = if subpats.len() == 1 { "" } else { "s" };
        let fields_ending = if fields.len() == 1 { "" } else { "s" };
    fn e0023(
        &self,
        pat_span: Span,
        res: Res,
        subpats: &'tcx [P<Pat>],
        fields: &[ty::FieldDef],
        expected: Ty<'tcx>
    ) {
        let subpats_ending = pluralise!(subpats.len());
        let fields_ending = pluralise!(fields.len());
        let missing_parenthesis = match expected.sty {
            ty::Adt(_, substs) if fields.len() == 1 => {
                let field_ty = fields[0].ty(self.tcx, substs);
                match field_ty.sty {
                    ty::Tuple(_) => field_ty.tuple_fields().count() == subpats.len(),
                    _ => false,
                }
            }
            _ => false,
        };
        let res_span = self.tcx.def_span(res.def_id());
        struct_span_err!(
        let mut err = struct_span_err!(
            self.tcx.sess,
            pat_span,
            E0023,

@ -697,15 +714,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            res.descr(),
            fields.len(),
            fields_ending,
        )
        .span_label(pat_span, format!(
        );
        err.span_label(pat_span, format!(
            "expected {} field{}, found {}",
            fields.len(),
            fields_ending,
            subpats.len(),
        ))
        .span_label(res_span, format!("{} defined here", res.descr()))
        .emit();
        .span_label(res_span, format!("{} defined here", res.descr()));

        if missing_parenthesis {
            err.multipart_suggestion(
                "missing parenthesis",
                vec![(subpats[0].span.shrink_to_lo(), "(".to_string()),
                     (subpats[subpats.len()-1].span.shrink_to_hi(), ")".to_string())],
                Applicability::MachineApplicable,
            );
        }

        err.emit();
    }

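
The new `missing_parenthesis` check fires when a tuple struct has exactly one field, that field is itself a tuple, and the pattern supplies the inner tuple's fields directly. An intentionally non-compiling example of the case the machine-applicable suggestion targets:

    struct Wrapper((i32, i32));

    fn main() {
        let w = Wrapper((1, 2));
        match w {
            // error[E0023]: the pattern supplies 2 fields, but the tuple struct
            // has 1 field; the suggested fix is `Wrapper((a, b))`.
            Wrapper(a, b) => println!("{} {}", a, b),
        }
    }
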
    fn check_pat_tuple(

@ -1103,10 +1130,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            E0527,
            "pattern requires {} element{} but array has {}",
            min_len,
            if min_len != 1 { "s" } else { "" },
            pluralise!(min_len),
            size,
        )
        .span_label(span, format!("expected {} element{}", size, if size != 1 { "s" } else { "" }))
        .span_label(span, format!("expected {} element{}", size, pluralise!(size)))
        .emit();
    }

@ -1117,14 +1144,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            E0528,
            "pattern requires at least {} element{} but array has {}",
            min_len,
            if min_len != 1 { "s" } else { "" },
            pluralise!(min_len),
            size,
        ).span_label(
            span,
            format!(
                "pattern cannot match array of {} element{}",
                size,
                if size != 1 { "s" } else { "" },
                pluralise!(size),
            ),
        ).emit();
    }

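
For the E0527/E0528 messages reworded above, a minimal, intentionally non-compiling reproduction of the fixed-length case:

    fn main() {
        let arr = [1, 2];
        // error[E0527]: pattern requires 3 elements but array has 2
        let [a, b, c] = arr;
        println!("{} {} {}", a, b, c);
    }
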
@ -3,7 +3,6 @@
|
||||
// substitutions.
|
||||
|
||||
use crate::check::FnCtxt;
|
||||
use errors::DiagnosticBuilder;
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::{DefId, DefIndex};
|
||||
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
|
||||
@ -407,7 +406,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
|
||||
if !errors_buffer.is_empty() {
|
||||
errors_buffer.sort_by_key(|diag| diag.span.primary_span());
|
||||
for diag in errors_buffer.drain(..) {
|
||||
DiagnosticBuilder::new_diagnostic(self.tcx().sess.diagnostic(), diag).emit();
|
||||
self.tcx().sess.diagnostic().emit_diagnostic(&diag);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -193,6 +193,7 @@ pub fn new_handler(error_format: ErrorOutputType,
|
||||
short,
|
||||
sessopts.debugging_opts.teach,
|
||||
sessopts.debugging_opts.terminal_width,
|
||||
false,
|
||||
).ui_testing(ui_testing)
|
||||
)
|
||||
},
|
||||
@ -205,6 +206,7 @@ pub fn new_handler(error_format: ErrorOutputType,
|
||||
source_map,
|
||||
pretty,
|
||||
json_rendered,
|
||||
false,
|
||||
).ui_testing(ui_testing)
|
||||
)
|
||||
},
|
||||
|
@ -401,7 +401,7 @@ pub fn make_test(s: &str,
|
||||
// Any errors in parsing should also appear when the doctest is compiled for real, so just
|
||||
// send all the errors that libsyntax emits directly into a `Sink` instead of stderr.
|
||||
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
||||
let emitter = EmitterWriter::new(box io::sink(), None, false, false, false, None);
|
||||
let emitter = EmitterWriter::new(box io::sink(), None, false, false, false, None, false);
|
||||
// FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
|
||||
let handler = Handler::with_emitter(false, None, box emitter);
|
||||
let sess = ParseSess::with_span_handler(handler, cm);
|
||||
|
@ -940,7 +940,7 @@ impl Stdio {
|
||||
/// }
|
||||
///
|
||||
/// let output = child.wait_with_output().expect("Failed to read stdout");
|
||||
/// assert_eq!(String::from_utf8_lossy(&output.stdout), "!dlrow ,olleH\n");
|
||||
/// assert_eq!(String::from_utf8_lossy(&output.stdout), "!dlrow ,olleH");
|
||||
/// ```
|
||||
#[stable(feature = "process", since = "1.0.0")]
|
||||
pub fn piped() -> Stdio { Stdio(imp::Stdio::MakePipe) }
|
||||
|
@ -1048,9 +1048,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
|
||||
self.parse_sess.span_diagnostic.span_warn(sp, msg);
|
||||
}
|
||||
pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
|
||||
self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
|
||||
}
|
||||
pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
|
||||
self.parse_sess.span_diagnostic.span_bug(sp, msg);
|
||||
}
|
||||
|
@ -1,33 +1,28 @@
|
||||
use crate::ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
|
||||
use crate::ast::{self, Ident, Expr, BlockCheckMode, UnOp, PatKind};
|
||||
use crate::attr;
|
||||
use crate::source_map::{dummy_spanned, respan, Spanned};
|
||||
use crate::source_map::{respan, Spanned};
|
||||
use crate::ext::base::ExtCtxt;
|
||||
use crate::ptr::P;
|
||||
use crate::symbol::{kw, sym, Symbol};
|
||||
use crate::ThinVec;
|
||||
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use syntax_pos::{Pos, Span};
|
||||
|
||||
// Left so that Cargo tests don't break, this can be removed once those no longer use it
|
||||
pub trait AstBuilder {}
|
||||
|
||||
impl<'a> ExtCtxt<'a> {
|
||||
pub fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
|
||||
self.path_all(span, false, strs, vec![], vec![])
|
||||
self.path_all(span, false, strs, vec![])
|
||||
}
|
||||
pub fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
|
||||
self.path(span, vec![id])
|
||||
}
|
||||
pub fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
|
||||
self.path_all(span, true, strs, vec![], vec![])
|
||||
self.path_all(span, true, strs, vec![])
|
||||
}
|
||||
pub fn path_all(&self,
|
||||
span: Span,
|
||||
global: bool,
|
||||
mut idents: Vec<ast::Ident> ,
|
||||
args: Vec<ast::GenericArg>,
|
||||
constraints: Vec<ast::AssocTyConstraint> )
|
||||
args: Vec<ast::GenericArg>)
|
||||
-> ast::Path {
|
||||
assert!(!idents.is_empty());
|
||||
let add_root = global && !idents[0].is_path_segment_keyword();
|
||||
@ -39,8 +34,8 @@ impl<'a> ExtCtxt<'a> {
|
||||
segments.extend(idents.into_iter().map(|ident| {
|
||||
ast::PathSegment::from_ident(ident.with_span_pos(span))
|
||||
}));
|
||||
let args = if !args.is_empty() || !constraints.is_empty() {
|
||||
ast::AngleBracketedArgs { args, constraints, span }.into()
|
||||
let args = if !args.is_empty() {
|
||||
ast::AngleBracketedArgs { args, constraints: Vec::new(), span }.into()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@ -52,42 +47,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
ast::Path { span, segments }
|
||||
}
|
||||
|
||||
/// Constructs a qualified path.
|
||||
///
|
||||
/// Constructs a path like `<self_type as trait_path>::ident`.
|
||||
pub fn qpath(&self,
|
||||
self_type: P<ast::Ty>,
|
||||
trait_path: ast::Path,
|
||||
ident: ast::Ident)
|
||||
-> (ast::QSelf, ast::Path) {
|
||||
self.qpath_all(self_type, trait_path, ident, vec![], vec![])
|
||||
}
|
||||
|
||||
/// Constructs a qualified path.
|
||||
///
|
||||
/// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`.
|
||||
pub fn qpath_all(&self,
|
||||
self_type: P<ast::Ty>,
|
||||
trait_path: ast::Path,
|
||||
ident: ast::Ident,
|
||||
args: Vec<ast::GenericArg>,
|
||||
constraints: Vec<ast::AssocTyConstraint>)
|
||||
-> (ast::QSelf, ast::Path) {
|
||||
let mut path = trait_path;
|
||||
let args = if !args.is_empty() || !constraints.is_empty() {
|
||||
ast::AngleBracketedArgs { args, constraints, span: ident.span }.into()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
path.segments.push(ast::PathSegment { ident, id: ast::DUMMY_NODE_ID, args });
|
||||
|
||||
(ast::QSelf {
|
||||
ty: self_type,
|
||||
path_span: path.span,
|
||||
position: path.segments.len() - 1
|
||||
}, path)
|
||||
}
|
||||
|
||||
pub fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
|
||||
ast::MutTy {
|
||||
ty,
|
||||
@ -149,10 +108,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
ast::TyKind::Ptr(self.ty_mt(ty, mutbl)))
|
||||
}
|
||||
|
||||
pub fn ty_infer(&self, span: Span) -> P<ast::Ty> {
|
||||
self.ty(span, ast::TyKind::Infer)
|
||||
}
|
||||
|
||||
pub fn typaram(&self,
|
||||
span: Span,
|
||||
ident: ast::Ident,
|
||||
@ -220,14 +175,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt {
|
||||
ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: expr.span,
|
||||
node: ast::StmtKind::Semi(expr),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
|
||||
ex: P<ast::Expr>) -> ast::Stmt {
|
||||
let pat = if mutbl {
|
||||
@ -251,34 +198,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stmt_let_typed(&self,
|
||||
sp: Span,
|
||||
mutbl: bool,
|
||||
ident: ast::Ident,
|
||||
typ: P<ast::Ty>,
|
||||
ex: P<ast::Expr>)
|
||||
-> ast::Stmt {
|
||||
let pat = if mutbl {
|
||||
let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable);
|
||||
self.pat_ident_binding_mode(sp, ident, binding_mode)
|
||||
} else {
|
||||
self.pat_ident(sp, ident)
|
||||
};
|
||||
let local = P(ast::Local {
|
||||
pat,
|
||||
ty: Some(typ),
|
||||
init: Some(ex),
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: sp,
|
||||
attrs: ThinVec::new(),
|
||||
});
|
||||
ast::Stmt {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: ast::StmtKind::Local(local),
|
||||
span: sp,
|
||||
}
|
||||
}
|
||||
|
||||
// Generates `let _: Type;`, which is usually used for type assertions.
|
||||
pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
|
||||
let local = P(ast::Local {
|
||||
@ -333,11 +252,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.expr(path.span, ast::ExprKind::Path(None, path))
|
||||
}
|
||||
|
||||
/// Constructs a `QPath` expression.
|
||||
pub fn expr_qpath(&self, span: Span, qself: ast::QSelf, path: ast::Path) -> P<ast::Expr> {
|
||||
self.expr(span, ast::ExprKind::Path(Some(qself), path))
|
||||
}
|
||||
|
||||
pub fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr> {
|
||||
self.expr_path(self.path_ident(span, id))
|
||||
}
|
||||
@ -351,27 +265,12 @@ impl<'a> ExtCtxt<'a> {
|
||||
}
|
||||
|
||||
pub fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
|
||||
self.expr_unary(sp, UnOp::Deref, e)
|
||||
}
|
||||
pub fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P<ast::Expr>) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::Unary(op, e))
|
||||
self.expr(sp, ast::ExprKind::Unary(UnOp::Deref, e))
|
||||
}
|
||||
|
||||
pub fn expr_field_access(
|
||||
&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident,
|
||||
) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::Field(expr, ident.with_span_pos(sp)))
|
||||
}
|
||||
pub fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
|
||||
let ident = Ident::new(sym::integer(idx), sp);
|
||||
self.expr(sp, ast::ExprKind::Field(expr, ident))
|
||||
}
|
||||
pub fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Immutable, e))
|
||||
}
|
||||
pub fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Mutable, e))
|
||||
}
|
||||
|
||||
pub fn expr_call(
|
||||
&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>,
|
||||
@ -427,28 +326,10 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.expr_lit(span, ast::LitKind::Int(i as u128,
|
||||
ast::LitIntType::Unsigned(ast::UintTy::Usize)))
|
||||
}
|
||||
pub fn expr_isize(&self, sp: Span, i: isize) -> P<ast::Expr> {
|
||||
if i < 0 {
|
||||
let i = (-i) as u128;
|
||||
let lit_ty = ast::LitIntType::Signed(ast::IntTy::Isize);
|
||||
let lit = self.expr_lit(sp, ast::LitKind::Int(i, lit_ty));
|
||||
self.expr_unary(sp, ast::UnOp::Neg, lit)
|
||||
} else {
|
||||
self.expr_lit(sp, ast::LitKind::Int(i as u128,
|
||||
ast::LitIntType::Signed(ast::IntTy::Isize)))
|
||||
}
|
||||
}
|
||||
pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
|
||||
self.expr_lit(sp, ast::LitKind::Int(u as u128,
|
||||
ast::LitIntType::Unsigned(ast::UintTy::U32)))
|
||||
}
|
||||
pub fn expr_u16(&self, sp: Span, u: u16) -> P<ast::Expr> {
|
||||
self.expr_lit(sp, ast::LitKind::Int(u as u128,
|
||||
ast::LitIntType::Unsigned(ast::UintTy::U16)))
|
||||
}
|
||||
pub fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr> {
|
||||
self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U8)))
|
||||
}
|
||||
pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
|
||||
self.expr_lit(sp, ast::LitKind::Bool(value))
|
||||
}
|
||||
@ -456,10 +337,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
pub fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::Array(exprs))
|
||||
}
|
||||
pub fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr> {
|
||||
self.expr_call_global(sp, self.std_path(&[sym::vec, sym::Vec, sym::new]),
|
||||
Vec::new())
|
||||
}
|
||||
pub fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
|
||||
self.expr_addr_of(sp, self.expr_vec(sp, exprs))
|
||||
}
|
||||
@ -476,16 +353,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.expr_call_global(sp, some, vec![expr])
|
||||
}
|
||||
|
||||
pub fn expr_none(&self, sp: Span) -> P<ast::Expr> {
|
||||
let none = self.std_path(&[sym::option, sym::Option, sym::None]);
|
||||
let none = self.path_global(sp, none);
|
||||
self.expr_path(none)
|
||||
}
|
||||
|
||||
pub fn expr_break(&self, sp: Span) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::Break(None, None))
|
||||
}
|
||||
|
||||
pub fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
|
||||
self.expr(sp, ast::ExprKind::Tup(exprs))
|
||||
}
|
||||
@ -514,11 +381,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.expr_call_global(sp, ok, vec![expr])
|
||||
}
|
||||
|
||||
pub fn expr_err(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
|
||||
let err = self.std_path(&[sym::result, sym::Result, sym::Err]);
|
||||
self.expr_call_global(sp, err, vec![expr])
|
||||
}
|
||||
|
||||
pub fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> {
|
||||
let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
|
||||
let ok_path = self.path_global(sp, ok);
|
||||
@ -635,10 +497,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
|
||||
}
|
||||
|
||||
pub fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr> {
|
||||
self.expr(span, ast::ExprKind::Loop(block, None))
|
||||
}
|
||||
|
||||
pub fn lambda_fn_decl(&self,
|
||||
span: Span,
|
||||
fn_decl: P<ast::FnDecl>,
|
||||
@ -659,7 +517,7 @@ impl<'a> ExtCtxt<'a> {
|
||||
body: P<ast::Expr>)
|
||||
-> P<ast::Expr> {
|
||||
let fn_decl = self.fn_decl(
|
||||
ids.iter().map(|id| self.param(span, *id, self.ty_infer(span))).collect(),
|
||||
ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
|
||||
ast::FunctionRetTy::Default(span));
|
||||
|
||||
// FIXME -- We are using `span` as the span of the `|...|`
|
||||
@ -682,16 +540,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.lambda(span, vec![ident], body)
|
||||
}
|
||||
|
||||
pub fn lambda_stmts(&self,
|
||||
span: Span,
|
||||
ids: Vec<ast::Ident>,
|
||||
stmts: Vec<ast::Stmt>)
|
||||
-> P<ast::Expr> {
|
||||
self.lambda(span, ids, self.expr_block(self.block(span, stmts)))
|
||||
}
|
||||
pub fn lambda_stmts_0(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Expr> {
|
||||
self.lambda0(span, self.expr_block(self.block(span, stmts)))
|
||||
}
|
||||
pub fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>,
|
||||
ident: ast::Ident) -> P<ast::Expr> {
|
||||
self.lambda1(span, self.expr_block(self.block(span, stmts)), ident)
|
||||
@ -733,43 +581,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn item_fn_poly(&self,
|
||||
span: Span,
|
||||
name: Ident,
|
||||
inputs: Vec<ast::Param> ,
|
||||
output: P<ast::Ty>,
|
||||
generics: Generics,
|
||||
body: P<ast::Block>) -> P<ast::Item> {
|
||||
self.item(span,
|
||||
name,
|
||||
Vec::new(),
|
||||
ast::ItemKind::Fn(self.fn_decl(inputs, ast::FunctionRetTy::Ty(output)),
|
||||
ast::FnHeader {
|
||||
unsafety: ast::Unsafety::Normal,
|
||||
asyncness: dummy_spanned(ast::IsAsync::NotAsync),
|
||||
constness: dummy_spanned(ast::Constness::NotConst),
|
||||
abi: Abi::Rust,
|
||||
},
|
||||
generics,
|
||||
body))
|
||||
}
|
||||
|
||||
pub fn item_fn(&self,
|
||||
span: Span,
|
||||
name: Ident,
|
||||
inputs: Vec<ast::Param> ,
|
||||
output: P<ast::Ty>,
|
||||
body: P<ast::Block>
|
||||
) -> P<ast::Item> {
|
||||
self.item_fn_poly(
|
||||
span,
|
||||
name,
|
||||
inputs,
|
||||
output,
|
||||
Generics::default(),
|
||||
body)
|
||||
}
|
||||
|
||||
pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
|
||||
let fields: Vec<_> = tys.into_iter().map(|ty| {
|
||||
ast::StructField {
|
||||
@ -800,52 +611,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_enum_poly(&self, span: Span, name: Ident,
|
||||
enum_definition: ast::EnumDef,
|
||||
generics: Generics) -> P<ast::Item> {
|
||||
self.item(span, name, Vec::new(), ast::ItemKind::Enum(enum_definition, generics))
|
||||
}
|
||||
|
||||
pub fn item_enum(&self, span: Span, name: Ident,
|
||||
enum_definition: ast::EnumDef) -> P<ast::Item> {
|
||||
self.item_enum_poly(span, name, enum_definition,
|
||||
Generics::default())
|
||||
}
|
||||
|
||||
pub fn item_struct(&self, span: Span, name: Ident,
|
||||
struct_def: ast::VariantData) -> P<ast::Item> {
|
||||
self.item_struct_poly(
|
||||
span,
|
||||
name,
|
||||
struct_def,
|
||||
Generics::default()
|
||||
)
|
||||
}
|
||||
|
||||
pub fn item_struct_poly(&self, span: Span, name: Ident,
|
||||
struct_def: ast::VariantData, generics: Generics) -> P<ast::Item> {
|
||||
self.item(span, name, Vec::new(), ast::ItemKind::Struct(struct_def, generics))
|
||||
}
|
||||
|
||||
pub fn item_mod(&self, span: Span, inner_span: Span, name: Ident,
|
||||
attrs: Vec<ast::Attribute>,
|
||||
items: Vec<P<ast::Item>>) -> P<ast::Item> {
|
||||
self.item(
|
||||
span,
|
||||
name,
|
||||
attrs,
|
||||
ast::ItemKind::Mod(ast::Mod {
|
||||
inner: inner_span,
|
||||
items,
|
||||
inline: true
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
pub fn item_extern_crate(&self, span: Span, name: Ident) -> P<ast::Item> {
|
||||
self.item(span, name, Vec::new(), ast::ItemKind::ExternCrate(None))
|
||||
}
|
||||
|
||||
pub fn item_static(&self,
|
||||
span: Span,
|
||||
name: Ident,
|
||||
@ -865,15 +630,6 @@ impl<'a> ExtCtxt<'a> {
|
||||
self.item(span, name, Vec::new(), ast::ItemKind::Const(ty, expr))
|
||||
}
|
||||
|
||||
pub fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>,
|
||||
generics: Generics) -> P<ast::Item> {
|
||||
self.item(span, name, Vec::new(), ast::ItemKind::TyAlias(ty, generics))
|
||||
}
|
||||
|
||||
pub fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item> {
|
||||
self.item_ty_poly(span, name, ty, Generics::default())
|
||||
}
|
||||
|
||||
pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
|
||||
attr::mk_attr_outer(mi)
|
||||
}
|
||||
@ -881,70 +637,4 @@ impl<'a> ExtCtxt<'a> {
|
||||
pub fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem {
|
||||
attr::mk_word_item(Ident::new(w, sp))
|
||||
}
|
||||
|
||||
pub fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem {
|
||||
attr::mk_nested_word_item(Ident::new(w, sp))
|
||||
}
|
||||
|
||||
pub fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>)
|
||||
-> ast::MetaItem {
|
||||
attr::mk_list_item(Ident::new(name, sp), mis)
|
||||
}
|
||||
|
||||
pub fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
|
||||
-> ast::MetaItem {
|
||||
attr::mk_name_value_item(Ident::new(name, span), lit_kind, span)
|
||||
}
|
||||
|
||||
pub fn item_use(&self, sp: Span,
|
||||
vis: ast::Visibility, vp: P<ast::UseTree>) -> P<ast::Item> {
|
||||
P(ast::Item {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
ident: Ident::invalid(),
|
||||
attrs: vec![],
|
||||
node: ast::ItemKind::Use(vp),
|
||||
vis,
|
||||
span: sp,
|
||||
tokens: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn item_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> P<ast::Item> {
|
||||
self.item_use_simple_(sp, vis, None, path)
|
||||
}
|
||||
|
||||
pub fn item_use_simple_(&self, sp: Span, vis: ast::Visibility,
|
||||
rename: Option<ast::Ident>, path: ast::Path) -> P<ast::Item> {
|
||||
self.item_use(sp, vis, P(ast::UseTree {
|
||||
span: sp,
|
||||
prefix: path,
|
||||
kind: ast::UseTreeKind::Simple(rename, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID),
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn item_use_list(&self, sp: Span, vis: ast::Visibility,
|
||||
path: Vec<ast::Ident>, imports: &[ast::Ident]) -> P<ast::Item> {
|
||||
let imports = imports.iter().map(|id| {
|
||||
(ast::UseTree {
|
||||
span: sp,
|
||||
prefix: self.path(sp, vec![*id]),
|
||||
kind: ast::UseTreeKind::Simple(None, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID),
|
||||
}, ast::DUMMY_NODE_ID)
|
||||
}).collect();
|
||||
|
||||
self.item_use(sp, vis, P(ast::UseTree {
|
||||
span: sp,
|
||||
prefix: self.path(sp, path),
|
||||
kind: ast::UseTreeKind::Nested(imports),
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn item_use_glob(&self, sp: Span,
|
||||
vis: ast::Visibility, path: Vec<ast::Ident>) -> P<ast::Item> {
|
||||
self.item_use(sp, vis, P(ast::UseTree {
|
||||
span: sp,
|
||||
prefix: self.path(sp, path),
|
||||
kind: ast::UseTreeKind::Glob,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ use crate::parse::{self, token, ParseSess};
|
||||
use crate::parse::lexer::comments;
|
||||
use crate::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
|
||||
|
||||
use errors::{Diagnostic, DiagnosticBuilder};
|
||||
use errors::Diagnostic;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
|
||||
use syntax_pos::symbol::{kw, sym, Symbol};
|
||||
@ -650,7 +650,7 @@ impl server::Diagnostic for Rustc<'_> {
|
||||
diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
|
||||
}
|
||||
fn emit(&mut self, diag: Self::Diagnostic) {
|
||||
DiagnosticBuilder::new_diagnostic(&self.sess.span_diagnostic, diag).emit()
|
||||
self.sess.span_diagnostic.emit_diagnostic(&diag);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -8,6 +8,7 @@ use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};

use smallvec::{smallvec, SmallVec};

use errors::pluralise;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use syntax_pos::hygiene::{ExpnId, Transparency};

@ -348,10 +349,10 @@ impl LockstepIterSize {
                    "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                    l_id,
                    l_len,
                    if l_len != 1 { "s" } else { "" },
                    pluralise!(l_len),
                    r_id,
                    r_len,
                    if r_len != 1 { "s" } else { "" },
                    pluralise!(r_len),
                );
                LockstepIterSize::Contradiction(msg)
            }

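
The `LockstepIterSize::Contradiction` message formatted above is produced when two meta-variables inside one `macro_rules!` transcription repetition repeat a different number of times. A small runnable sketch; the offending call is left commented out so the snippet still compiles:

    macro_rules! zip_pairs {
        ($($a:expr),*; $($b:expr),*) => {
            [$(($a, $b)),*] // `$a` and `$b` must repeat in lockstep here
        };
    }

    fn main() {
        let ok = zip_pairs!(1, 2; 10, 20); // both repeat twice
        println!("{:?}", ok);
        // let bad = zip_pairs!(1, 2, 3; 10);
        // error: meta-variable `a` repeats 3 times, but `b` repeats 1 time
    }
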
@ -243,6 +243,8 @@ declare_features! (
    (accepted, async_await, "1.39.0", Some(50547), None),
    /// Allows mixing bind-by-move in patterns and references to those identifiers in guards.
    (accepted, bind_by_move_pattern_guards, "1.39.0", Some(15287), None),
    /// Allows attributes in formal function parameters.
    (accepted, param_attrs, "1.39.0", Some(60406), None),

    // -------------------------------------------------------------------------
    // feature-group-end: accepted features

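
With `param_attrs` moved to the accepted list above, attributes on formal function parameters compile without a feature gate. A small example using a built-in lint attribute, one of the attribute kinds permitted on parameters:

    fn log_call(#[allow(unused_variables)] request_id: u64, payload: &str) {
        println!("handling: {}", payload);
    }

    fn main() {
        log_call(42, "ping");
    }
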
@ -489,9 +489,6 @@ declare_features! (
|
||||
/// Allows the user of associated type bounds.
|
||||
(active, associated_type_bounds, "1.34.0", Some(52662), None),
|
||||
|
||||
/// Attributes on formal function params.
|
||||
(active, param_attrs, "1.36.0", Some(60406), None),
|
||||
|
||||
/// Allows calling constructor functions in `const fn`.
|
||||
(active, const_constructor, "1.37.0", Some(61456), None),
|
||||
|
||||
|
@ -812,7 +812,6 @@ pub fn check_crate(krate: &ast::Crate,
|
||||
}
|
||||
}
|
||||
|
||||
gate_all!(param_attrs, "attributes on function parameters are unstable");
|
||||
gate_all!(let_chains, "`let` expressions in this position are experimental");
|
||||
gate_all!(async_closure, "async closures are unstable");
|
||||
gate_all!(yields, generators, "yield syntax is experimental");
|
||||
|
@ -12,7 +12,7 @@
|
||||
use crate::source_map::{SourceMap, FilePathMapping};
|
||||
|
||||
use errors::registry::Registry;
|
||||
use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
|
||||
use errors::{SubDiagnostic, CodeSuggestion, SourceMapper};
|
||||
use errors::{DiagnosticId, Applicability};
|
||||
use errors::emitter::{Emitter, HumanReadableErrorType};
|
||||
|
||||
@ -32,6 +32,7 @@ pub struct JsonEmitter {
|
||||
pretty: bool,
|
||||
ui_testing: bool,
|
||||
json_rendered: HumanReadableErrorType,
|
||||
external_macro_backtrace: bool,
|
||||
}
|
||||
|
||||
impl JsonEmitter {
|
||||
@ -40,6 +41,7 @@ impl JsonEmitter {
|
||||
source_map: Lrc<SourceMap>,
|
||||
pretty: bool,
|
||||
json_rendered: HumanReadableErrorType,
|
||||
external_macro_backtrace: bool,
|
||||
) -> JsonEmitter {
|
||||
JsonEmitter {
|
||||
dst: Box::new(io::stderr()),
|
||||
@ -48,13 +50,18 @@ impl JsonEmitter {
|
||||
pretty,
|
||||
ui_testing: false,
|
||||
json_rendered,
|
||||
external_macro_backtrace,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn basic(pretty: bool, json_rendered: HumanReadableErrorType) -> JsonEmitter {
|
||||
pub fn basic(
|
||||
pretty: bool,
|
||||
json_rendered: HumanReadableErrorType,
|
||||
external_macro_backtrace: bool,
|
||||
) -> JsonEmitter {
|
||||
let file_path_mapping = FilePathMapping::empty();
|
||||
JsonEmitter::stderr(None, Lrc::new(SourceMap::new(file_path_mapping)),
|
||||
pretty, json_rendered)
|
||||
pretty, json_rendered, external_macro_backtrace)
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
@ -63,6 +70,7 @@ impl JsonEmitter {
|
||||
source_map: Lrc<SourceMap>,
|
||||
pretty: bool,
|
||||
json_rendered: HumanReadableErrorType,
|
||||
external_macro_backtrace: bool,
|
||||
) -> JsonEmitter {
|
||||
JsonEmitter {
|
||||
dst,
|
||||
@ -71,6 +79,7 @@ impl JsonEmitter {
|
||||
pretty,
|
||||
ui_testing: false,
|
||||
json_rendered,
|
||||
external_macro_backtrace,
|
||||
}
|
||||
}
|
||||
|
||||
@ -80,8 +89,8 @@ impl JsonEmitter {
|
||||
}
|
||||
|
||||
impl Emitter for JsonEmitter {
|
||||
fn emit_diagnostic(&mut self, db: &DiagnosticBuilder<'_>) {
|
||||
let data = Diagnostic::from_diagnostic_builder(db, self);
|
||||
fn emit_diagnostic(&mut self, db: &errors::Diagnostic) {
|
||||
let data = Diagnostic::from_errors_diagnostic(db, self);
|
||||
let result = if self.pretty {
|
||||
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
|
||||
} else {
|
||||
@ -189,7 +198,7 @@ struct ArtifactNotification<'a> {
|
||||
}
|
||||
|
||||
impl Diagnostic {
|
||||
fn from_diagnostic_builder(db: &DiagnosticBuilder<'_>,
|
||||
fn from_errors_diagnostic(db: &errors::Diagnostic,
|
||||
je: &JsonEmitter)
|
||||
-> Diagnostic {
|
||||
let sugg = db.suggestions.iter().map(|sugg| {
|
||||
@ -219,8 +228,9 @@ impl Diagnostic {
|
||||
}
|
||||
let buf = BufWriter::default();
|
||||
let output = buf.clone();
|
||||
je.json_rendered.new_emitter(Box::new(buf), Some(je.sm.clone()), false, None)
|
||||
.ui_testing(je.ui_testing).emit_diagnostic(db);
|
||||
je.json_rendered.new_emitter(
|
||||
Box::new(buf), Some(je.sm.clone()), false, None, je.external_macro_backtrace
|
||||
).ui_testing(je.ui_testing).emit_diagnostic(db);
|
||||
let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
|
||||
let output = String::from_utf8(output).unwrap();
|
||||
|
||||
|
@ -60,12 +60,12 @@ macro_rules! panictry {
|
||||
macro_rules! panictry_buffer {
|
||||
($handler:expr, $e:expr) => ({
|
||||
use std::result::Result::{Ok, Err};
|
||||
use errors::{FatalError, DiagnosticBuilder};
|
||||
use errors::FatalError;
|
||||
match $e {
|
||||
Ok(e) => e,
|
||||
Err(errs) => {
|
||||
for e in errs {
|
||||
DiagnosticBuilder::new_diagnostic($handler, e).emit();
|
||||
$handler.emit_diagnostic(&e);
|
||||
}
|
||||
FatalError.raise()
|
||||
}
|
||||
|
@ -19,13 +19,6 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
|
||||
permitted in this context";
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
crate fn parse_param_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.sess.gated_spans.param_attrs.borrow_mut()
|
||||
.extend(attrs.iter().map(|a| a.span));
|
||||
Ok(attrs)
|
||||
}
|
||||
|
||||
/// Parses attributes that appear before an item.
|
||||
crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
|
||||
let mut attrs: Vec<ast::Attribute> = Vec::new();
|
||||
@ -309,14 +302,14 @@ impl<'a> Parser<'a> {
|
||||
Ok(lit) => {
|
||||
return Ok(ast::NestedMetaItem::Literal(lit))
|
||||
}
|
||||
Err(ref mut err) => self.diagnostic().cancel(err)
|
||||
Err(ref mut err) => err.cancel(),
|
||||
}
|
||||
|
||||
match self.parse_meta_item() {
|
||||
Ok(mi) => {
|
||||
return Ok(ast::NestedMetaItem::MetaItem(mi))
|
||||
}
|
||||
Err(ref mut err) => self.diagnostic().cancel(err)
|
||||
Err(ref mut err) => err.cancel(),
|
||||
}
|
||||
|
||||
let found = self.this_token_to_string();
|
||||
|
@ -11,7 +11,7 @@ use crate::ptr::P;
|
||||
use crate::symbol::{kw, sym};
|
||||
use crate::ThinVec;
|
||||
use crate::util::parser::AssocOp;
|
||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
|
||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError};
|
||||
use log::{debug, trace};
|
||||
@ -197,10 +197,6 @@ impl<'a> Parser<'a> {
|
||||
self.sess.span_diagnostic.span_bug(sp, m)
|
||||
}
|
||||
|
||||
crate fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
|
||||
self.sess.span_diagnostic.cancel(err)
|
||||
}
|
||||
|
||||
crate fn diagnostic(&self) -> &'a errors::Handler {
|
||||
&self.sess.span_diagnostic
|
||||
}
|
||||
@ -426,15 +422,13 @@ impl<'a> Parser<'a> {
|
||||
/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
|
||||
/// passes through any errors encountered. Used for error recovery.
|
||||
crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
|
||||
let handler = self.diagnostic();
|
||||
|
||||
if let Err(ref mut err) = self.parse_seq_to_before_tokens(
|
||||
kets,
|
||||
SeqSep::none(),
|
||||
TokenExpectType::Expect,
|
||||
|p| Ok(p.parse_token_tree()),
|
||||
) {
|
||||
handler.cancel(err);
|
||||
err.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
@ -532,15 +526,15 @@ impl<'a> Parser<'a> {
|
||||
self.eat_to_tokens(&[&end]);
|
||||
let span = lo.until(self.token.span);
|
||||
|
||||
let plural = number_of_gt > 1 || number_of_shr >= 1;
|
||||
let total_num_of_gt = number_of_gt + number_of_shr * 2;
|
||||
self.diagnostic()
|
||||
.struct_span_err(
|
||||
span,
|
||||
&format!("unmatched angle bracket{}", if plural { "s" } else { "" }),
|
||||
&format!("unmatched angle bracket{}", pluralise!(total_num_of_gt)),
|
||||
)
|
||||
.span_suggestion(
|
||||
span,
|
||||
&format!("remove extra angle bracket{}", if plural { "s" } else { "" }),
|
||||
&format!("remove extra angle bracket{}", pluralise!(total_num_of_gt)),
|
||||
String::new(),
|
||||
Applicability::MachineApplicable,
|
||||
)
|
||||
|
@ -18,6 +18,7 @@ fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
false,
|
||||
);
|
||||
ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
|
||||
}
|
||||
|
@ -49,8 +49,6 @@ static_assert_size!(PResult<'_, bool>, 16);
|
||||
/// used and should be feature gated accordingly in `check_crate`.
|
||||
#[derive(Default)]
|
||||
pub struct GatedSpans {
|
||||
/// Spans collected for gating `param_attrs`, e.g. `fn foo(#[attr] x: u8) {}`.
|
||||
pub param_attrs: Lock<Vec<Span>>,
|
||||
/// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
|
||||
pub let_chains: Lock<Vec<Span>>,
|
||||
/// Spans collected for gating `async_closure`, e.g. `async || ..`.
|
||||
@ -306,7 +304,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
|
||||
match try_file_to_source_file(sess, path, spanopt) {
|
||||
Ok(source_file) => source_file,
|
||||
Err(d) => {
|
||||
DiagnosticBuilder::new_diagnostic(&sess.span_diagnostic, d).emit();
|
||||
sess.span_diagnostic.emit_diagnostic(&d);
|
||||
FatalError.raise();
|
||||
}
|
||||
}
|
||||
|
@ -979,7 +979,7 @@ impl<'a> Parser<'a> {
|
||||
is_name_required: impl Fn(&token::Token) -> bool,
|
||||
) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_param_attributes()?;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
if let Some(mut param) = self.parse_self_param()? {
|
||||
param.attrs = attrs.into();
|
||||
return self.recover_bad_self_param(param, is_trait_item);
|
||||
@ -1362,7 +1362,7 @@ impl<'a> Parser<'a> {
|
||||
/// Returns the parsed optional self parameter with attributes and whether a self
|
||||
/// shortcut was used.
|
||||
fn parse_self_parameter_with_attrs(&mut self) -> PResult<'a, Option<Param>> {
|
||||
let attrs = self.parse_param_attributes()?;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let param_opt = self.parse_self_param()?;
|
||||
Ok(param_opt.map(|mut param| {
|
||||
param.attrs = attrs.into();
|
||||
|
@ -66,6 +66,10 @@ pub(super) enum LhsExpr {
|
||||
}
|
||||
|
||||
impl From<Option<ThinVec<Attribute>>> for LhsExpr {
|
||||
/// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
|
||||
/// and `None` into `LhsExpr::NotYetParsed`.
|
||||
///
|
||||
/// This conversion does not allocate.
|
||||
fn from(o: Option<ThinVec<Attribute>>) -> Self {
|
||||
if let Some(attrs) = o {
|
||||
LhsExpr::AttributesParsed(attrs)
|
||||
@ -76,6 +80,9 @@ impl From<Option<ThinVec<Attribute>>> for LhsExpr {
|
||||
}
|
||||
|
||||
impl From<P<Expr>> for LhsExpr {
|
||||
/// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`.
|
||||
///
|
||||
/// This conversion does not allocate.
|
||||
fn from(expr: P<Expr>) -> Self {
|
||||
LhsExpr::AlreadyParsed(expr)
|
||||
}
|
||||
@ -770,7 +777,7 @@ impl<'a> Parser<'a> {
|
||||
ex = ExprKind::Lit(literal);
|
||||
}
|
||||
Err(mut err) => {
|
||||
self.cancel(&mut err);
|
||||
err.cancel();
|
||||
return Err(self.expected_expression_found());
|
||||
}
|
||||
}
|
||||
@ -1176,7 +1183,7 @@ impl<'a> Parser<'a> {
|
||||
/// Parses a parameter in a closure header (e.g., `|arg, arg|`).
|
||||
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_param_attributes()?;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let pat = self.parse_pat(PARAM_EXPECTED)?;
|
||||
let t = if self.eat(&token::Colon) {
|
||||
self.parse_ty()?
|
||||
|
@ -100,13 +100,31 @@ impl<'a> Parser<'a> {
            } else if self.check_ident() {
                // Parse type parameter.
                params.push(self.parse_ty_param(attrs)?);
            } else if self.token.can_begin_type() {
                // Trying to write an associated type bound? (#26271)
                let snapshot = self.clone();
                match self.parse_ty_where_predicate() {
                    Ok(where_predicate) => {
                        self.struct_span_err(
                            where_predicate.span(),
                            "bounds on associated types do not belong here",
                        )
                        .span_label(where_predicate.span(), "belongs in `where` clause")
                        .emit();
                    }
                    Err(mut err) => {
                        err.cancel();
                        std::mem::replace(self, snapshot);
                        break
                    }
                }
            } else {
                // Check for trailing attributes and stop parsing.
                if !attrs.is_empty() {
                    if !params.is_empty() {
                        self.struct_span_err(
                            attrs[0].span,
                            &format!("trailing attribute after generic parameter"),
                            "trailing attribute after generic parameter",
                        )
                        .span_label(attrs[0].span, "attributes must go before parameters")
                        .emit();
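
The parser recovery above ("Trying to write an associated type bound? (#26271)") targets generics lists where a bound is written on an associated type; as the new label says, the bound belongs in a `where` clause. An illustration in which the first function intentionally does not parse and the second shows the accepted form:

    use std::fmt::Debug;

    // error: bounds on associated types do not belong here
    fn print_all<I: Iterator, I::Item: Debug>(iter: I) {
        for item in iter {
            println!("{:?}", item);
        }
    }

    // Accepted form, with the bound moved to a `where` clause:
    fn print_all_ok<I: Iterator>(iter: I)
    where
        I::Item: Debug,
    {
        for item in iter {
            println!("{:?}", item);
        }
    }

    fn main() {
        print_all_ok(vec![1, 2, 3].into_iter());
    }
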
@ -202,43 +220,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
));
|
||||
} else if self.check_type() {
|
||||
// Parse optional `for<'a, 'b>`.
|
||||
// This `for` is parsed greedily and applies to the whole predicate,
|
||||
// the bounded type can have its own `for` applying only to it.
|
||||
// Examples:
|
||||
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
|
||||
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
|
||||
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
|
||||
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
|
||||
|
||||
// Parse type with mandatory colon and (possibly empty) bounds,
|
||||
// or with mandatory equality sign and the second type.
|
||||
let ty = self.parse_ty()?;
|
||||
if self.eat(&token::Colon) {
|
||||
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
|
||||
where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
|
||||
ast::WhereBoundPredicate {
|
||||
span: lo.to(self.prev_span),
|
||||
bound_generic_params: lifetime_defs,
|
||||
bounded_ty: ty,
|
||||
bounds,
|
||||
}
|
||||
));
|
||||
// FIXME: Decide what should be used here, `=` or `==`.
|
||||
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
|
||||
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
|
||||
let rhs_ty = self.parse_ty()?;
|
||||
where_clause.predicates.push(ast::WherePredicate::EqPredicate(
|
||||
ast::WhereEqPredicate {
|
||||
span: lo.to(self.prev_span),
|
||||
lhs_ty: ty,
|
||||
rhs_ty,
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
}
|
||||
));
|
||||
} else {
|
||||
return self.unexpected();
|
||||
}
|
||||
where_clause.predicates.push(self.parse_ty_where_predicate()?);
|
||||
} else {
|
||||
break
|
||||
}
|
||||
@ -252,6 +234,47 @@ impl<'a> Parser<'a> {
|
||||
Ok(where_clause)
|
||||
}
|
||||
|
||||
fn parse_ty_where_predicate(&mut self) -> PResult<'a, ast::WherePredicate> {
|
||||
let lo = self.token.span;
|
||||
// Parse optional `for<'a, 'b>`.
|
||||
// This `for` is parsed greedily and applies to the whole predicate,
|
||||
// the bounded type can have its own `for` applying only to it.
|
||||
// Examples:
|
||||
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
|
||||
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
|
||||
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
|
||||
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
|
||||
|
||||
// Parse type with mandatory colon and (possibly empty) bounds,
|
||||
// or with mandatory equality sign and the second type.
|
||||
let ty = self.parse_ty()?;
|
||||
if self.eat(&token::Colon) {
|
||||
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
|
||||
Ok(ast::WherePredicate::BoundPredicate(
|
||||
ast::WhereBoundPredicate {
|
||||
span: lo.to(self.prev_span),
|
||||
bound_generic_params: lifetime_defs,
|
||||
bounded_ty: ty,
|
||||
bounds,
|
||||
}
|
||||
))
|
||||
// FIXME: Decide what should be used here, `=` or `==`.
|
||||
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
|
||||
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
|
||||
let rhs_ty = self.parse_ty()?;
|
||||
Ok(ast::WherePredicate::EqPredicate(
|
||||
ast::WhereEqPredicate {
|
||||
span: lo.to(self.prev_span),
|
||||
lhs_ty: ty,
|
||||
rhs_ty,
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
}
|
||||
))
|
||||
} else {
|
||||
self.unexpected()
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn choose_generics_over_qpath(&self) -> bool {
|
||||
// There's an ambiguity between generic parameters and qualified paths in impls.
|
||||
// If we see `<` it may start both, so we have to inspect some following tokens.
|
||||
|
@ -537,7 +537,7 @@ impl<'a> Parser<'a> {
|
||||
mut err: DiagnosticBuilder<'a>,
|
||||
expected: Expected,
|
||||
) -> PResult<'a, P<Pat>> {
|
||||
self.cancel(&mut err);
|
||||
err.cancel();
|
||||
|
||||
let expected = expected.unwrap_or("pattern");
|
||||
let msg = format!("expected {}, found {}", expected, self.this_token_descr());
|
||||
|
@ -9,7 +9,7 @@ use crate::symbol::kw;
|
||||
|
||||
use std::mem;
|
||||
use log::debug;
|
||||
use errors::{Applicability};
|
||||
use errors::{Applicability, pluralise};
|
||||
|
||||
/// Specifies how to parse a path.
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
@ -129,10 +129,11 @@ impl<'a> Parser<'a> {
|
||||
self.parse_path(style)
|
||||
}
|
||||
|
||||
crate fn parse_path_segments(&mut self,
|
||||
segments: &mut Vec<PathSegment>,
|
||||
style: PathStyle)
|
||||
-> PResult<'a, ()> {
|
||||
crate fn parse_path_segments(
|
||||
&mut self,
|
||||
segments: &mut Vec<PathSegment>,
|
||||
style: PathStyle,
|
||||
) -> PResult<'a, ()> {
|
||||
loop {
|
||||
let segment = self.parse_path_segment(style)?;
|
||||
if style == PathStyle::Expr {
|
||||
@ -201,7 +202,7 @@ impl<'a> Parser<'a> {
|
||||
} else {
|
||||
// `(T, U) -> R`
|
||||
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
|
||||
let span = lo.to(self.prev_span);
|
||||
let span = ident.span.to(self.prev_span);
|
||||
let output = if self.eat(&token::RArrow) {
|
||||
Some(self.parse_ty_common(false, false, false)?)
|
||||
} else {
|
||||
@ -347,20 +348,19 @@ impl<'a> Parser<'a> {
|
||||
let span = lo.with_hi(
|
||||
lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
|
||||
);
|
||||
let plural = snapshot.unmatched_angle_bracket_count > 1;
|
||||
self.diagnostic()
|
||||
.struct_span_err(
|
||||
span,
|
||||
&format!(
|
||||
"unmatched angle bracket{}",
|
||||
if plural { "s" } else { "" }
|
||||
pluralise!(snapshot.unmatched_angle_bracket_count)
|
||||
),
|
||||
)
|
||||
.span_suggestion(
|
||||
span,
|
||||
&format!(
|
||||
"remove extra angle bracket{}",
|
||||
if plural { "s" } else { "" }
|
||||
pluralise!(snapshot.unmatched_angle_bracket_count)
|
||||
),
|
||||
String::new(),
|
||||
Applicability::MachineApplicable,
|
||||
|
@ -361,7 +361,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
Err(mut e) => {
|
||||
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
|
||||
self.cancel(&mut e);
|
||||
e.cancel();
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ use crate::symbol::{kw};
|
||||
|
||||
use rustc_target::spec::abi::Abi;
|
||||
|
||||
use errors::{Applicability};
|
||||
use errors::{Applicability, pluralise};
|
||||
|
||||
/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
|
||||
/// `IDENT<<u8 as Trait>::AssocTy>`.
|
||||
@ -397,7 +397,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
|
||||
if !negative_bounds.is_empty() || was_negative {
|
||||
let plural = negative_bounds.len() > 1;
|
||||
let negative_bounds_len = negative_bounds.len();
|
||||
let last_span = negative_bounds.last().map(|sp| *sp);
|
||||
let mut err = self.struct_span_err(
|
||||
negative_bounds,
|
||||
@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
err.span_suggestion_hidden(
|
||||
bound_list,
|
||||
&format!("remove the trait bound{}", if plural { "s" } else { "" }),
|
||||
&format!("remove the trait bound{}", pluralise!(negative_bounds_len)),
|
||||
new_bound_list,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
|
@ -147,6 +147,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
|
||||
false,
|
||||
false,
|
||||
None,
|
||||
false,
|
||||
);
|
||||
let handler = Handler::with_emitter(true, None, Box::new(emitter));
|
||||
handler.span_err(msp, "foo");
|
||||
|
@ -115,7 +115,7 @@ fn cs_clone_shallow(name: &str,
|
||||
let span = cx.with_def_site_ctxt(span);
|
||||
let assert_path = cx.path_all(span, true,
|
||||
cx.std_path(&[sym::clone, Symbol::intern(helper_name)]),
|
||||
vec![GenericArg::Type(ty)], vec![]);
|
||||
vec![GenericArg::Type(ty)]);
|
||||
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
|
||||
}
|
||||
fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec<ast::Stmt>, variant: &VariantData) {
|
||||
|
@ -2,7 +2,7 @@ use crate::deriving::path_std;
|
||||
use crate::deriving::generic::*;
|
||||
use crate::deriving::generic::ty::*;
|
||||
|
||||
use syntax::ast::{self, Expr, MetaItem, GenericArg};
|
||||
use syntax::ast::{self, Ident, Expr, MetaItem, GenericArg};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{sym, Symbol};
|
||||
@ -16,8 +16,8 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>,
cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::EQ);

let inline = cx.meta_word(span, sym::inline);
let hidden = cx.meta_list_item_word(span, sym::hidden);
let doc = cx.meta_list(span, sym::doc, vec![hidden]);
let hidden = syntax::attr::mk_nested_word_item(Ident::new(sym::hidden, span));
let doc = syntax::attr::mk_list_item(Ident::new(sym::doc, span), vec![hidden]);
let attrs = vec![cx.attribute(inline), cx.attribute(doc)];
let trait_def = TraitDef {
span,
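For context, the `sym::inline`, `sym::doc`, and `sym::hidden` symbols above build the attributes that the `Eq` derive attaches to the item it generates; in surface syntax the result is equivalent to writing something like the following by hand (illustration only, with a made-up item name rather than the derive-generated one):

// What the attribute-building calls above amount to in surface syntax.
// `eq_helper` is a hypothetical name standing in for the generated item.
#[inline]
#[doc(hidden)]
fn eq_helper() {}

fn main() {
    eq_helper();
}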
@ -56,7 +56,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt<'_>,
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(span, true,
cx.std_path(&[sym::cmp, Symbol::intern(helper_name)]),
vec![GenericArg::Type(ty)], vec![]);
vec![GenericArg::Type(ty)]);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
fn process_variant(cx: &mut ExtCtxt<'_>,
@ -664,7 +664,7 @@ impl<'a> TraitDef<'a> {
}).collect();

// Create the type of `self`.
let path = cx.path_all(self.span, false, vec![type_ident], self_params, vec![]);
let path = cx.path_all(self.span, false, vec![type_ident], self_params);
let self_type = cx.ty_path(path);

let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived));
@ -672,8 +672,11 @@ impl<'a> TraitDef<'a> {
attr::mark_used(&attr);
let opt_trait_ref = Some(trait_ref);
let unused_qual = {
let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications"));
cx.attribute(cx.meta_list(self.span, sym::allow, vec![word]))
let word = syntax::attr::mk_nested_word_item(
Ident::new(Symbol::intern("unused_qualifications"), self.span));
let list = syntax::attr::mk_list_item(
Ident::new(sym::allow, self.span), vec![word]);
cx.attribute(list)
};

let mut a = vec![attr, unused_qual];
@ -82,12 +82,12 @@ impl<'a> Path<'a> {
.collect();

match self.kind {
PathKind::Global => cx.path_all(span, true, idents, params, Vec::new()),
PathKind::Local => cx.path_all(span, false, idents, params, Vec::new()),
PathKind::Global => cx.path_all(span, true, idents, params),
PathKind::Local => cx.path_all(span, false, idents, params),
PathKind::Std => {
let def_site = cx.with_def_site_ctxt(DUMMY_SP);
idents.insert(0, Ident::new(kw::DollarCrate, def_site));
cx.path_all(span, false, idents, params, Vec::new())
cx.path_all(span, false, idents, params)
}
}
@ -183,7 +183,7 @@ impl<'a> Ty<'a> {
}
}).collect();

cx.path_all(span, false, vec![self_ty], params, vec![])
cx.path_all(span, false, vec![self_ty], params)
}
Literal(ref p) => p.to_path(cx, span, self_ty, generics),
Ptr(..) => cx.span_bug(span, "pointer in a path in generic `derive`"),
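The `cs_clone_shallow`, `cs_total_eq_assert`, `TraitDef`, `Path`, and `Ty` hunks above are all the same mechanical change: call sites stop passing a trailing `vec![]` / `Vec::new()` argument because that parameter was dropped from `cx.path_all`. A self-contained model of this kind of call-site cleanup, using hypothetical stand-in types rather than rustc's real `ExtCtxt` API:

// Hypothetical stand-ins, only to illustrate removing an always-empty
// trailing parameter; this is not rustc's actual `path_all` signature.
#[derive(Debug)]
struct Path {
    global: bool,
    idents: Vec<String>,
    params: Vec<String>,
}

// New shape: the trailing constraints parameter is gone, so callers no
// longer have to append `, vec![]` to every call.
fn path_all(global: bool, idents: Vec<String>, params: Vec<String>) -> Path {
    Path { global, idents, params }
}

fn main() {
    let p = path_all(true, vec!["std".into(), "clone".into()], vec![]);
    println!("{:?}", p);
}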
@ -32,7 +32,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
Ident::new(sym::str, sp)),
Some(lt),
ast::Mutability::Immutable))],
vec![]))
))
}
Ok(s) => {
cx.expr_call_global(sp,
@ -5,6 +5,7 @@ use fmt_macros as parse;

use errors::DiagnosticBuilder;
use errors::Applicability;
use errors::pluralise;

use syntax::ast;
use syntax::ext::base::{self, *};
@ -299,7 +300,7 @@ impl<'a, 'b> Context<'a, 'b> {
&format!(
"{} positional argument{} in format string, but {}",
count,
if count != 1 { "s" } else { "" },
pluralise!(count),
self.describe_num_args(),
),
);
@ -145,8 +145,8 @@ pub fn expand_test_or_bench(
let mut test_const = cx.item(sp, ast::Ident::new(item.ident.name, sp),
vec![
// #[cfg(test)]
cx.attribute(cx.meta_list(attr_sp, sym::cfg, vec![
cx.meta_list_item_word(attr_sp, sym::test)
cx.attribute(attr::mk_list_item(ast::Ident::new(sym::cfg, attr_sp), vec![
attr::mk_nested_word_item(ast::Ident::new(sym::test, attr_sp))
])),
// #[rustc_test_marker]
cx.attribute(cx.meta_word(attr_sp, sym::rustc_test_marker)),
@ -1 +1 @@
Subproject commit 71fe7ec06b85f612fc0e4eb4134c7a7d0f23fac5
Subproject commit 8adf9bdccfefb8d03f0e8db3b012fb41da1580a4
@ -57,25 +57,18 @@ impl Drop for S {
// }
//
// bb5: {
// drop(_4) -> [return: bb8, unwind: bb6];
// }
//
// bb6 (cleanup): {
// drop(_1) -> bb1;
// }
//
// bb7 (cleanup): {
// drop(_4) -> bb6;
// }
//
// bb8: {
// StorageDead(_4);
// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb9;
// drop(_1) -> bb8;
// }
//
// bb9: {
// bb6 (cleanup): {
// drop(_1) -> bb1;
// }
// bb7 (cleanup): {
// drop(_4) -> bb6;
// }
// bb8: {
// StorageDead(_1);
// return;
// }
@ -57,7 +57,7 @@ fn main() {
// StorageLive(_6);
// StorageLive(_7);
// _7 = move _2;
// _6 = const take::<Foo>(move _7) -> [return: bb9, unwind: bb8];
// _6 = const take::<Foo>(move _7) -> [return: bb7, unwind: bb9];
// }
// bb3 (cleanup): {
// StorageDead(_2);
@ -75,17 +75,7 @@ fn main() {
// bb6: {
// generator_drop;
// }
// bb7 (cleanup): {
// StorageDead(_3);
// StorageDead(_2);
// drop(_1) -> bb1;
// }
// bb8 (cleanup): {
// StorageDead(_7);
// StorageDead(_6);
// goto -> bb7;
// }
// bb9: {
// bb7: {
// StorageDead(_7);
// StorageDead(_6);
// StorageLive(_8);
@ -93,6 +83,16 @@ fn main() {
// _9 = move _3;
// _8 = const take::<Bar>(move _9) -> [return: bb10, unwind: bb11];
// }
// bb8 (cleanup): {
// StorageDead(_3);
// StorageDead(_2);
// drop(_1) -> bb1;
// }
// bb9 (cleanup): {
// StorageDead(_7);
// StorageDead(_6);
// goto -> bb8;
// }
// bb10: {
// StorageDead(_9);
// StorageDead(_8);
@ -104,7 +104,7 @@ fn main() {
// bb11 (cleanup): {
// StorageDead(_9);
// StorageDead(_8);
// goto -> bb7;
// goto -> bb8;
// }
// bb12: {
// return;
24
src/test/mir-opt/no-spurious-drop-after-call.rs
Normal file
@ -0,0 +1,24 @@
// ignore-wasm32-bare compiled with panic=abort by default

// Test that after the call to `std::mem::drop` we do not generate a
// MIR drop of the argument. (We used to have a `DROP(_2)` in the code
// below, as part of bb3.)

fn main() {
std::mem::drop("".to_string());
}

// END RUST SOURCE
// START rustc.main.ElaborateDrops.before.mir
// bb2: {
// StorageDead(_3);
// _1 = const std::mem::drop::<std::string::String>(move _2) -> [return: bb3, unwind: bb4];
// }
// bb3: {
// StorageDead(_2);
// StorageDead(_4);
// StorageDead(_1);
// _0 = ();
// return;
// }
// END rustc.main.ElaborateDrops.before.mir
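The point of this new test is that `std::mem::drop` takes its argument by value, so the `String` is moved into the call and destroyed inside `drop`; the caller must not schedule a second MIR drop for it afterwards. A minimal illustration of that ownership transfer in plain Rust (not MIR):

fn main() {
    let s = "".to_string();
    // `s` is moved into `drop`; the callee is responsible for destroying it.
    std::mem::drop(s);
    // Using `s` here would be a "value moved" compile error, and likewise
    // the compiler must not emit another drop of `s` at the end of `main`;
    // that is the spurious drop the MIR test above guards against.
}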
@ -7,7 +7,7 @@ LL | f1(|_: (), _: ()| {});
| expected signature of `for<'r, 's> fn(&'r (), &'s ()) -> _`
...
LL | fn f1<F>(_: F) where F: Fn(&(), &()) {}
| ------------------------------------ required by `f1`
| -- ------------ required by this bound in `f1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:2:5
@ -18,7 +18,7 @@ LL | f1(|_: (), _: ()| {});
| expected signature of `fn(&(), &()) -> _`
...
LL | fn f1<F>(_: F) where F: Fn(&(), &()) {}
| ------------------------------------ required by `f1`
| -- ------------ required by this bound in `f1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:4:5
@ -29,7 +29,7 @@ LL | f2(|_: (), _: ()| {});
| expected signature of `for<'a, 'r> fn(&'a (), &'r ()) -> _`
...
LL | fn f2<F>(_: F) where F: for<'a> Fn(&'a (), &()) {}
| ----------------------------------------------- required by `f2`
| -- ----------------------- required by this bound in `f2`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:4:5
@ -40,7 +40,7 @@ LL | f2(|_: (), _: ()| {});
| expected signature of `fn(&'a (), &()) -> _`
...
LL | fn f2<F>(_: F) where F: for<'a> Fn(&'a (), &()) {}
| ----------------------------------------------- required by `f2`
| -- --------------- required by this bound in `f2`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:6:5
@ -51,7 +51,7 @@ LL | f3(|_: (), _: ()| {});
| expected signature of `for<'r> fn(&(), &'r ()) -> _`
...
LL | fn f3<'a, F>(_: F) where F: Fn(&'a (), &()) {}
| ------------------------------------------- required by `f3`
| -- --------------- required by this bound in `f3`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:6:5
@ -62,7 +62,7 @@ LL | f3(|_: (), _: ()| {});
| expected signature of `fn(&(), &()) -> _`
...
LL | fn f3<'a, F>(_: F) where F: Fn(&'a (), &()) {}
| ------------------------------------------- required by `f3`
| -- --------------- required by this bound in `f3`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:8:5
@ -73,7 +73,7 @@ LL | f4(|_: (), _: ()| {});
| expected signature of `for<'s, 'r> fn(&'s (), &'r ()) -> _`
...
LL | fn f4<F>(_: F) where F: for<'r> Fn(&(), &'r ()) {}
| ----------------------------------------------- required by `f4`
| -- ----------------------- required by this bound in `f4`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:8:5
@ -84,7 +84,7 @@ LL | f4(|_: (), _: ()| {});
| expected signature of `fn(&(), &'r ()) -> _`
...
LL | fn f4<F>(_: F) where F: for<'r> Fn(&(), &'r ()) {}
| ----------------------------------------------- required by `f4`
| -- --------------- required by this bound in `f4`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:10:5
@ -95,7 +95,7 @@ LL | f5(|_: (), _: ()| {});
| expected signature of `for<'r> fn(&'r (), &'r ()) -> _`
...
LL | fn f5<F>(_: F) where F: for<'r> Fn(&'r (), &'r ()) {}
| -------------------------------------------------- required by `f5`
| -- -------------------------- required by this bound in `f5`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:10:5
@ -106,7 +106,7 @@ LL | f5(|_: (), _: ()| {});
| expected signature of `fn(&'r (), &'r ()) -> _`
...
LL | fn f5<F>(_: F) where F: for<'r> Fn(&'r (), &'r ()) {}
| -------------------------------------------------- required by `f5`
| -- ------------------ required by this bound in `f5`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:12:5
@ -117,7 +117,7 @@ LL | g1(|_: (), _: ()| {});
| expected signature of `for<'r> fn(&'r (), std::boxed::Box<(dyn for<'s> std::ops::Fn(&'s ()) + 'static)>) -> _`
...
LL | fn g1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>) {}
| ------------------------------------------------- required by `g1`
| -- ------------------------- required by this bound in `g1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:12:5
@ -128,7 +128,7 @@ LL | g1(|_: (), _: ()| {});
| expected signature of `fn(&(), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>) -> _`
...
LL | fn g1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>) {}
| ------------------------------------------------- required by `g1`
| -- ------------------------- required by this bound in `g1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:14:5
@ -139,7 +139,7 @@ LL | g2(|_: (), _: ()| {});
| expected signature of `for<'r> fn(&'r (), for<'s> fn(&'s ())) -> _`
...
LL | fn g2<F>(_: F) where F: Fn(&(), fn(&())) {}
| ---------------------------------------- required by `g2`
| -- ---------------- required by this bound in `g2`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:14:5
@ -150,7 +150,7 @@ LL | g2(|_: (), _: ()| {});
| expected signature of `fn(&(), for<'r> fn(&'r ())) -> _`
...
LL | fn g2<F>(_: F) where F: Fn(&(), fn(&())) {}
| ---------------------------------------- required by `g2`
| -- ---------------- required by this bound in `g2`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:16:5
@ -161,7 +161,7 @@ LL | g3(|_: (), _: ()| {});
| expected signature of `for<'s> fn(&'s (), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>) -> _`
...
LL | fn g3<F>(_: F) where F: for<'s> Fn(&'s (), Box<dyn Fn(&())>) {}
| ------------------------------------------------------------ required by `g3`
| -- ------------------------------------ required by this bound in `g3`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:16:5
@ -172,7 +172,7 @@ LL | g3(|_: (), _: ()| {});
| expected signature of `fn(&'s (), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>) -> _`
...
LL | fn g3<F>(_: F) where F: for<'s> Fn(&'s (), Box<dyn Fn(&())>) {}
| ------------------------------------------------------------ required by `g3`
| -- ---------------------------- required by this bound in `g3`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:18:5
@ -183,7 +183,7 @@ LL | g4(|_: (), _: ()| {});
| expected signature of `for<'s> fn(&'s (), for<'r> fn(&'r ())) -> _`
...
LL | fn g4<F>(_: F) where F: Fn(&(), for<'r> fn(&'r ())) {}
| --------------------------------------------------- required by `g4`
| -- --------------------------- required by this bound in `g4`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:18:5
@ -194,7 +194,7 @@ LL | g4(|_: (), _: ()| {});
| expected signature of `fn(&(), for<'r> fn(&'r ())) -> _`
...
LL | fn g4<F>(_: F) where F: Fn(&(), for<'r> fn(&'r ())) {}
| --------------------------------------------------- required by `g4`
| -- --------------------------- required by this bound in `g4`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:20:5
@ -205,7 +205,7 @@ LL | h1(|_: (), _: (), _: (), _: ()| {});
| expected signature of `for<'r, 's> fn(&'r (), std::boxed::Box<(dyn for<'t0> std::ops::Fn(&'t0 ()) + 'static)>, &'s (), for<'t0, 't1> fn(&'t0 (), &'t1 ())) -> _`
...
LL | fn h1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>, &(), fn(&(), &())) {}
| -------------------------------------------------------------------- required by `h1`
| -- -------------------------------------------- required by this bound in `h1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:20:5
@ -216,7 +216,7 @@ LL | h1(|_: (), _: (), _: (), _: ()| {});
| expected signature of `fn(&(), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>, &(), for<'r, 's> fn(&'r (), &'s ())) -> _`
...
LL | fn h1<F>(_: F) where F: Fn(&(), Box<dyn Fn(&())>, &(), fn(&(), &())) {}
| -------------------------------------------------------------------- required by `h1`
| -- -------------------------------------------- required by this bound in `h1`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:22:5
@ -227,7 +227,7 @@ LL | h2(|_: (), _: (), _: (), _: ()| {});
| expected signature of `for<'r, 't0> fn(&'r (), std::boxed::Box<(dyn for<'s> std::ops::Fn(&'s ()) + 'static)>, &'t0 (), for<'s, 't1> fn(&'s (), &'t1 ())) -> _`
...
LL | fn h2<F>(_: F) where F: for<'t0> Fn(&(), Box<dyn Fn(&())>, &'t0 (), fn(&(), &())) {}
| --------------------------------------------------------------------------------- required by `h2`
| -- --------------------------------------------------------- required by this bound in `h2`

error[E0631]: type mismatch in closure arguments
--> $DIR/anonymous-higher-ranked-lifetime.rs:22:5
@ -238,7 +238,7 @@ LL | h2(|_: (), _: (), _: (), _: ()| {});
| expected signature of `fn(&(), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>, &'t0 (), for<'r, 's> fn(&'r (), &'s ())) -> _`
...
LL | fn h2<F>(_: F) where F: for<'t0> Fn(&(), Box<dyn Fn(&())>, &'t0 (), fn(&(), &())) {}
| --------------------------------------------------------------------------------- required by `h2`
| -- ------------------------------------------------ required by this bound in `h2`

error: aborting due to 22 previous errors
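All of the stderr hunks above reflect the same diagnostic change: the secondary label now underlines only the trait bound ("required by this bound in `f1`") instead of the whole function signature ("required by `f1`"). The first case can be reproduced with the code already quoted in the stderr; it fails to compile by design with E0631 because the closure takes `()` by value where the bound requires closures over `&()` arguments:

// Deliberately does not compile: reproduces the first E0631 error above.
fn f1<F>(_: F) where F: Fn(&(), &()) {}

fn main() {
    f1(|_: (), _: ()| {}); // error[E0631]: type mismatch in closure arguments
}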
@ -9,6 +9,8 @@ LL | const FROM: &'static str = "foo";
= note: expected type `<T as Foo>::Out`
found type `&'static str`
= note: consider constraining the associated type `<T as Foo>::Out` to `&'static str` or calling a method that returns `<T as Foo>::Out`
= note: for more information, visit https://doc.rust-lang.org/book/ch19-03-advanced-traits.html

error: aborting due to previous error
Some files were not shown because too many files have changed in this diff.