mirror of https://github.com/rust-lang/rust.git
synced 2024-11-22 23:04:33 +00:00
commit 984d29d260

.github/ISSUE_TEMPLATE/config.yml (vendored): 3 changes
@@ -6,3 +6,6 @@ contact_links:
   - name: Feature Request
     url: https://internals.rust-lang.org/
     about: Please discuss language feature requests on the internals forum.
+  - name: Clippy Bug
+    url: https://github.com/rust-lang/rust-clippy/issues/new/choose
+    about: Please report Clippy bugs such as false positives in the Clippy repo.
.github/workflows/ci.yml (vendored): 24 changes
@@ -30,10 +30,11 @@ permissions:
 defaults:
   run:
     shell: bash
+concurrency:
+  group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
+  cancel-in-progress: true
 jobs:
   pr:
-    permissions:
-      actions: write
     name: "PR - ${{ matrix.name }}"
     env:
       CI_JOB_NAME: "${{ matrix.name }}"
@@ -84,11 +85,6 @@ jobs:
       - name: ensure the channel matches the target branch
         run: src/ci/scripts/verify-channel.sh
         if: success() && !env.SKIP_JOB
-      - name: configure GitHub Actions to kill the build when outdated
-        uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
-        with:
-          github_token: "${{ secrets.github_token }}"
-        if: "success() && !env.SKIP_JOB && github.ref != 'refs/heads/try' && github.ref != 'refs/heads/try-perf'"
       - name: collect CPU statistics
         run: src/ci/scripts/collect-cpu-stats.sh
         if: success() && !env.SKIP_JOB
@@ -162,8 +158,6 @@ jobs:
         AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}"
         if: "success() && !env.SKIP_JOB && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')"
   auto:
-    permissions:
-      actions: write
     name: "auto - ${{ matrix.name }}"
     env:
       CI_JOB_NAME: "${{ matrix.name }}"
@@ -496,11 +490,6 @@ jobs:
       - name: ensure the channel matches the target branch
         run: src/ci/scripts/verify-channel.sh
         if: success() && !env.SKIP_JOB
-      - name: configure GitHub Actions to kill the build when outdated
-        uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
-        with:
-          github_token: "${{ secrets.github_token }}"
-        if: "success() && !env.SKIP_JOB && github.ref != 'refs/heads/try' && github.ref != 'refs/heads/try-perf'"
       - name: collect CPU statistics
         run: src/ci/scripts/collect-cpu-stats.sh
         if: success() && !env.SKIP_JOB
@@ -574,8 +563,6 @@ jobs:
         AWS_SECRET_ACCESS_KEY: "${{ secrets[format('AWS_SECRET_ACCESS_KEY_{0}', env.ARTIFACTS_AWS_ACCESS_KEY_ID)] }}"
         if: "success() && !env.SKIP_JOB && (github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1')"
   try:
-    permissions:
-      actions: write
     name: "try - ${{ matrix.name }}"
     env:
       DIST_TRY_BUILD: 1
@@ -623,11 +610,6 @@ jobs:
       - name: ensure the channel matches the target branch
         run: src/ci/scripts/verify-channel.sh
         if: success() && !env.SKIP_JOB
-      - name: configure GitHub Actions to kill the build when outdated
-        uses: rust-lang/simpleinfra/github-actions/cancel-outdated-builds@master
-        with:
-          github_token: "${{ secrets.github_token }}"
-        if: "success() && !env.SKIP_JOB && github.ref != 'refs/heads/try' && github.ref != 'refs/heads/try-perf'"
       - name: collect CPU statistics
         run: src/ci/scripts/collect-cpu-stats.sh
         if: success() && !env.SKIP_JOB
.github/workflows/dependencies.yml (vendored): 3 changes
@@ -25,6 +25,7 @@ env:
 
 jobs:
   not-waiting-on-bors:
+    if: github.repository_owner == 'rust-lang'
     name: skip if S-waiting-on-bors
     runs-on: ubuntu-latest
     steps:
@@ -43,6 +44,7 @@ jobs:
           fi
 
   update:
+    if: github.repository_owner == 'rust-lang'
     name: update dependencies
     needs: not-waiting-on-bors
     runs-on: ubuntu-latest
@@ -76,6 +78,7 @@ jobs:
           retention-days: 1
 
   pr:
+    if: github.repository_owner == 'rust-lang'
     name: amend PR
     needs: update
     runs-on: ubuntu-latest
Cargo.lock: 95 changes
@@ -480,23 +480,6 @@ dependencies = [
  "winapi",
 ]
 
-[[package]]
-name = "clap"
-version = "3.2.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23b71c3ce99b7611011217b366d923f1d0a7e07a92bb2dbf1e84508c673ca3bd"
-dependencies = [
- "atty",
- "bitflags",
- "clap_derive 3.2.18",
- "clap_lex 0.2.2",
- "indexmap",
- "once_cell",
- "strsim",
- "termcolor",
- "textwrap",
-]
-
 [[package]]
 name = "clap"
 version = "4.2.1"
@@ -504,7 +487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3"
 dependencies = [
  "clap_builder",
- "clap_derive 4.2.0",
+ "clap_derive",
  "once_cell",
 ]
 
@@ -517,7 +500,7 @@ dependencies = [
  "anstream",
  "anstyle",
  "bitflags",
- "clap_lex 0.4.1",
+ "clap_lex",
  "once_cell",
  "strsim",
  "terminal_size",
@@ -529,20 +512,7 @@ version = "4.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10861370d2ba66b0f5989f83ebf35db6421713fd92351790e7fdd6c36774c56b"
 dependencies = [
- "clap 4.2.1",
-]
-
-[[package]]
-name = "clap_derive"
-version = "3.2.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65"
-dependencies = [
- "heck",
- "proc-macro-error",
- "proc-macro2",
- "quote",
- "syn 1.0.102",
+ "clap",
 ]
 
 [[package]]
@@ -557,15 +527,6 @@ dependencies = [
  "syn 2.0.8",
 ]
 
-[[package]]
-name = "clap_lex"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5538cd660450ebeb4234cfecf8f2284b844ffc4c50531e66d584ad5b91293613"
-dependencies = [
- "os_str_bytes",
-]
-
 [[package]]
 name = "clap_lex"
 version = "0.4.1"
@@ -576,7 +537,7 @@ checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1"
 name = "clippy"
 version = "0.1.72"
 dependencies = [
- "clap 4.2.1",
+ "clap",
  "clippy_lints",
  "clippy_utils",
  "compiletest_rs",
@@ -605,7 +566,7 @@ name = "clippy_dev"
 version = "0.0.1"
 dependencies = [
  "aho-corasick",
- "clap 4.2.1",
+ "clap",
  "indoc",
  "itertools",
  "opener",
@@ -1749,7 +1710,7 @@ name = "installer"
 version = "0.0.0"
 dependencies = [
  "anyhow",
- "clap 3.2.20",
+ "clap",
  "flate2",
  "num_cpus",
  "rayon",
@@ -1869,7 +1830,7 @@ name = "jsondoclint"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "clap 4.2.1",
+ "clap",
  "fs-err",
  "rustc-hash",
  "rustdoc-json-types",
@@ -2086,7 +2047,7 @@ dependencies = [
  "ammonia",
  "anyhow",
  "chrono",
- "clap 4.2.1",
+ "clap",
  "clap_complete",
  "elasticlunr-rs",
  "env_logger 0.10.0",
@@ -2370,12 +2331,6 @@ dependencies = [
  "vcpkg",
 ]
 
-[[package]]
-name = "os_str_bytes"
-version = "6.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
-
 [[package]]
 name = "owo-colors"
 version = "3.5.0"
@@ -2617,30 +2572,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
-[[package]]
-name = "proc-macro-error"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
-dependencies = [
- "proc-macro-error-attr",
- "proc-macro2",
- "quote",
- "syn 1.0.102",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
-dependencies = [
- "proc-macro2",
- "quote",
- "version_check",
-]
-
 [[package]]
 name = "proc-macro-hack"
 version = "0.5.19"
@@ -2897,7 +2828,7 @@ dependencies = [
 name = "rustbook"
 version = "0.1.0"
 dependencies = [
- "clap 4.2.1",
+ "clap",
  "env_logger 0.10.0",
  "mdbook",
 ]
@@ -4346,7 +4277,7 @@ dependencies = [
 "anyhow",
 "bytecount",
 "cargo_metadata",
- "clap 4.2.1",
+ "clap",
 "diff",
 "dirs",
 "env_logger 0.10.0",
@@ -4874,12 +4805,6 @@ dependencies = [
 "term",
 ]
 
-[[package]]
-name = "textwrap"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
-
 [[package]]
 name = "thin-vec"
 version = "0.2.12"
README.md: 15 changes
@@ -33,24 +33,13 @@ format:
 ```
 
 This is how the documentation and examples assume you are running `x.py`.
-Some alternative ways are:
-
-```sh
-# On a Unix shell if you don't have the necessary `python3` command
-./x <subcommand> [flags]
-
-# On the Windows Command Prompt (if .py files are configured to run Python)
-x.py <subcommand> [flags]
-
-# You can also run Python yourself, e.g.:
-python x.py <subcommand> [flags]
-```
+See the [rustc dev guide][rustcguidebuild] if this does not work on your platform.
 
 More information about `x.py` can be found by running it with the `--help` flag
 or reading the [rustc dev guide][rustcguidebuild].
 
 [gettingstarted]: https://rustc-dev-guide.rust-lang.org/getting-started.html
-[rustcguidebuild]: https://rustc-dev-guide.rust-lang.org/building/how-to-build-and-run.html
+[rustcguidebuild]: https://rustc-dev-guide.rust-lang.org/building/how-to-build-and-run.html#what-is-xpy
 
 ### Dependencies
 
@@ -277,9 +277,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
             ExprKind::Yeet(sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
             ExprKind::Become(sub_expr) => {
                 let sub_expr = self.lower_expr(sub_expr);
-                // FIXME(explicit_tail_calls): Use `hir::ExprKind::Become` once we implemented it
-                hir::ExprKind::Ret(Some(sub_expr))
+                hir::ExprKind::Become(sub_expr)
             }
             ExprKind::InlineAsm(asm) => {
                 hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
@@ -623,13 +623,12 @@ impl<'a> AstValidator<'a> {
     fn maybe_lint_missing_abi(&mut self, span: Span, id: NodeId) {
         // FIXME(davidtwco): This is a hack to detect macros which produce spans of the
         // call site which do not have a macro backtrace. See #61963.
-        let is_macro_callsite = self
+        if self
             .session
             .source_map()
             .span_to_snippet(span)
-            .map(|snippet| snippet.starts_with("#["))
-            .unwrap_or(true);
-        if !is_macro_callsite {
+            .is_ok_and(|snippet| !snippet.starts_with("#["))
+        {
             self.lint_buffer.buffer_lint_with_diagnostic(
                 MISSING_ABI,
                 id,
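The `maybe_lint_missing_abi` change above leans on an equivalence between the old `map(..).unwrap_or(true)` dance and `Result::is_ok_and`. A standalone sketch of that equivalence (plain Rust, not rustc code; the `Result<&str, ()>` type and the predicate are illustrative stand-ins for `span_to_snippet`):

```rust
// Old form: compute `is_macro_callsite` and lint on its negation.
fn old_form(snippet: Result<&str, ()>) -> bool {
    let is_macro_callsite = snippet.map(|s| s.starts_with("#[")).unwrap_or(true);
    !is_macro_callsite
}

// New form: ask the positive question directly.
fn new_form(snippet: Result<&str, ()>) -> bool {
    snippet.is_ok_and(|s| !s.starts_with("#["))
}

fn main() {
    // The two forms agree whether the snippet is available or not.
    for s in [Ok("extern fn f();"), Ok("#[attr] fn f();"), Err(())] {
        assert_eq!(old_form(s), new_form(s));
    }
}
```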
@@ -77,13 +77,6 @@ pub struct ForbiddenLifetimeBound {
     pub spans: Vec<Span>,
 }
 
-#[derive(Diagnostic)]
-#[diag(ast_passes_forbidden_non_lifetime_param)]
-pub struct ForbiddenNonLifetimeParam {
-    #[primary_span]
-    pub spans: Vec<Span>,
-}
-
 #[derive(Diagnostic)]
 #[diag(ast_passes_fn_param_too_many)]
 pub struct FnParamTooMany {
@@ -678,8 +678,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
         let tcx = self.infcx.tcx;
 
         // Find out if the predicates show that the type is a Fn or FnMut
-        let find_fn_kind_from_did = |(pred, _): (ty::Predicate<'tcx>, _)| {
-            if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) = pred.kind().skip_binder()
+        let find_fn_kind_from_did = |(pred, _): (ty::Clause<'tcx>, _)| {
+            if let ty::ClauseKind::Trait(pred) = pred.kind().skip_binder()
                 && pred.self_ty() == ty
             {
                 if Some(pred.def_id()) == tcx.lang_items().fn_trait() {
@@ -705,7 +705,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => tcx
                 .explicit_item_bounds(def_id)
                 .subst_iter_copied(tcx, substs)
-                .find_map(|(clause, span)| find_fn_kind_from_did((clause.as_predicate(), span))),
+                .find_map(|(clause, span)| find_fn_kind_from_did((clause, span))),
             ty::Closure(_, substs) => match substs.as_closure().kind() {
                 ty::ClosureKind::Fn => Some(hir::Mutability::Not),
                 ty::ClosureKind::FnMut => Some(hir::Mutability::Mut),
@@ -123,13 +123,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
                 item_msg = access_place_desc;
                 debug_assert!(self.body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty.is_ref());
                 debug_assert!(is_closure_or_generator(
-                    Place::ty_from(
-                        the_place_err.local,
-                        the_place_err.projection,
-                        self.body,
-                        self.infcx.tcx
-                    )
-                    .ty
+                    the_place_err.ty(self.body, self.infcx.tcx).ty
                 ));
 
                 reason = if self.is_upvar_field_projection(access_place.as_ref()).is_some() {
@@ -928,7 +928,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
 
     fn any_param_predicate_mentions(
         &self,
-        predicates: &[ty::Predicate<'tcx>],
+        clauses: &[ty::Clause<'tcx>],
         ty: Ty<'tcx>,
         region: ty::EarlyBoundRegion,
     ) -> bool {
@@ -937,10 +937,10 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
             if let ty::GenericArgKind::Type(ty) = arg.unpack()
                 && let ty::Param(_) = ty.kind()
             {
-                predicates.iter().any(|pred| {
+                clauses.iter().any(|pred| {
                     match pred.kind().skip_binder() {
-                        ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) if data.self_ty() == ty => {}
-                        ty::PredicateKind::Clause(ty::ClauseKind::Projection(data)) if data.projection_ty.self_ty() == ty => {}
+                        ty::ClauseKind::Trait(data) if data.self_ty() == ty => {}
+                        ty::ClauseKind::Projection(data) if data.projection_ty.self_ty() == ty => {}
                         _ => return false,
                     }
                     tcx.any_free_region_meets(pred, |r| {
@@ -46,11 +46,9 @@ impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
             }
         }
 
-        for (i, elem) in self.projection.iter().enumerate() {
-            let proj_base = &self.projection[..i];
-
+        for (i, (proj_base, elem)) in self.iter_projections().enumerate() {
             if elem == ProjectionElem::Deref {
-                let ty = Place::ty_from(self.local, proj_base, body, tcx).ty;
+                let ty = proj_base.ty(body, tcx).ty;
                 match ty.kind() {
                     ty::Ref(_, _, hir::Mutability::Not) if i == 0 => {
                         // For references to thread-local statics, we do need
@@ -137,13 +137,11 @@ fn place_components_conflict<'tcx>(
     }
 
     // loop invariant: borrow_c is always either equal to access_c or disjoint from it.
-    for (i, (borrow_c, &access_c)) in
-        iter::zip(borrow_place.projection, access_place.projection).enumerate()
+    for ((borrow_place, borrow_c), &access_c) in
+        iter::zip(borrow_place.iter_projections(), access_place.projection)
     {
         debug!(?borrow_c, ?access_c);
 
-        let borrow_proj_base = &borrow_place.projection[..i];
-
         // Borrow and access path both have more components.
         //
         // Examples:
@@ -156,15 +154,7 @@ fn place_components_conflict<'tcx>(
         // check whether the components being borrowed vs
         // accessed are disjoint (as in the second example,
         // but not the first).
-        match place_projection_conflict(
-            tcx,
-            body,
-            borrow_local,
-            borrow_proj_base,
-            borrow_c,
-            access_c,
-            bias,
-        ) {
+        match place_projection_conflict(tcx, body, borrow_place, borrow_c, access_c, bias) {
             Overlap::Arbitrary => {
                 // We have encountered different fields of potentially
                 // the same union - the borrow now partially overlaps.
@@ -195,8 +185,7 @@ fn place_components_conflict<'tcx>(
     }
 
     if borrow_place.projection.len() > access_place.projection.len() {
-        for (i, elem) in borrow_place.projection[access_place.projection.len()..].iter().enumerate()
-        {
+        for (base, elem) in borrow_place.iter_projections().skip(access_place.projection.len()) {
             // Borrow path is longer than the access path. Examples:
             //
             // - borrow of `a.b.c`, access to `a.b`
@@ -205,8 +194,7 @@ fn place_components_conflict<'tcx>(
             // our place. This is a conflict if that is a part our
             // access cares about.
 
-            let proj_base = &borrow_place.projection[..access_place.projection.len() + i];
-            let base_ty = Place::ty_from(borrow_local, proj_base, body, tcx).ty;
+            let base_ty = base.ty(body, tcx).ty;
 
             match (elem, &base_ty.kind(), access) {
                 (_, _, Shallow(Some(ArtificialField::ArrayLength)))
@@ -310,8 +298,7 @@ fn place_base_conflict(l1: Local, l2: Local) -> Overlap {
 fn place_projection_conflict<'tcx>(
     tcx: TyCtxt<'tcx>,
     body: &Body<'tcx>,
-    pi1_local: Local,
-    pi1_proj_base: &[PlaceElem<'tcx>],
+    pi1: PlaceRef<'tcx>,
     pi1_elem: PlaceElem<'tcx>,
     pi2_elem: PlaceElem<'tcx>,
     bias: PlaceConflictBias,
@@ -333,7 +320,7 @@ fn place_projection_conflict<'tcx>(
             debug!("place_element_conflict: DISJOINT-OR-EQ-FIELD");
             Overlap::EqualOrDisjoint
         } else {
-            let ty = Place::ty_from(pi1_local, pi1_proj_base, body, tcx).ty;
+            let ty = pi1.ty(body, tcx).ty;
             if ty.is_union() {
                 // Different fields of a union, we are basically stuck.
                 debug!("place_element_conflict: STUCK-UNION");
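The borrowck hunks above replace manual `&place.projection[..i]` slicing with `iter_projections()`. As a rough illustration of the shape of that iterator (a plain-Rust sketch over slices, not the rustc `PlaceRef` API):

```rust
// For projections [a, b, c] this yields (&[], a), (&[a], b), (&[a, b], c),
// i.e. each element paired with the base it projects from, which is exactly
// what the removed `proj_base` slicing used to compute by hand.
fn iter_projections<'a, T>(proj: &'a [T]) -> impl Iterator<Item = (&'a [T], &'a T)> + 'a {
    proj.iter().enumerate().map(move |(i, elem)| (&proj[..i], elem))
}

fn main() {
    let proj = ["deref", "field(0)", "index"];
    for (base, elem) in iter_projections(&proj) {
        println!("{base:?} -> {elem}");
    }
}
```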
@@ -50,7 +50,6 @@ use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
 use rustc_mir_dataflow::move_paths::MoveData;
 use rustc_mir_dataflow::ResultsCursor;
 
-use crate::renumber::RegionCtxt;
 use crate::session_diagnostics::MoveUnsized;
 use crate::{
     borrow_set::BorrowSet,
@@ -1040,9 +1039,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             .collect();
 
         let renumbered_opaques = self.infcx.tcx.fold_regions(opaques, |_, _| {
-            self.infcx.next_nll_region_var(
+            self.infcx.next_nll_region_var_in_universe(
                 NllRegionVariableOrigin::Existential { from_forall: false },
-                || RegionCtxt::Unknown,
+                ty::UniverseIndex::ROOT,
             )
         });
 
@@ -2503,7 +2502,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             location, borrow_region, borrowed_place
         );
 
-        let mut cursor = borrowed_place.projection.as_ref();
         let tcx = self.infcx.tcx;
         let field = path_utils::is_upvar_field_projection(
             tcx,
@@ -2517,14 +2515,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             ConstraintCategory::Boring
         };
 
-        while let [proj_base @ .., elem] = cursor {
-            cursor = proj_base;
-
+        for (base, elem) in borrowed_place.as_ref().iter_projections().rev() {
             debug!("add_reborrow_constraint - iteration {:?}", elem);
 
             match elem {
                 ProjectionElem::Deref => {
-                    let base_ty = Place::ty_from(borrowed_place.local, proj_base, body, tcx).ty;
+                    let base_ty = base.ty(body, tcx).ty;
 
                     debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
                     match base_ty.kind() {
@@ -8,7 +8,7 @@ use rustc_feature::AttributeTemplate;
 use rustc_parse::validate_attr;
 use rustc_session::Session;
 use rustc_span::symbol::{sym, Ident};
-use rustc_span::Span;
+use rustc_span::{ErrorGuaranteed, Span};
 
 pub(crate) struct Expander(pub bool);
 
@@ -22,7 +22,7 @@ impl MultiItemModifier for Expander {
         _: bool,
     ) -> ExpandResult<Vec<Annotatable>, Annotatable> {
         let sess = ecx.sess;
-        if report_bad_target(sess, &item, span) {
+        if report_bad_target(sess, &item, span).is_err() {
             // We don't want to pass inappropriate targets to derive macros to avoid
             // follow up errors, all other errors below are recoverable.
             return ExpandResult::Ready(vec![item]);
@@ -103,7 +103,11 @@ fn dummy_annotatable() -> Annotatable {
     })
 }
 
-fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
+fn report_bad_target(
+    sess: &Session,
+    item: &Annotatable,
+    span: Span,
+) -> Result<(), ErrorGuaranteed> {
     let item_kind = match item {
         Annotatable::Item(item) => Some(&item.kind),
         Annotatable::Stmt(stmt) => match &stmt.kind {
@@ -116,9 +120,9 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
     let bad_target =
         !matches!(item_kind, Some(ItemKind::Struct(..) | ItemKind::Enum(..) | ItemKind::Union(..)));
     if bad_target {
-        sess.emit_err(errors::BadDeriveTarget { span, item: item.span() });
+        return Err(sess.emit_err(errors::BadDeriveTarget { span, item: item.span() }));
     }
-    bad_target
+    Ok(())
 }
 
 fn report_unexpected_meta_item_lit(sess: &Session, lit: &ast::MetaItemLit) {
@@ -8,7 +8,7 @@ use rustc_ast_pretty::pprust;
 use rustc_errors::Applicability;
 use rustc_expand::base::*;
 use rustc_span::symbol::{sym, Ident, Symbol};
-use rustc_span::{FileNameDisplayPreference, Span};
+use rustc_span::{ErrorGuaranteed, FileNameDisplayPreference, Span};
 use std::iter;
 use thin_vec::{thin_vec, ThinVec};
 
@@ -128,12 +128,15 @@ pub fn expand_test_or_bench(
         };
     };
 
-    // has_*_signature will report any errors in the type so compilation
+    // check_*_signature will report any errors in the type so compilation
     // will fail. We shouldn't try to expand in this case because the errors
     // would be spurious.
-    if (!is_bench && !has_test_signature(cx, &item))
-        || (is_bench && !has_bench_signature(cx, &item))
-    {
+    let check_result = if is_bench {
+        check_bench_signature(cx, &item, &fn_)
+    } else {
+        check_test_signature(cx, &item, &fn_)
+    };
+    if check_result.is_err() {
         return if is_stmt {
             vec![Annotatable::Stmt(P(cx.stmt_item(item.span, item)))]
         } else {
@@ -523,72 +526,62 @@ fn test_type(cx: &ExtCtxt<'_>) -> TestType {
     }
 }
 
-fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
+fn check_test_signature(
+    cx: &ExtCtxt<'_>,
+    i: &ast::Item,
+    f: &ast::Fn,
+) -> Result<(), ErrorGuaranteed> {
     let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic);
     let sd = &cx.sess.parse_sess.span_diagnostic;
-    match &i.kind {
-        ast::ItemKind::Fn(box ast::Fn { sig, generics, .. }) => {
-            if let ast::Unsafe::Yes(span) = sig.header.unsafety {
-                sd.emit_err(errors::TestBadFn { span: i.span, cause: span, kind: "unsafe" });
-                return false;
-            }
-            if let ast::Async::Yes { span, .. } = sig.header.asyncness {
-                sd.emit_err(errors::TestBadFn { span: i.span, cause: span, kind: "async" });
-                return false;
-            }
-
-            // If the termination trait is active, the compiler will check that the output
-            // type implements the `Termination` trait as `libtest` enforces that.
-            let has_output = match &sig.decl.output {
-                ast::FnRetTy::Default(..) => false,
-                ast::FnRetTy::Ty(t) if t.kind.is_unit() => false,
-                _ => true,
-            };
-
-            if !sig.decl.inputs.is_empty() {
-                sd.span_err(i.span, "functions used as tests can not have any arguments");
-                return false;
-            }
-
-            if has_should_panic_attr && has_output {
-                sd.span_err(i.span, "functions using `#[should_panic]` must return `()`");
-                return false;
-            }
-
-            if generics.params.iter().any(|param| !matches!(param.kind, GenericParamKind::Lifetime))
-            {
-                sd.span_err(
-                    i.span,
-                    "functions used as tests can not have any non-lifetime generic parameters",
-                );
-                return false;
-            }
-
-            true
-        }
-        _ => {
-            // should be unreachable because `is_test_fn_item` should catch all non-fn items
-            debug_assert!(false);
-            false
-        }
+    if let ast::Unsafe::Yes(span) = f.sig.header.unsafety {
+        return Err(sd.emit_err(errors::TestBadFn { span: i.span, cause: span, kind: "unsafe" }));
     }
-}
-
-fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
-    let has_sig = match &i.kind {
-        // N.B., inadequate check, but we're running
-        // well before resolve, can't get too deep.
-        ast::ItemKind::Fn(box ast::Fn { sig, .. }) => sig.decl.inputs.len() == 1,
-        _ => false,
+
+    if let ast::Async::Yes { span, .. } = f.sig.header.asyncness {
+        return Err(sd.emit_err(errors::TestBadFn { span: i.span, cause: span, kind: "async" }));
+    }
+
+    // If the termination trait is active, the compiler will check that the output
+    // type implements the `Termination` trait as `libtest` enforces that.
+    let has_output = match &f.sig.decl.output {
+        ast::FnRetTy::Default(..) => false,
+        ast::FnRetTy::Ty(t) if t.kind.is_unit() => false,
+        _ => true,
     };
 
-    if !has_sig {
-        cx.sess.parse_sess.span_diagnostic.span_err(
+    if !f.sig.decl.inputs.is_empty() {
+        return Err(sd.span_err(i.span, "functions used as tests can not have any arguments"));
+    }
+
+    if has_should_panic_attr && has_output {
+        return Err(sd.span_err(i.span, "functions using `#[should_panic]` must return `()`"));
+    }
+
+    if f.generics.params.iter().any(|param| !matches!(param.kind, GenericParamKind::Lifetime)) {
+        return Err(sd.span_err(
+            i.span,
+            "functions used as tests can not have any non-lifetime generic parameters",
+        ));
+    }
+
+    Ok(())
+}
+
+fn check_bench_signature(
+    cx: &ExtCtxt<'_>,
+    i: &ast::Item,
+    f: &ast::Fn,
+) -> Result<(), ErrorGuaranteed> {
+    // N.B., inadequate check, but we're running
+    // well before resolve, can't get too deep.
+    if f.sig.decl.inputs.len() != 1 {
+        return Err(cx.sess.parse_sess.span_diagnostic.span_err(
             i.span,
             "functions used as benches must have \
             signature `fn(&mut Bencher) -> impl Termination`",
-        );
+        ));
     }
 
-    has_sig
+    Ok(())
 }
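The `expand_test_or_bench` hunks above follow a pattern that recurs in this commit: signature checks that used to return `bool` now return `Result<(), ErrorGuaranteed>`. A minimal sketch of that shape (the `ErrorGuaranteed` here is a stand-in unit type, not the rustc token):

```rust
// "An error was already emitted" is now carried by the Err variant instead of
// being re-derived from a `false` return value at every call site.
#[derive(Debug, Clone, Copy)]
struct ErrorGuaranteed;

fn check_bench_arity(n_inputs: usize) -> Result<(), ErrorGuaranteed> {
    if n_inputs != 1 {
        // In the real code this is `sd.span_err(...)` / `sd.emit_err(...)`,
        // both of which hand back such a token.
        return Err(ErrorGuaranteed);
    }
    Ok(())
}

fn main() {
    assert!(check_bench_arity(1).is_ok());
    assert!(check_bench_arity(0).is_err());
}
```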
@@ -225,10 +225,10 @@ pub(crate) fn write_ir_file(
     let res = std::fs::File::create(clif_file_name).and_then(|mut file| write(&mut file));
     if let Err(err) = res {
         // Using early_warn as no Session is available here
-        rustc_session::early_warn(
+        let handler = rustc_session::EarlyErrorHandler::new(
             rustc_session::config::ErrorOutputType::default(),
-            format!("error writing ir file: {}", err),
         );
+        handler.early_warn(format!("error writing ir file: {}", err));
     }
 }
 
@@ -572,8 +572,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     ) {
         let zero = self.const_usize(0);
         let count = self.const_usize(count);
-        let start = dest.project_index(self, zero).llval;
-        let end = dest.project_index(self, count).llval;
 
         let header_bb = self.append_sibling_block("repeat_loop_header");
         let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -582,24 +580,18 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         self.br(header_bb);
 
         let mut header_bx = Self::build(self.cx, header_bb);
-        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);
+        let i = header_bx.phi(self.val_ty(zero), &[zero], &[self.llbb()]);
 
-        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
+        let keep_going = header_bx.icmp(IntPredicate::IntULT, i, count);
         header_bx.cond_br(keep_going, body_bb, next_bb);
 
         let mut body_bx = Self::build(self.cx, body_bb);
-        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
-        cg_elem
-            .val
-            .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));
+        let dest_elem = dest.project_index(&mut body_bx, i);
+        cg_elem.val.store(&mut body_bx, dest_elem);
 
-        let next = body_bx.inbounds_gep(
-            self.backend_type(cg_elem.layout),
-            current,
-            &[self.const_usize(1)],
-        );
+        let next = body_bx.unchecked_uadd(i, self.const_usize(1));
         body_bx.br(header_bb);
-        header_bx.add_incoming_to_phi(current, next, body_bb);
+        header_bx.add_incoming_to_phi(i, next, body_bb);
 
         *self = Self::build(self.cx, next_bb);
     }
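The `write_operand_repeatedly` rewrite above switches the emitted repeat loop from advancing a raw element pointer to iterating an index. Roughly, the generated loop now corresponds to the following plain-Rust shape (a sketch of the control flow, not the codegen API):

```rust
// Index-based repeat loop: compare `i < count` and store into `dest[i]`,
// instead of advancing a pointer with `inbounds_gep` and comparing it against
// a precomputed end pointer.
fn write_repeatedly<T: Copy>(dest: &mut [T], elem: T, count: usize) {
    let mut i = 0; // the phi node, seeded with `zero`
    while i < count {
        // icmp IntULT i, count
        dest[i] = elem; // project_index + store
        i += 1; // unchecked_uadd(i, 1) in the generated code
    }
}

fn main() {
    let mut buf = [0u8; 4];
    write_repeatedly(&mut buf, 7, 3);
    assert_eq!(buf, [7, 7, 7, 0]);
}
```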
@@ -4,6 +4,7 @@
 #![feature(if_let_guard)]
 #![feature(int_roundings)]
 #![feature(let_chains)]
+#![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(strict_provenance)]
 #![feature(try_blocks)]
@@ -1729,7 +1729,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             IndirectOperand(tmp, index) => {
                 let op = bx.load_operand(tmp);
                 tmp.storage_dead(bx);
-                self.locals[index] = LocalRef::Operand(op);
+                self.overwrite_local(index, LocalRef::Operand(op));
                 self.debug_introduce_local(bx, index);
             }
             DirectOperand(index) => {
@@ -1744,7 +1744,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 } else {
                     OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                 };
-                self.locals[index] = LocalRef::Operand(op);
+                self.overwrite_local(index, LocalRef::Operand(op));
                 self.debug_introduce_local(bx, index);
             }
         }
@@ -248,7 +248,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     }
 
     fn spill_operand_to_stack(
-        operand: &OperandRef<'tcx, Bx::Value>,
+        operand: OperandRef<'tcx, Bx::Value>,
         name: Option<String>,
         bx: &mut Bx,
     ) -> PlaceRef<'tcx, Bx::Value> {
@@ -375,7 +375,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     return;
                 }
 
-                Self::spill_operand_to_stack(operand, name, bx)
+                Self::spill_operand_to_stack(*operand, name, bx)
             }
 
             LocalRef::Place(place) => *place,
@@ -550,7 +550,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             if let Ok(operand) = self.eval_mir_constant_to_operand(bx, &c) {
                 self.set_debug_loc(bx, var.source_info);
                 let base = Self::spill_operand_to_stack(
-                    &operand,
+                    operand,
                     Some(var.name.to_string()),
                     bx,
                 );
compiler/rustc_codegen_ssa/src/mir/locals.rs (new file): 75 lines

@@ -0,0 +1,75 @@
+//! Locals are in a private module as updating `LocalRef::Operand` has to
+//! be careful wrt to subtyping. To deal with this we only allow updates by using
+//! `FunctionCx::overwrite_local` which handles it automatically.
+use crate::mir::{FunctionCx, LocalRef};
+use crate::traits::BuilderMethods;
+use rustc_index::IndexVec;
+use rustc_middle::mir;
+use rustc_middle::ty::print::with_no_trimmed_paths;
+use std::ops::{Index, IndexMut};
+
+pub(super) struct Locals<'tcx, V> {
+    values: IndexVec<mir::Local, LocalRef<'tcx, V>>,
+}
+
+impl<'tcx, V> Index<mir::Local> for Locals<'tcx, V> {
+    type Output = LocalRef<'tcx, V>;
+    #[inline]
+    fn index(&self, index: mir::Local) -> &LocalRef<'tcx, V> {
+        &self.values[index]
+    }
+}
+
+/// To mutate locals, use `FunctionCx::overwrite_local` instead.
+impl<'tcx, V, Idx: ?Sized> !IndexMut<Idx> for Locals<'tcx, V> {}
+
+impl<'tcx, V> Locals<'tcx, V> {
+    pub(super) fn empty() -> Locals<'tcx, V> {
+        Locals { values: IndexVec::default() }
+    }
+
+    pub(super) fn indices(&self) -> impl DoubleEndedIterator<Item = mir::Local> + Clone + 'tcx {
+        self.values.indices()
+    }
+}
+
+impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+    pub(super) fn initialize_locals(&mut self, values: Vec<LocalRef<'tcx, Bx::Value>>) {
+        assert!(self.locals.values.is_empty());
+
+        for (local, value) in values.into_iter().enumerate() {
+            match value {
+                LocalRef::Place(_) | LocalRef::UnsizedPlace(_) | LocalRef::PendingOperand => (),
+                LocalRef::Operand(op) => {
+                    let local = mir::Local::from_usize(local);
+                    let expected_ty = self.monomorphize(self.mir.local_decls[local].ty);
+                    assert_eq!(expected_ty, op.layout.ty, "unexpected initial operand type");
+                }
+            }
+
+            self.locals.values.push(value);
+        }
+    }
+
+    pub(super) fn overwrite_local(
+        &mut self,
+        local: mir::Local,
+        mut value: LocalRef<'tcx, Bx::Value>,
+    ) {
+        match value {
+            LocalRef::Place(_) | LocalRef::UnsizedPlace(_) | LocalRef::PendingOperand => (),
+            LocalRef::Operand(ref mut op) => {
+                let local_ty = self.monomorphize(self.mir.local_decls[local].ty);
+                if local_ty != op.layout.ty {
+                    // FIXME(#112651): This can be changed to an ICE afterwards.
+                    debug!("updating type of operand due to subtyping");
+                    with_no_trimmed_paths!(debug!(?op.layout.ty));
+                    with_no_trimmed_paths!(debug!(?local_ty));
+                    op.layout.ty = local_ty;
+                }
+            }
+        };
+
+        self.locals.values[local] = value;
+    }
+}
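The module documentation in the new file above alludes to subtyping: the operand stored into a local can have a type that is a strict subtype of the local's declared type, which is why `overwrite_local` re-normalizes `op.layout.ty`. A self-contained illustration of the underlying language fact (ordinary Rust, unrelated to the codegen types):

```rust
// `&'static str` is a subtype of `&'a str`, so the value written into `slot`
// can carry a "bigger" lifetime than the slot's declared type. MIR produced
// from code like this is what `overwrite_local` has to tolerate (see the
// FIXME(#112651) comment in the new module).
fn store<'a>(slot: &mut Option<&'a str>, value: &'static str) {
    *slot = Some(value); // subtyping coercion happens here
}

fn main() {
    let mut slot = None;
    store(&mut slot, "hello");
    assert_eq!(slot, Some("hello"));
}
```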
@@ -1,21 +1,31 @@
 use crate::base;
 use crate::traits::*;
+use rustc_index::bit_set::BitSet;
+use rustc_index::IndexVec;
 use rustc_middle::mir;
 use rustc_middle::mir::interpret::ErrorHandled;
+use rustc_middle::mir::traversal;
 use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
 use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
 use rustc_target::abi::call::{FnAbi, PassMode};
 
 use std::iter;
 
-use rustc_index::bit_set::BitSet;
-use rustc_index::IndexVec;
+mod analyze;
+mod block;
+pub mod constant;
+pub mod coverageinfo;
+pub mod debuginfo;
+mod intrinsic;
+mod locals;
+pub mod operand;
+pub mod place;
+mod rvalue;
+mod statement;
 
 use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
-use self::place::PlaceRef;
-use rustc_middle::mir::traversal;
 
 use self::operand::{OperandRef, OperandValue};
+use self::place::PlaceRef;
 
 // Used for tracking the state of generated basic blocks.
 enum CachedLlbb<T> {
@@ -91,7 +101,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     ///
     /// Avoiding allocs can also be important for certain intrinsics,
     /// notably `expect`.
-    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,
+    locals: locals::Locals<'tcx, Bx::Value>,
 
     /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
     /// This is `None` if no var`#[non_exhaustive]`iable debuginfo/names are needed.
@@ -192,7 +202,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         cleanup_kinds,
         landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
         funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
-        locals: IndexVec::new(),
+        locals: locals::Locals::empty(),
         debug_context,
         per_local_var_debug_info: None,
         caller_location: None,
@@ -223,7 +233,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let memory_locals = analyze::non_ssa_locals(&fx);
 
     // Allocate variable and temp allocas
-    fx.locals = {
+    let local_values = {
         let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);
 
         let mut allocate_local = |local| {
@@ -256,6 +266,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             .chain(mir.vars_and_temps_iter().map(allocate_local))
             .collect()
     };
+    fx.initialize_locals(local_values);
 
     // Apply debuginfo to the newly allocated locals.
     fx.debug_introduce_locals(&mut start_bx);
@@ -289,14 +300,13 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         .enumerate()
         .map(|(arg_index, local)| {
             let arg_decl = &mir.local_decls[local];
+            let arg_ty = fx.monomorphize(arg_decl.ty);
+
             if Some(local) == mir.spread_arg {
                 // This argument (e.g., the last argument in the "rust-call" ABI)
                 // is a tuple that was spread at the ABI level and now we have
                 // to reconstruct it into a tuple local variable, from multiple
                 // individual LLVM function arguments.
-
-                let arg_ty = fx.monomorphize(arg_decl.ty);
                 let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                     bug!("spread argument isn't a tuple?!");
                 };
@@ -331,8 +341,6 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             }
 
             if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
-                let arg_ty = fx.monomorphize(arg_decl.ty);
-
                 let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                 bx.va_start(va_list.llval);
 
@@ -429,14 +437,3 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     args
 }
-
-mod analyze;
-mod block;
-pub mod constant;
-pub mod coverageinfo;
-pub mod debuginfo;
-mod intrinsic;
-pub mod operand;
-pub mod place;
-mod rvalue;
-mod statement;
@@ -20,7 +20,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             }
             LocalRef::PendingOperand => {
                 let operand = self.codegen_rvalue_operand(bx, rvalue);
-                self.locals[index] = LocalRef::Operand(operand);
+                self.overwrite_local(index, LocalRef::Operand(operand));
                 self.debug_introduce_local(bx, index);
             }
             LocalRef::Operand(op) => {
@@ -44,6 +44,7 @@ const ARM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     // #[target_feature].
     ("thumb-mode", Some(sym::arm_target_feature)),
    ("thumb2", Some(sym::arm_target_feature)),
+    ("trustzone", Some(sym::arm_target_feature)),
     ("v5te", Some(sym::arm_target_feature)),
     ("v6", Some(sym::arm_target_feature)),
     ("v6k", Some(sym::arm_target_feature)),
@@ -53,6 +54,7 @@ const ARM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("vfp2", Some(sym::arm_target_feature)),
     ("vfp3", Some(sym::arm_target_feature)),
     ("vfp4", Some(sym::arm_target_feature)),
+    ("virtualization", Some(sym::arm_target_feature)),
     // tidy-alphabetical-end
 ];
 
@@ -14,7 +14,7 @@ use rustc_middle::ty::{self, TyCtxt};
 use rustc_span::source_map::Span;
 use rustc_target::abi::{self, Abi};

-use super::{CompileTimeEvalContext, CompileTimeInterpreter};
+use super::{CanAccessStatics, CompileTimeEvalContext, CompileTimeInterpreter};
 use crate::errors;
 use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
@@ -93,7 +93,7 @@ pub(super) fn mk_eval_cx<'mir, 'tcx>(
 tcx: TyCtxt<'tcx>,
 root_span: Span,
 param_env: ty::ParamEnv<'tcx>,
-can_access_statics: bool,
+can_access_statics: CanAccessStatics,
 ) -> CompileTimeEvalContext<'mir, 'tcx> {
 debug!("mk_eval_cx: {:?}", param_env);
 InterpCx::new(
@@ -207,7 +207,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
 tcx,
 tcx.def_span(key.value.instance.def_id()),
 key.param_env,
-/*can_access_statics:*/ is_static,
+CanAccessStatics::from(is_static),
 );

 let mplace = ecx.raw_const_to_mplace(constant).expect(
@@ -309,7 +309,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
 // Statics (and promoteds inside statics) may access other statics, because unlike consts
 // they do not have to behave "as if" they were evaluated at runtime.
 CompileTimeInterpreter::new(
-/*can_access_statics:*/ is_static,
+CanAccessStatics::from(is_static),
 if tcx.sess.opts.unstable_opts.extra_const_ub_checks {
 CheckAlignment::Error
 } else {
@@ -57,7 +57,7 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
 /// * Interning makes everything outside of statics immutable.
 /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
 /// This boolean here controls the second part.
-pub(super) can_access_statics: bool,
+pub(super) can_access_statics: CanAccessStatics,

 /// Whether to check alignment during evaluation.
 pub(super) check_alignment: CheckAlignment,
@@ -83,8 +83,23 @@ impl CheckAlignment {
 }
 }

+#[derive(Copy, Clone, PartialEq)]
+pub(crate) enum CanAccessStatics {
+No,
+Yes,
+}
+
+impl From<bool> for CanAccessStatics {
+fn from(value: bool) -> Self {
+if value { Self::Yes } else { Self::No }
+}
+}
+
 impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
-pub(crate) fn new(can_access_statics: bool, check_alignment: CheckAlignment) -> Self {
+pub(crate) fn new(
+can_access_statics: CanAccessStatics,
+check_alignment: CheckAlignment,
+) -> Self {
 CompileTimeInterpreter {
 num_evaluated_steps: 0,
 stack: Vec::new(),
@@ -699,7 +714,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
 }
 } else {
 // Read access. These are usually allowed, with some exceptions.
-if machine.can_access_statics {
+if machine.can_access_statics == CanAccessStatics::Yes {
 // Machine configuration allows us read from anything (e.g., `static` initializer).
 Ok(())
 } else if static_def_id.is_some() {
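Aside: the hunks above replace a bare `bool` with a named two-variant `CanAccessStatics` enum plus a `From<bool>` conversion, so call sites read as `CanAccessStatics::No` rather than an anonymous `false`. Below is a small self-contained sketch of that pattern in plain Rust; it is not the rustc interpreter machine, and `check_read` and its rule are invented here purely for illustration.

```rust
#[derive(Copy, Clone, PartialEq, Debug)]
enum CanAccessStatics {
    No,
    Yes,
}

impl From<bool> for CanAccessStatics {
    // Keeps migration cheap for call sites that still compute a boolean,
    // mirroring the `CanAccessStatics::from(is_static)` calls above.
    fn from(value: bool) -> Self {
        if value { Self::Yes } else { Self::No }
    }
}

/// Invented helper for the sketch: mimics the read-access rule in the machine hook.
fn check_read(can_access_statics: CanAccessStatics, reading_a_static: bool) -> Result<(), String> {
    if can_access_statics == CanAccessStatics::Yes {
        // Configuration allows reading from anything (e.g. a `static` initializer).
        Ok(())
    } else if reading_a_static {
        Err("this context may not read from statics".to_string())
    } else {
        Ok(())
    }
}

fn main() {
    assert_eq!(CanAccessStatics::from(true), CanAccessStatics::Yes);
    assert!(check_read(CanAccessStatics::No, true).is_err());
    assert!(check_read(CanAccessStatics::from(true), true).is_ok());
}
```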
@@ -26,7 +26,7 @@ pub(crate) fn const_caller_location(
 (file, line, col): (Symbol, u32, u32),
 ) -> ConstValue<'_> {
 trace!("const_caller_location: {}:{}:{}", file, line, col);
-let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
+let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), CanAccessStatics::No);

 let loc_place = ecx.alloc_caller_location(file, line, col);
 if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
@@ -55,10 +55,12 @@ pub(crate) fn eval_to_valtree<'tcx>(

 // FIXME Need to provide a span to `eval_to_valtree`
 let ecx = mk_eval_cx(
-tcx, DUMMY_SP, param_env,
+tcx,
+DUMMY_SP,
+param_env,
 // It is absolutely crucial for soundness that
 // we do not read from static items or other mutable memory.
-false,
+CanAccessStatics::No,
 );
 let place = ecx.raw_const_to_mplace(const_alloc).unwrap();
 debug!(?place);
@@ -91,7 +93,7 @@ pub(crate) fn try_destructure_mir_constant<'tcx>(
 val: mir::ConstantKind<'tcx>,
 ) -> InterpResult<'tcx, mir::DestructuredConstant<'tcx>> {
 trace!("destructure_mir_constant: {:?}", val);
-let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
+let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessStatics::No);
 let op = ecx.eval_mir_constant(&val, None, None)?;

 // We go to `usize` as we cannot allocate anything bigger anyway.
@@ -1,6 +1,7 @@
 use super::eval_queries::{mk_eval_cx, op_to_const};
 use super::machine::CompileTimeEvalContext;
 use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
+use crate::const_eval::CanAccessStatics;
 use crate::interpret::{
 intern_const_alloc_recursive, ConstValue, ImmTy, Immediate, InternKind, MemPlaceMeta,
 MemoryKind, PlaceTy, Scalar,
@@ -263,7 +264,11 @@ pub fn valtree_to_const_value<'tcx>(
 // FIXME Does this need an example?

 let (param_env, ty) = param_env_ty.into_parts();
-let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
+let mut ecx: crate::interpret::InterpCx<
+'_,
+'_,
+crate::const_eval::CompileTimeInterpreter<'_, '_>,
+> = mk_eval_cx(tcx, DUMMY_SP, param_env, CanAccessStatics::No);

 match ty.kind() {
 ty::FnDef(..) => {
@@ -612,30 +612,28 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
 }
 fn visit_projection_elem(
 &mut self,
-place_local: Local,
-proj_base: &[PlaceElem<'tcx>],
+place_ref: PlaceRef<'tcx>,
 elem: PlaceElem<'tcx>,
 context: PlaceContext,
 location: Location,
 ) {
 trace!(
-"visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
+"visit_projection_elem: place_ref={:?} elem={:?} \
 context={:?} location={:?}",
-place_local,
-proj_base,
+place_ref,
 elem,
 context,
 location,
 );

-self.super_projection_elem(place_local, proj_base, elem, context, location);
+self.super_projection_elem(place_ref, elem, context, location);

 match elem {
 ProjectionElem::Deref => {
-let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
+let base_ty = place_ref.ty(self.body, self.tcx).ty;
 if base_ty.is_unsafe_ptr() {
-if proj_base.is_empty() {
-let decl = &self.body.local_decls[place_local];
+if place_ref.projection.is_empty() {
+let decl = &self.body.local_decls[place_ref.local];
 if let LocalInfo::StaticRef { def_id, .. } = *decl.local_info() {
 let span = decl.source_info.span;
 self.check_static(def_id, span);
@@ -318,8 +318,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {

 fn visit_projection_elem(
 &mut self,
-local: Local,
-proj_base: &[PlaceElem<'tcx>],
+place_ref: PlaceRef<'tcx>,
 elem: PlaceElem<'tcx>,
 context: PlaceContext,
 location: Location,
@@ -334,7 +333,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
 ProjectionElem::Deref
 if self.mir_phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
 {
-let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;
+let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

 if base_ty.is_box() {
 self.fail(
@@ -344,8 +343,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
 }
 }
 ProjectionElem::Field(f, ty) => {
-let parent = Place { local, projection: self.tcx.mk_place_elems(proj_base) };
-let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
+let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
 let fail_out_of_bounds = |this: &Self, location| {
 this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
 };
@@ -355,7 +353,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
 location,
 format!(
 "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is `{:?}`",
-parent, f, ty, f_ty
+place_ref, f, ty, f_ty
 )
 )
 }
@@ -434,7 +432,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
 }
 _ => {}
 }
-self.super_projection_elem(local, proj_base, elem, context, location);
+self.super_projection_elem(place_ref, elem, context, location);
 }

 fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
@@ -2,7 +2,7 @@ use rustc_middle::ty::layout::{LayoutCx, LayoutError, LayoutOf, TyAndLayout, Val
 use rustc_middle::ty::{ParamEnv, ParamEnvAnd, Ty, TyCtxt};
 use rustc_target::abi::{Abi, FieldsShape, Scalar, Variants};

-use crate::const_eval::{CheckAlignment, CompileTimeInterpreter};
+use crate::const_eval::{CanAccessStatics, CheckAlignment, CompileTimeInterpreter};
 use crate::interpret::{InterpCx, MemoryKind, OpTy};

 /// Determines if this type permits "raw" initialization by just transmuting some memory into an
@@ -44,8 +44,7 @@ fn might_permit_raw_init_strict<'tcx>(
 tcx: TyCtxt<'tcx>,
 kind: ValidityRequirement,
 ) -> Result<bool, LayoutError<'tcx>> {
-let machine =
-CompileTimeInterpreter::new(/*can_access_statics:*/ false, CheckAlignment::Error);
+let machine = CompileTimeInterpreter::new(CanAccessStatics::No, CheckAlignment::Error);

 let mut cx = InterpCx::new(tcx, rustc_span::DUMMY_SP, ParamEnv::reveal_all(), machine);

@@ -3,6 +3,8 @@ use std::fmt;
 use std::fs;
 use std::io;

+use rustc_session::EarlyErrorHandler;
+
 fn arg_expand(arg: String) -> Result<Vec<String>, Error> {
 if let Some(path) = arg.strip_prefix('@') {
 let file = match fs::read_to_string(path) {
@@ -21,15 +23,12 @@ fn arg_expand(arg: String) -> Result<Vec<String>, Error> {
 /// **Note:** This function doesn't interpret argument 0 in any special way.
 /// If this function is intended to be used with command line arguments,
 /// `argv[0]` must be removed prior to calling it manually.
-pub fn arg_expand_all(at_args: &[String]) -> Vec<String> {
+pub fn arg_expand_all(handler: &EarlyErrorHandler, at_args: &[String]) -> Vec<String> {
 let mut args = Vec::new();
 for arg in at_args {
 match arg_expand(arg.clone()) {
 Ok(arg) => args.extend(arg),
-Err(err) => rustc_session::early_error(
-rustc_session::config::ErrorOutputType::default(),
-format!("Failed to load argument file: {err}"),
-),
+Err(err) => handler.early_error(format!("Failed to load argument file: {err}")),
 }
 }
 args
@@ -40,8 +40,7 @@ use rustc_session::config::{
 use rustc_session::cstore::MetadataLoader;
 use rustc_session::getopts::{self, Matches};
 use rustc_session::lint::{Lint, LintId};
-use rustc_session::{config, Session};
-use rustc_session::{early_error, early_error_no_abort, early_warn};
+use rustc_session::{config, EarlyErrorHandler, Session};
 use rustc_span::source_map::{FileLoader, FileName};
 use rustc_span::symbol::sym;
 use rustc_target::json::ToJson;
@@ -174,6 +173,7 @@ pub trait Callbacks {
 /// continue the compilation afterwards (defaults to `Compilation::Continue`)
 fn after_analysis<'tcx>(
 &mut self,
+_handler: &EarlyErrorHandler,
 _compiler: &interface::Compiler,
 _queries: &'tcx Queries<'tcx>,
 ) -> Compilation {
@@ -260,6 +260,8 @@ fn run_compiler(
 Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
 >,
 ) -> interface::Result<()> {
+let mut early_error_handler = EarlyErrorHandler::new(ErrorOutputType::default());
+
 // Throw away the first argument, the name of the binary.
 // In case of at_args being empty, as might be the case by
 // passing empty argument array to execve under some platforms,
@@ -270,22 +272,22 @@ fn run_compiler(
 // the compiler with @empty_file as argv[0] and no more arguments.
 let at_args = at_args.get(1..).unwrap_or_default();

-let args = args::arg_expand_all(at_args);
+let args = args::arg_expand_all(&early_error_handler, at_args);

-let Some(matches) = handle_options(&args) else { return Ok(()) };
+let Some(matches) = handle_options(&early_error_handler, &args) else { return Ok(()) };

-let sopts = config::build_session_options(&matches);
+let sopts = config::build_session_options(&mut early_error_handler, &matches);

 // Set parallel mode before thread pool creation, which will create `Lock`s.
 interface::set_thread_safe_mode(&sopts.unstable_opts);

 if let Some(ref code) = matches.opt_str("explain") {
-handle_explain(diagnostics_registry(), code, sopts.error_format);
+handle_explain(&early_error_handler, diagnostics_registry(), code);
 return Ok(());
 }

-let cfg = interface::parse_cfgspecs(matches.opt_strs("cfg"));
-let check_cfg = interface::parse_check_cfg(matches.opt_strs("check-cfg"));
+let cfg = interface::parse_cfgspecs(&early_error_handler, matches.opt_strs("cfg"));
+let check_cfg = interface::parse_check_cfg(&early_error_handler, matches.opt_strs("check-cfg"));
 let (odir, ofile) = make_output(&matches);
 let mut config = interface::Config {
 opts: sopts,
@@ -304,7 +306,7 @@ fn run_compiler(
 registry: diagnostics_registry(),
 };

-match make_input(config.opts.error_format, &matches.free) {
+match make_input(&early_error_handler, &matches.free) {
 Err(reported) => return Err(reported),
 Ok(Some(input)) => {
 config.input = input;
@@ -314,8 +316,13 @@ fn run_compiler(
 Ok(None) => match matches.free.len() {
 0 => {
 callbacks.config(&mut config);
+
+early_error_handler.abort_if_errors();
+
 interface::run_compiler(config, |compiler| {
 let sopts = &compiler.session().opts;
+let handler = EarlyErrorHandler::new(sopts.error_format);
+
 if sopts.describe_lints {
 let mut lint_store =
 rustc_lint::new_lint_store(compiler.session().enable_internal_lints());
@@ -329,31 +336,38 @@ fn run_compiler(
 describe_lints(compiler.session(), &lint_store, registered_lints);
 return;
 }
-let should_stop =
-print_crate_info(&**compiler.codegen_backend(), compiler.session(), false);
+let should_stop = print_crate_info(
+&handler,
+&**compiler.codegen_backend(),
+compiler.session(),
+false,
+);

 if should_stop == Compilation::Stop {
 return;
 }
-early_error(sopts.error_format, "no input filename given")
+handler.early_error("no input filename given")
 });
 return Ok(());
 }
 1 => panic!("make_input should have provided valid inputs"),
-_ => early_error(
-config.opts.error_format,
-format!(
-"multiple input filenames provided (first two filenames are `{}` and `{}`)",
-matches.free[0], matches.free[1],
-),
-),
+_ => early_error_handler.early_error(format!(
+"multiple input filenames provided (first two filenames are `{}` and `{}`)",
+matches.free[0], matches.free[1],
+)),
 },
 };

+early_error_handler.abort_if_errors();
+
 interface::run_compiler(config, |compiler| {
 let sess = compiler.session();
-let should_stop = print_crate_info(&**compiler.codegen_backend(), sess, true)
-.and_then(|| list_metadata(sess, &*compiler.codegen_backend().metadata_loader()))
+let handler = EarlyErrorHandler::new(sess.opts.error_format);
+
+let should_stop = print_crate_info(&handler, &**compiler.codegen_backend(), sess, true)
+.and_then(|| {
+list_metadata(&handler, sess, &*compiler.codegen_backend().metadata_loader())
+})
 .and_then(|| try_process_rlink(sess, compiler));

 if should_stop == Compilation::Stop {
|
|||||||
|
|
||||||
queries.global_ctxt()?.enter(|tcx| tcx.analysis(()))?;
|
queries.global_ctxt()?.enter(|tcx| tcx.analysis(()))?;
|
||||||
|
|
||||||
if callbacks.after_analysis(compiler, queries) == Compilation::Stop {
|
if callbacks.after_analysis(&handler, compiler, queries) == Compilation::Stop {
|
||||||
return early_exit();
|
return early_exit();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -475,7 +489,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<OutFileNa
|
|||||||
|
|
||||||
// Extract input (string or file and optional path) from matches.
|
// Extract input (string or file and optional path) from matches.
|
||||||
fn make_input(
|
fn make_input(
|
||||||
error_format: ErrorOutputType,
|
handler: &EarlyErrorHandler,
|
||||||
free_matches: &[String],
|
free_matches: &[String],
|
||||||
) -> Result<Option<Input>, ErrorGuaranteed> {
|
) -> Result<Option<Input>, ErrorGuaranteed> {
|
||||||
if free_matches.len() == 1 {
|
if free_matches.len() == 1 {
|
||||||
@ -485,8 +499,7 @@ fn make_input(
|
|||||||
if io::stdin().read_to_string(&mut src).is_err() {
|
if io::stdin().read_to_string(&mut src).is_err() {
|
||||||
// Immediately stop compilation if there was an issue reading
|
// Immediately stop compilation if there was an issue reading
|
||||||
// the input (for example if the input stream is not UTF-8).
|
// the input (for example if the input stream is not UTF-8).
|
||||||
let reported = early_error_no_abort(
|
let reported = handler.early_error_no_abort(
|
||||||
error_format,
|
|
||||||
"couldn't read from stdin, as it did not contain valid UTF-8",
|
"couldn't read from stdin, as it did not contain valid UTF-8",
|
||||||
);
|
);
|
||||||
return Err(reported);
|
return Err(reported);
|
||||||
@ -527,7 +540,7 @@ impl Compilation {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
|
fn handle_explain(handler: &EarlyErrorHandler, registry: Registry, code: &str) {
|
||||||
let upper_cased_code = code.to_ascii_uppercase();
|
let upper_cased_code = code.to_ascii_uppercase();
|
||||||
let normalised =
|
let normalised =
|
||||||
if upper_cased_code.starts_with('E') { upper_cased_code } else { format!("E{code:0>4}") };
|
if upper_cased_code.starts_with('E') { upper_cased_code } else { format!("E{code:0>4}") };
|
||||||
@ -557,7 +570,7 @@ fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(InvalidErrorCode) => {
|
Err(InvalidErrorCode) => {
|
||||||
early_error(output, format!("{code} is not a valid error code"));
|
handler.early_error(format!("{code} is not a valid error code"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -636,7 +649,11 @@ pub fn try_process_rlink(sess: &Session, compiler: &interface::Compiler) -> Comp
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn list_metadata(sess: &Session, metadata_loader: &dyn MetadataLoader) -> Compilation {
|
pub fn list_metadata(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
|
sess: &Session,
|
||||||
|
metadata_loader: &dyn MetadataLoader,
|
||||||
|
) -> Compilation {
|
||||||
if sess.opts.unstable_opts.ls {
|
if sess.opts.unstable_opts.ls {
|
||||||
match sess.io.input {
|
match sess.io.input {
|
||||||
Input::File(ref ifile) => {
|
Input::File(ref ifile) => {
|
||||||
@ -646,7 +663,7 @@ pub fn list_metadata(sess: &Session, metadata_loader: &dyn MetadataLoader) -> Co
|
|||||||
safe_println!("{}", String::from_utf8(v).unwrap());
|
safe_println!("{}", String::from_utf8(v).unwrap());
|
||||||
}
|
}
|
||||||
Input::Str { .. } => {
|
Input::Str { .. } => {
|
||||||
early_error(ErrorOutputType::default(), "cannot list metadata for stdin");
|
handler.early_error("cannot list metadata for stdin");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return Compilation::Stop;
|
return Compilation::Stop;
|
||||||
@ -656,6 +673,7 @@ pub fn list_metadata(sess: &Session, metadata_loader: &dyn MetadataLoader) -> Co
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn print_crate_info(
|
fn print_crate_info(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
codegen_backend: &dyn CodegenBackend,
|
codegen_backend: &dyn CodegenBackend,
|
||||||
sess: &Session,
|
sess: &Session,
|
||||||
parse_attrs: bool,
|
parse_attrs: bool,
|
||||||
@ -787,10 +805,8 @@ fn print_crate_info(
|
|||||||
.expect("unknown Apple target OS")
|
.expect("unknown Apple target OS")
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
early_error(
|
handler
|
||||||
ErrorOutputType::default(),
|
.early_error("only Apple targets currently support deployment version info")
|
||||||
"only Apple targets currently support deployment version info",
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -801,11 +817,12 @@ fn print_crate_info(
|
|||||||
/// Prints version information
|
/// Prints version information
|
||||||
///
|
///
|
||||||
/// NOTE: this is a macro to support drivers built at a different time than the main `rustc_driver` crate.
|
/// NOTE: this is a macro to support drivers built at a different time than the main `rustc_driver` crate.
|
||||||
pub macro version($binary: literal, $matches: expr) {
|
pub macro version($handler: expr, $binary: literal, $matches: expr) {
|
||||||
fn unw(x: Option<&str>) -> &str {
|
fn unw(x: Option<&str>) -> &str {
|
||||||
x.unwrap_or("unknown")
|
x.unwrap_or("unknown")
|
||||||
}
|
}
|
||||||
$crate::version_at_macro_invocation(
|
$crate::version_at_macro_invocation(
|
||||||
|
$handler,
|
||||||
$binary,
|
$binary,
|
||||||
$matches,
|
$matches,
|
||||||
unw(option_env!("CFG_VERSION")),
|
unw(option_env!("CFG_VERSION")),
|
||||||
@ -817,6 +834,7 @@ pub macro version($binary: literal, $matches: expr) {
|
|||||||
|
|
||||||
#[doc(hidden)] // use the macro instead
|
#[doc(hidden)] // use the macro instead
|
||||||
pub fn version_at_macro_invocation(
|
pub fn version_at_macro_invocation(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
binary: &str,
|
binary: &str,
|
||||||
matches: &getopts::Matches,
|
matches: &getopts::Matches,
|
||||||
version: &str,
|
version: &str,
|
||||||
@ -837,7 +855,7 @@ pub fn version_at_macro_invocation(
|
|||||||
|
|
||||||
let debug_flags = matches.opt_strs("Z");
|
let debug_flags = matches.opt_strs("Z");
|
||||||
let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
|
let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
|
||||||
get_codegen_backend(&None, backend_name).print_version();
|
get_codegen_backend(handler, &None, backend_name).print_version();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1014,7 +1032,7 @@ Available lint options:
 /// Show help for flag categories shared between rustdoc and rustc.
 ///
 /// Returns whether a help option was printed.
-pub fn describe_flag_categories(matches: &Matches) -> bool {
+pub fn describe_flag_categories(handler: &EarlyErrorHandler, matches: &Matches) -> bool {
 // Handle the special case of -Wall.
 let wall = matches.opt_strs("W");
 if wall.iter().any(|x| *x == "all") {
@@ -1036,15 +1054,12 @@ pub fn describe_flag_categories(matches: &Matches) -> bool {
 }

 if cg_flags.iter().any(|x| *x == "no-stack-check") {
-early_warn(
-ErrorOutputType::default(),
-"the --no-stack-check flag is deprecated and does nothing",
-);
+handler.early_warn("the --no-stack-check flag is deprecated and does nothing");
 }

 if cg_flags.iter().any(|x| *x == "passes=list") {
 let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
-get_codegen_backend(&None, backend_name).print_passes();
+get_codegen_backend(handler, &None, backend_name).print_passes();
 return true;
 }

@@ -1101,7 +1116,7 @@ fn print_flag_list<T>(
 ///
 /// So with all that in mind, the comments below have some more detail about the
 /// contortions done here to get things to work out correctly.
-pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
+pub fn handle_options(handler: &EarlyErrorHandler, args: &[String]) -> Option<getopts::Matches> {
 if args.is_empty() {
 // user did not write `-v` nor `-Z unstable-options`, so do not
 // include that extra information.
@@ -1127,7 +1142,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
 .map(|(flag, _)| format!("{e}. Did you mean `-{flag} {opt}`?")),
 _ => None,
 };
-early_error(ErrorOutputType::default(), msg.unwrap_or_else(|| e.to_string()));
+handler.early_error(msg.unwrap_or_else(|| e.to_string()));
 });

 // For all options we just parsed, we check a few aspects:
@@ -1141,7 +1156,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
 // we're good to go.
 // * Otherwise, if we're an unstable option then we generate an error
 // (unstable option being used on stable)
-nightly_options::check_nightly_options(&matches, &config::rustc_optgroups());
+nightly_options::check_nightly_options(handler, &matches, &config::rustc_optgroups());

 if matches.opt_present("h") || matches.opt_present("help") {
 // Only show unstable options in --help if we accept unstable options.
@@ -1151,12 +1166,12 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
 return None;
 }

-if describe_flag_categories(&matches) {
+if describe_flag_categories(handler, &matches) {
 return None;
 }

 if matches.opt_present("version") {
-version!("rustc", &matches);
+version!(handler, "rustc", &matches);
 return None;
 }

@@ -1276,7 +1291,8 @@ pub fn install_ice_hook(bug_report_url: &'static str, extra_info: fn(&Handler))
 if let Some(msg) = info.payload().downcast_ref::<String>() {
 if msg.starts_with("failed printing to stdout: ") && msg.ends_with("(os error 232)") {
 // the error code is already going to be reported when the panic unwinds up the stack
-let _ = early_error_no_abort(ErrorOutputType::default(), msg.clone());
+let handler = EarlyErrorHandler::new(ErrorOutputType::default());
+let _ = handler.early_error_no_abort(msg.clone());
 return;
 }
 };
@@ -1359,16 +1375,16 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str, extra_info:

 /// This allows tools to enable rust logging without having to magically match rustc's
 /// tracing crate version.
-pub fn init_rustc_env_logger() {
-init_env_logger("RUSTC_LOG");
+pub fn init_rustc_env_logger(handler: &EarlyErrorHandler) {
+init_env_logger(handler, "RUSTC_LOG");
 }

 /// This allows tools to enable rust logging without having to magically match rustc's
 /// tracing crate version. In contrast to `init_rustc_env_logger` it allows you to choose an env var
 /// other than `RUSTC_LOG`.
-pub fn init_env_logger(env: &str) {
+pub fn init_env_logger(handler: &EarlyErrorHandler, env: &str) {
 if let Err(error) = rustc_log::init_env_logger(env) {
-early_error(ErrorOutputType::default(), error.to_string());
+handler.early_error(error.to_string());
 }
 }

@@ -1424,7 +1440,10 @@ mod signal_handler {
 pub fn main() -> ! {
 let start_time = Instant::now();
 let start_rss = get_resident_set_size();
-init_rustc_env_logger();
+
+let handler = EarlyErrorHandler::new(ErrorOutputType::default());
+
+init_rustc_env_logger(&handler);
 signal_handler::install();
 let mut callbacks = TimePassesCallbacks::default();
 install_ice_hook(DEFAULT_BUG_REPORT_URL, |_| ());
@@ -1433,10 +1452,7 @@ pub fn main() -> ! {
 .enumerate()
 .map(|(i, arg)| {
 arg.into_string().unwrap_or_else(|arg| {
-early_error(
-ErrorOutputType::default(),
-format!("argument {i} is not valid Unicode: {arg:?}"),
-)
+handler.early_error(format!("argument {i} is not valid Unicode: {arg:?}"))
 })
 })
 .collect::<Vec<_>>();
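Aside: the driver hunks above thread an `EarlyErrorHandler` value through functions that previously called free `early_error`/`early_warn` helpers with an `ErrorOutputType` argument. Below is a minimal self-contained sketch of that shape in plain Rust (std only); it is not the `rustc_session` API, and the handler, `arg_expand_all`, and abort behavior here are simplified stand-ins for illustration.

```rust
use std::cell::Cell;
use std::process;

/// Stand-in handler: created once in `main`, then passed by reference instead
/// of every helper taking an output-format argument.
struct EarlyErrorHandler {
    error_count: Cell<usize>,
}

impl EarlyErrorHandler {
    fn new() -> Self {
        Self { error_count: Cell::new(0) }
    }
    fn early_warn(&self, msg: &str) {
        eprintln!("warning: {msg}");
    }
    fn early_error_no_abort(&self, msg: &str) {
        self.error_count.set(self.error_count.get() + 1);
        eprintln!("error: {msg}");
    }
    fn abort_if_errors(&self) {
        if self.error_count.get() > 0 {
            process::exit(1);
        }
    }
}

/// Callers take `&EarlyErrorHandler` and report through it, in the spirit of
/// the `arg_expand_all(handler, at_args)` change above (the expansion rule
/// here is a toy version).
fn arg_expand_all(handler: &EarlyErrorHandler, at_args: &[String]) -> Vec<String> {
    let mut args = Vec::new();
    for arg in at_args {
        if let Some(path) = arg.strip_prefix('@') {
            match std::fs::read_to_string(path) {
                Ok(file) => args.extend(file.lines().map(str::to_string)),
                Err(err) => handler
                    .early_error_no_abort(&format!("Failed to load argument file: {err}")),
            }
        } else {
            args.push(arg.clone());
        }
    }
    args
}

fn main() {
    let handler = EarlyErrorHandler::new();
    let at_args: Vec<String> = std::env::args().skip(1).collect();
    let args = arg_expand_all(&handler, &at_args);
    if args.is_empty() {
        handler.early_warn("no arguments given");
    }
    handler.abort_if_errors();
    println!("expanded args: {args:?}");
}
```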
@@ -445,7 +445,7 @@ impl<'a> StripUnconfigured<'a> {
 /// If attributes are not allowed on expressions, emit an error for `attr`
 #[instrument(level = "trace", skip(self))]
 pub(crate) fn maybe_emit_expr_attr_err(&self, attr: &Attribute) {
-if !self.features.map_or(true, |features| features.stmt_expr_attributes) {
+if self.features.is_some_and(|features| !features.stmt_expr_attributes) {
 let mut err = feature_err(
 &self.sess.parse_sess,
 sym::stmt_expr_attributes,
@@ -179,7 +179,8 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
 let mut previous_defns = HashMap::new();
 let mut message_refs = Vec::new();
 for entry in resource.entries() {
-if let Entry::Message(Message { id: Identifier { name }, attributes, value, .. }) = entry {
+if let Entry::Message(msg) = entry {
+let Message { id: Identifier { name }, attributes, value, .. } = msg;
 let _ = previous_defns.entry(name.to_string()).or_insert(resource_span);
 if name.contains('-') {
 Diagnostic::spanned(
@@ -229,9 +230,10 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
 continue;
 }

-let msg = format!("Constant referring to Fluent message `{name}` from `{crate_name}`");
+let docstr =
+format!("Constant referring to Fluent message `{name}` from `{crate_name}`");
 constants.extend(quote! {
-#[doc = #msg]
+#[doc = #docstr]
 pub const #snake_name: crate::DiagnosticMessage =
 crate::DiagnosticMessage::FluentIdentifier(
 std::borrow::Cow::Borrowed(#name),
@@ -269,6 +271,15 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
 );
 });
 }
+
+// Record variables referenced by these messages so we can produce
+// tests in the derive diagnostics to validate them.
+let ident = quote::format_ident!("{snake_name}_refs");
+let vrefs = variable_references(msg);
+constants.extend(quote! {
+#[cfg(test)]
+pub const #ident: &[&str] = &[#(#vrefs),*];
+})
 }
 }

@@ -334,3 +345,28 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
 }
 .into()
 }
+
+fn variable_references<'a>(msg: &Message<&'a str>) -> Vec<&'a str> {
+let mut refs = vec![];
+if let Some(Pattern { elements }) = &msg.value {
+for elt in elements {
+if let PatternElement::Placeable {
+expression: Expression::Inline(InlineExpression::VariableReference { id }),
+} = elt
+{
+refs.push(id.name);
+}
+}
+}
+for attr in &msg.attributes {
+for elt in &attr.value.elements {
+if let PatternElement::Placeable {
+expression: Expression::Inline(InlineExpression::VariableReference { id }),
+} = elt
+{
+refs.push(id.name);
+}
+}
+}
+refs
+}
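Aside: `variable_references` above walks the `fluent-syntax` AST of a message and its attributes, collecting every `$variable` placeholder so the generated `*_refs` constants can be checked in tests. As a rough, self-contained illustration of what such a list contains, here is a std-only sketch that scans a Fluent-style message string instead of walking the real AST; the function and its parsing rule are invented for this example and are much cruder than the actual macro.

```rust
/// Collect `$variable` names from a Fluent-style message value by a naive scan.
fn variable_references(message: &str) -> Vec<String> {
    let mut refs = Vec::new();
    let mut rest = message;
    while let Some(start) = rest.find('$') {
        let tail = &rest[start + 1..];
        // A variable name ends at the first character that is not part of an identifier.
        let end = tail
            .find(|c: char| !(c.is_ascii_alphanumeric() || c == '_' || c == '-'))
            .unwrap_or(tail.len());
        if end > 0 {
            refs.push(tail[..end].to_string());
        }
        rest = &tail[end..];
    }
    refs
}

fn main() {
    let msg = "not all trait items implemented, missing: `{$missing_item_name}`";
    assert_eq!(variable_references(msg), vec!["missing_item_name".to_string()]);
}
```

A list like this is what lets a test catch the `{$r}` vs `{$reason}` mismatch fixed in one of the hunks below.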
@@ -1719,6 +1719,7 @@ impl Expr<'_> {
 ExprKind::Break(..) => ExprPrecedence::Break,
 ExprKind::Continue(..) => ExprPrecedence::Continue,
 ExprKind::Ret(..) => ExprPrecedence::Ret,
+ExprKind::Become(..) => ExprPrecedence::Become,
 ExprKind::InlineAsm(..) => ExprPrecedence::InlineAsm,
 ExprKind::OffsetOf(..) => ExprPrecedence::OffsetOf,
 ExprKind::Struct(..) => ExprPrecedence::Struct,
@@ -1776,6 +1777,7 @@ impl Expr<'_> {
 | ExprKind::Break(..)
 | ExprKind::Continue(..)
 | ExprKind::Ret(..)
+| ExprKind::Become(..)
 | ExprKind::Let(..)
 | ExprKind::Loop(..)
 | ExprKind::Assign(..)
@@ -1866,6 +1868,7 @@ impl Expr<'_> {
 | ExprKind::Break(..)
 | ExprKind::Continue(..)
 | ExprKind::Ret(..)
+| ExprKind::Become(..)
 | ExprKind::Let(..)
 | ExprKind::Loop(..)
 | ExprKind::Assign(..)
@@ -2025,6 +2028,8 @@ pub enum ExprKind<'hir> {
 Continue(Destination),
 /// A `return`, with an optional value to be returned.
 Ret(Option<&'hir Expr<'hir>>),
+/// A `become`, with the value to be returned.
+Become(&'hir Expr<'hir>),

 /// Inline assembly (from `asm!`), with its outputs and inputs.
 InlineAsm(&'hir InlineAsm<'hir>),
@@ -791,6 +791,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>)
 ExprKind::Ret(ref optional_expression) => {
 walk_list!(visitor, visit_expr, optional_expression);
 }
+ExprKind::Become(ref expr) => visitor.visit_expr(expr),
 ExprKind::InlineAsm(ref asm) => {
 visitor.visit_inline_asm(asm, expression.hir_id);
 }
@@ -137,7 +137,7 @@ hir_analysis_missing_trait_item_suggestion = implement the missing item: `{$snip

 hir_analysis_missing_trait_item_unstable = not all trait items implemented, missing: `{$missing_item_name}`
 .note = default implementation of `{$missing_item_name}` is unstable
-.some_note = use of unstable library feature '{$feature}': {$r}
+.some_note = use of unstable library feature '{$feature}': {$reason}
 .none_note = use of unstable library feature '{$feature}'

 hir_analysis_missing_type_params =
@@ -945,40 +945,30 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {

 let mut trait_bounds = vec![];
 let mut projection_bounds = vec![];
-for (clause, span) in bounds.clauses() {
-let pred: ty::Predicate<'tcx> = clause.as_predicate();
+for (pred, span) in bounds.clauses() {
 let bound_pred = pred.kind();
 match bound_pred.skip_binder() {
-ty::PredicateKind::Clause(clause) => match clause {
-ty::ClauseKind::Trait(trait_pred) => {
-assert_eq!(trait_pred.polarity, ty::ImplPolarity::Positive);
-trait_bounds.push((
-bound_pred.rebind(trait_pred.trait_ref),
-span,
-trait_pred.constness,
-));
-}
-ty::ClauseKind::Projection(proj) => {
-projection_bounds.push((bound_pred.rebind(proj), span));
-}
-ty::ClauseKind::TypeOutlives(_) => {
-// Do nothing, we deal with regions separately
-}
-ty::ClauseKind::RegionOutlives(_)
-| ty::ClauseKind::ConstArgHasType(..)
-| ty::ClauseKind::WellFormed(_)
-| ty::ClauseKind::ConstEvaluatable(_) => {
+ty::ClauseKind::Trait(trait_pred) => {
+assert_eq!(trait_pred.polarity, ty::ImplPolarity::Positive);
+trait_bounds.push((
+bound_pred.rebind(trait_pred.trait_ref),
+span,
+trait_pred.constness,
+));
+}
+ty::ClauseKind::Projection(proj) => {
+projection_bounds.push((bound_pred.rebind(proj), span));
+}
+ty::ClauseKind::TypeOutlives(_) => {
+// Do nothing, we deal with regions separately
+}
+ty::ClauseKind::RegionOutlives(_)
+| ty::ClauseKind::ConstArgHasType(..)
+| ty::ClauseKind::WellFormed(_)
+| ty::ClauseKind::ConstEvaluatable(_)
+| ty::ClauseKind::TypeWellFormedFromEnv(_) => {
 bug!()
 }
-},
-ty::PredicateKind::AliasRelate(..)
-| ty::PredicateKind::ObjectSafe(_)
-| ty::PredicateKind::ClosureKind(_, _, _)
-| ty::PredicateKind::Subtype(_)
-| ty::PredicateKind::Coerce(_)
-| ty::PredicateKind::ConstEquate(_, _)
-| ty::PredicateKind::TypeWellFormedFromEnv(_)
-| ty::PredicateKind::Ambiguous => bug!(),
 }
 }

@@ -1425,9 +1415,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 || {
 traits::transitive_bounds_that_define_assoc_item(
 tcx,
-predicates.iter().filter_map(|(p, _)| {
-Some(p.to_opt_poly_trait_pred()?.map_bound(|t| t.trait_ref))
-}),
+predicates
+.iter()
+.filter_map(|(p, _)| Some(p.as_trait_clause()?.map_bound(|t| t.trait_ref))),
 assoc_name,
 )
 },
|
@ -220,7 +220,7 @@ fn compare_method_predicate_entailment<'tcx>(
|
|||||||
// the new hybrid bounds we computed.
|
// the new hybrid bounds we computed.
|
||||||
let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_def_id);
|
let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_def_id);
|
||||||
let param_env = ty::ParamEnv::new(
|
let param_env = ty::ParamEnv::new(
|
||||||
tcx.mk_predicates(&hybrid_preds.predicates),
|
tcx.mk_clauses(&hybrid_preds.predicates),
|
||||||
Reveal::UserFacing,
|
Reveal::UserFacing,
|
||||||
hir::Constness::NotConst,
|
hir::Constness::NotConst,
|
||||||
);
|
);
|
||||||
@ -1835,7 +1835,7 @@ fn compare_type_predicate_entailment<'tcx>(
|
|||||||
let impl_ty_span = tcx.def_span(impl_ty_def_id);
|
let impl_ty_span = tcx.def_span(impl_ty_def_id);
|
||||||
let normalize_cause = traits::ObligationCause::misc(impl_ty_span, impl_ty_def_id);
|
let normalize_cause = traits::ObligationCause::misc(impl_ty_span, impl_ty_def_id);
|
||||||
let param_env = ty::ParamEnv::new(
|
let param_env = ty::ParamEnv::new(
|
||||||
tcx.mk_predicates(&hybrid_preds.predicates),
|
tcx.mk_clauses(&hybrid_preds.predicates),
|
||||||
Reveal::UserFacing,
|
Reveal::UserFacing,
|
||||||
hir::Constness::NotConst,
|
hir::Constness::NotConst,
|
||||||
);
|
);
|
||||||
@ -2011,7 +2011,7 @@ pub(super) fn check_type_bounds<'tcx>(
|
|||||||
.to_predicate(tcx),
|
.to_predicate(tcx),
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
ty::ParamEnv::new(tcx.mk_predicates(&predicates), Reveal::UserFacing, param_env.constness())
|
ty::ParamEnv::new(tcx.mk_clauses(&predicates), Reveal::UserFacing, param_env.constness())
|
||||||
};
|
};
|
||||||
debug!(?normalize_param_env);
|
debug!(?normalize_param_env);
|
||||||
|
|
||||||
|
@@ -296,7 +296,7 @@ fn default_body_is_unstable(
 /// Re-sugar `ty::GenericPredicates` in a way suitable to be used in structured suggestions.
 fn bounds_from_generic_predicates<'tcx>(
 tcx: TyCtxt<'tcx>,
-predicates: impl IntoIterator<Item = (ty::Predicate<'tcx>, Span)>,
+predicates: impl IntoIterator<Item = (ty::Clause<'tcx>, Span)>,
 ) -> (String, String) {
 let mut types: FxHashMap<Ty<'tcx>, Vec<DefId>> = FxHashMap::default();
 let mut projections = vec![];
@@ -304,7 +304,7 @@ fn bounds_from_generic_predicates<'tcx>(
 debug!("predicate {:?}", predicate);
 let bound_predicate = predicate.kind();
 match bound_predicate.skip_binder() {
-ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_predicate)) => {
+ty::ClauseKind::Trait(trait_predicate) => {
 let entry = types.entry(trait_predicate.self_ty()).or_default();
 let def_id = trait_predicate.def_id();
 if Some(def_id) != tcx.lang_items().sized_trait() {
@@ -313,7 +313,7 @@ fn bounds_from_generic_predicates<'tcx>(
 entry.push(trait_predicate.def_id());
 }
 }
-ty::PredicateKind::Clause(ty::ClauseKind::Projection(projection_pred)) => {
+ty::ClauseKind::Projection(projection_pred) => {
 projections.push(bound_predicate.rebind(projection_pred));
 }
 _ => {}
@@ -362,7 +362,7 @@ fn fn_sig_suggestion<'tcx>(
 tcx: TyCtxt<'tcx>,
 sig: ty::FnSig<'tcx>,
 ident: Ident,
-predicates: impl IntoIterator<Item = (ty::Predicate<'tcx>, Span)>,
+predicates: impl IntoIterator<Item = (ty::Clause<'tcx>, Span)>,
 assoc: ty::AssocItem,
 ) -> String {
 let args = sig
@ -15,7 +15,7 @@ use rustc_middle::mir::ConstraintCategory;
|
|||||||
use rustc_middle::query::Providers;
|
use rustc_middle::query::Providers;
|
||||||
use rustc_middle::ty::trait_def::TraitSpecializationKind;
|
use rustc_middle::ty::trait_def::TraitSpecializationKind;
|
||||||
use rustc_middle::ty::{
|
use rustc_middle::ty::{
|
||||||
self, AdtKind, GenericParamDefKind, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
|
self, AdtKind, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
|
||||||
TypeVisitable, TypeVisitableExt, TypeVisitor,
|
TypeVisitable, TypeVisitableExt, TypeVisitor,
|
||||||
};
|
};
|
||||||
use rustc_middle::ty::{GenericArgKind, InternalSubsts};
|
use rustc_middle::ty::{GenericArgKind, InternalSubsts};
|
||||||
@ -322,7 +322,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
|
|||||||
// Gather the bounds with which all other items inside of this trait constrain the GAT.
|
// Gather the bounds with which all other items inside of this trait constrain the GAT.
|
||||||
// This is calculated by taking the intersection of the bounds that each item
|
// This is calculated by taking the intersection of the bounds that each item
|
||||||
// constrains the GAT with individually.
|
// constrains the GAT with individually.
|
||||||
let mut new_required_bounds: Option<FxHashSet<ty::Predicate<'_>>> = None;
|
let mut new_required_bounds: Option<FxHashSet<ty::Clause<'_>>> = None;
|
||||||
for item in associated_items {
|
for item in associated_items {
|
||||||
let item_def_id = item.id.owner_id;
|
let item_def_id = item.id.owner_id;
|
||||||
// Skip our own GAT, since it does not constrain itself at all.
|
// Skip our own GAT, since it does not constrain itself at all.
|
||||||
@ -419,9 +419,17 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
|
|||||||
let mut unsatisfied_bounds: Vec<_> = required_bounds
|
let mut unsatisfied_bounds: Vec<_> = required_bounds
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|clause| match clause.kind().skip_binder() {
|
.filter(|clause| match clause.kind().skip_binder() {
|
||||||
ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(
|
ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => {
|
||||||
ty::OutlivesPredicate(a, b),
|
!region_known_to_outlive(
|
||||||
)) => !region_known_to_outlive(
|
tcx,
|
||||||
|
gat_def_id.def_id,
|
||||||
|
param_env,
|
||||||
|
&FxIndexSet::default(),
|
||||||
|
a,
|
||||||
|
b,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(a, b)) => !ty_known_to_outlive(
|
||||||
tcx,
|
tcx,
|
||||||
gat_def_id.def_id,
|
gat_def_id.def_id,
|
||||||
param_env,
|
param_env,
|
||||||
@ -429,18 +437,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
|
|||||||
a,
|
a,
|
||||||
b,
|
b,
|
||||||
),
|
),
|
||||||
ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(
|
_ => bug!("Unexpected ClauseKind"),
|
||||||
a,
|
|
||||||
b,
|
|
||||||
))) => !ty_known_to_outlive(
|
|
||||||
tcx,
|
|
||||||
gat_def_id.def_id,
|
|
||||||
param_env,
|
|
||||||
&FxIndexSet::default(),
|
|
||||||
a,
|
|
||||||
b,
|
|
||||||
),
|
|
||||||
_ => bug!("Unexpected PredicateKind"),
|
|
||||||
})
|
})
|
||||||
.map(|clause| clause.to_string())
|
.map(|clause| clause.to_string())
|
||||||
.collect();
|
.collect();
|
||||||
@@ -488,7 +485,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
 fn augment_param_env<'tcx>(
     tcx: TyCtxt<'tcx>,
     param_env: ty::ParamEnv<'tcx>,
-    new_predicates: Option<&FxHashSet<ty::Predicate<'tcx>>>,
+    new_predicates: Option<&FxHashSet<ty::Clause<'tcx>>>,
 ) -> ty::ParamEnv<'tcx> {
     let Some(new_predicates) = new_predicates else {
         return param_env;
@@ -498,7 +495,7 @@ fn augment_param_env<'tcx>(
         return param_env;
     }

-    let bounds = tcx.mk_predicates_from_iter(
+    let bounds = tcx.mk_clauses_from_iter(
         param_env.caller_bounds().iter().chain(new_predicates.iter().cloned()),
     );
     // FIXME(compiler-errors): Perhaps there is a case where we need to normalize this
@@ -524,7 +521,7 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>(
     wf_tys: &FxIndexSet<Ty<'tcx>>,
     gat_def_id: LocalDefId,
     gat_generics: &'tcx ty::Generics,
-) -> Option<FxHashSet<ty::Predicate<'tcx>>> {
+) -> Option<FxHashSet<ty::Clause<'tcx>>> {
     // The bounds we that we would require from `to_check`
     let mut bounds = FxHashSet::default();

@@ -573,11 +570,10 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>(
                 );
                 // The predicate we expect to see. (In our example,
                 // `Self: 'me`.)
-                let clause = ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(
-                    ty::OutlivesPredicate(ty_param, region_param),
-                ));
-                let clause = tcx.mk_predicate(ty::Binder::dummy(clause));
-                bounds.insert(clause);
+                bounds.insert(
+                    ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty_param, region_param))
+                        .to_predicate(tcx),
+                );
             }
         }

@@ -622,11 +618,13 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>(
                     },
                 );
                 // The predicate we expect to see.
-                let clause = ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(
-                    ty::OutlivesPredicate(region_a_param, region_b_param),
-                ));
-                let clause = tcx.mk_predicate(ty::Binder::dummy(clause));
-                bounds.insert(clause);
+                bounds.insert(
+                    ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(
+                        region_a_param,
+                        region_b_param,
+                    ))
+                    .to_predicate(tcx),
+                );
             }
         }
     }
@@ -1406,7 +1404,7 @@ fn check_where_clauses<'tcx>(wfcx: &WfCheckingCtxt<'_, 'tcx>, span: Span, def_id
             infcx,
             wfcx.param_env.without_const(),
             wfcx.body_def_id,
-            p,
+            p.as_predicate(),
             sp,
         )
     });
@@ -1549,7 +1547,7 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ImplTraitInTraitFinder<'_, 'tcx> {
         {
             let opaque_ty = tcx.fold_regions(unshifted_opaque_ty, |re, _depth| {
                 match re.kind() {
-                    ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReError(_) => re,
+                    ty::ReEarlyBound(_) | ty::ReFree(_) | ty::ReError(_) | ty::ReStatic => re,
                     r => bug!("unexpected region: {r:?}"),
                 }
             });
@@ -1875,9 +1873,7 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
             // We lower empty bounds like `Vec<dyn Copy>:` as
             // `WellFormed(Vec<dyn Copy>)`, which will later get checked by
             // regular WF checking
-            if let ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(..)) =
-                pred.kind().skip_binder()
-            {
+            if let ty::ClauseKind::WellFormed(..) = pred.kind().skip_binder() {
                 continue;
             }
             // Match the existing behavior.
@@ -140,7 +140,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
         impl1_def_id: DefId,
         impl2_def_id: DefId,
     ) {
-        traits::overlapping_impls(
+        let maybe_overlap = traits::overlapping_impls(
             self.tcx,
             impl1_def_id,
             impl2_def_id,
@@ -148,11 +148,11 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
             // inherent impls without warning.
             SkipLeakCheck::Yes,
             overlap_mode,
-        )
-        .map_or(true, |overlap| {
+        );
+
+        if let Some(overlap) = maybe_overlap {
             self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id, overlap);
-            false
-        });
+        }
     }

     fn check_item(&mut self, id: hir::ItemId) {
@@ -38,16 +38,12 @@ fn associated_type_bounds<'tcx>(
         .iter()
         .copied()
         .filter(|(pred, _)| match pred.kind().skip_binder() {
-            ty::PredicateKind::Clause(ty::ClauseKind::Trait(tr)) => tr.self_ty() == item_ty,
-            ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) => {
-                proj.projection_ty.self_ty() == item_ty
-            }
-            ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(outlives)) => {
-                outlives.0 == item_ty
-            }
+            ty::ClauseKind::Trait(tr) => tr.self_ty() == item_ty,
+            ty::ClauseKind::Projection(proj) => proj.projection_ty.self_ty() == item_ty,
+            ty::ClauseKind::TypeOutlives(outlives) => outlives.0 == item_ty,
             _ => false,
         })
-        .map(|(pred, span)| (pred.expect_clause(), span));
+        .map(|(clause, span)| (clause, span));

     let all_bounds = tcx.arena.alloc_from_iter(bounds.clauses().chain(bounds_from_parent));
     debug!(
@@ -75,7 +75,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen

     // We use an `IndexSet` to preserve order of insertion.
     // Preserving the order of insertion is important here so as not to break UI tests.
-    let mut predicates: FxIndexSet<(ty::Predicate<'_>, Span)> = FxIndexSet::default();
+    let mut predicates: FxIndexSet<(ty::Clause<'_>, Span)> = FxIndexSet::default();

     let ast_generics = match node {
         Node::TraitItem(item) => item.generics,
@@ -126,8 +126,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
         predicates.extend(
             icx.astconv()
                 .compute_bounds(tcx.types.self_param, self_bounds, OnlySelfBounds(false))
-                .clauses()
-                .map(|(clause, span)| (clause.as_predicate(), span)),
+                .clauses(),
         );
     }

@@ -176,8 +175,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
                     param.span,
                 );
                 trace!(?bounds);
-                predicates
-                    .extend(bounds.clauses().map(|(clause, span)| (clause.as_predicate(), span)));
+                predicates.extend(bounds.clauses());
                 trace!(?predicates);
             }
             GenericParamKind::Const { .. } => {
@@ -188,11 +186,10 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen

                 let ct = tcx.mk_const(param_const, ct_ty);

-                let predicate = ty::Binder::dummy(ty::PredicateKind::Clause(
-                    ty::ClauseKind::ConstArgHasType(ct, ct_ty),
-                ))
-                .to_predicate(tcx);
-                predicates.insert((predicate, param.span));
+                predicates.insert((
+                    ty::ClauseKind::ConstArgHasType(ct, ct_ty).to_predicate(tcx),
+                    param.span,
+                ));

                 index += 1;
             }
@@ -221,7 +218,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
                 } else {
                     let span = bound_pred.bounded_ty.span;
                     let predicate = ty::Binder::bind_with_vars(
-                        ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(ty.into())),
+                        ty::ClauseKind::WellFormed(ty.into()),
                         bound_vars,
                     );
                     predicates.insert((predicate.to_predicate(tcx), span));
@@ -236,8 +233,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
                     bound_vars,
                     OnlySelfBounds(false),
                 );
-                predicates
-                    .extend(bounds.clauses().map(|(clause, span)| (clause.as_predicate(), span)));
+                predicates.extend(bounds.clauses());
             }

             hir::WherePredicate::RegionPredicate(region_pred) => {
@@ -249,11 +245,8 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
                     }
                     _ => bug!(),
                 };
-                let pred = ty::Binder::dummy(ty::PredicateKind::Clause(
-                    ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(r1, r2)),
-                ))
-                .to_predicate(icx.tcx);
+                let pred = ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(r1, r2))
+                    .to_predicate(icx.tcx);

                 (pred, span)
             }))
         }
@@ -318,17 +311,13 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
                 },
             );
             predicates.push((
-                ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(
-                    ty::OutlivesPredicate(orig_region, dup_region),
-                )))
-                .to_predicate(icx.tcx),
+                ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(orig_region, dup_region))
+                    .to_predicate(icx.tcx),
                 duplicate.span,
             ));
             predicates.push((
-                ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(
-                    ty::OutlivesPredicate(dup_region, orig_region),
-                )))
-                .to_predicate(icx.tcx),
+                ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(dup_region, orig_region))
+                    .to_predicate(icx.tcx),
                 duplicate.span,
             ));
         }
@@ -344,10 +333,10 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 fn const_evaluatable_predicates_of(
     tcx: TyCtxt<'_>,
     def_id: LocalDefId,
-) -> FxIndexSet<(ty::Predicate<'_>, Span)> {
+) -> FxIndexSet<(ty::Clause<'_>, Span)> {
     struct ConstCollector<'tcx> {
         tcx: TyCtxt<'tcx>,
-        preds: FxIndexSet<(ty::Predicate<'tcx>, Span)>,
+        preds: FxIndexSet<(ty::Clause<'tcx>, Span)>,
     }

     impl<'tcx> intravisit::Visitor<'tcx> for ConstCollector<'tcx> {
@@ -355,13 +344,8 @@ fn const_evaluatable_predicates_of(
             let ct = ty::Const::from_anon_const(self.tcx, c.def_id);
             if let ty::ConstKind::Unevaluated(_) = ct.kind() {
                 let span = self.tcx.def_span(c.def_id);
-                self.preds.insert((
-                    ty::Binder::dummy(ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(
-                        ct,
-                    )))
-                    .to_predicate(self.tcx),
-                    span,
-                ));
+                self.preds
+                    .insert((ty::ClauseKind::ConstEvaluatable(ct).to_predicate(self.tcx), span));
             }
         }

@@ -449,15 +433,9 @@ pub(super) fn explicit_predicates_of<'tcx>(
             .iter()
             .copied()
             .filter(|(pred, _)| match pred.kind().skip_binder() {
-                ty::PredicateKind::Clause(ty::ClauseKind::Trait(tr)) => {
-                    !is_assoc_item_ty(tr.self_ty())
-                }
-                ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) => {
-                    !is_assoc_item_ty(proj.projection_ty.self_ty())
-                }
-                ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(outlives)) => {
-                    !is_assoc_item_ty(outlives.0)
-                }
+                ty::ClauseKind::Trait(tr) => !is_assoc_item_ty(tr.self_ty()),
+                ty::ClauseKind::Projection(proj) => !is_assoc_item_ty(proj.projection_ty.self_ty()),
+                ty::ClauseKind::TypeOutlives(outlives) => !is_assoc_item_ty(outlives.0),
                 _ => true,
             })
             .collect();
@@ -498,9 +476,7 @@ pub(super) fn explicit_predicates_of<'tcx>(
             .predicates
             .into_iter()
             .filter(|(pred, _)| {
-                if let ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, _)) =
-                    pred.kind().skip_binder()
-                {
+                if let ty::ClauseKind::ConstArgHasType(ct, _) = pred.kind().skip_binder() {
                     match ct.kind() {
                         ty::ConstKind::Param(param_const) => {
                             let defaulted_param_idx = tcx
@@ -665,12 +641,8 @@ pub(super) fn implied_predicates_with_filter(
     };

     // Combine the two lists to form the complete set of superbounds:
-    let implied_bounds = &*tcx.arena.alloc_from_iter(
-        superbounds
-            .clauses()
-            .map(|(clause, span)| (clause.as_predicate(), span))
-            .chain(where_bounds_that_match),
-    );
+    let implied_bounds =
+        &*tcx.arena.alloc_from_iter(superbounds.clauses().chain(where_bounds_that_match));
     debug!(?implied_bounds);

     // Now require that immediate supertraits are converted, which will, in
@@ -679,7 +651,7 @@ pub(super) fn implied_predicates_with_filter(
     if matches!(filter, PredicateFilter::SelfOnly) {
         for &(pred, span) in implied_bounds {
             debug!("superbound: {:?}", pred);
-            if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(bound)) = pred.kind().skip_binder()
+            if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
                 && bound.polarity == ty::ImplPolarity::Positive
             {
                 tcx.at(span).super_predicates_of(bound.def_id());
@@ -776,9 +748,7 @@ pub(super) fn type_param_predicates(
         )
         .into_iter()
         .filter(|(predicate, _)| match predicate.kind().skip_binder() {
-            ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) => {
-                data.self_ty().is_param(index)
-            }
+            ty::ClauseKind::Trait(data) => data.self_ty().is_param(index),
             _ => false,
         }),
     );
@@ -800,7 +770,7 @@ impl<'tcx> ItemCtxt<'tcx> {
         ty: Ty<'tcx>,
         only_self_bounds: OnlySelfBounds,
         assoc_name: Option<Ident>,
-    ) -> Vec<(ty::Predicate<'tcx>, Span)> {
+    ) -> Vec<(ty::Clause<'tcx>, Span)> {
         let mut bounds = Bounds::default();

         for predicate in ast_generics.predicates {
@@ -829,7 +799,7 @@ impl<'tcx> ItemCtxt<'tcx> {
             );
         }

-        bounds.clauses().map(|(clause, span)| (clause.as_predicate(), span)).collect()
+        bounds.clauses().collect()
     }

     #[instrument(level = "trace", skip(self))]
@@ -1761,7 +1761,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
         let obligations = predicates.predicates.iter().filter_map(|&(pred, _)| {
             let bound_predicate = pred.kind();
             match bound_predicate.skip_binder() {
-                ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) => {
+                ty::ClauseKind::Trait(data) => {
                     // The order here needs to match what we would get from `subst_supertrait`
                     let pred_bound_vars = bound_predicate.bound_vars();
                     let mut all_bound_vars = bound_vars.clone();
@@ -151,7 +151,7 @@ pub fn identify_constrained_generic_params<'tcx>(
 /// think of any.
 pub fn setup_constraining_predicates<'tcx>(
     tcx: TyCtxt<'tcx>,
-    predicates: &mut [(ty::Predicate<'tcx>, Span)],
+    predicates: &mut [(ty::Clause<'tcx>, Span)],
     impl_trait_ref: Option<ty::TraitRef<'tcx>>,
     input_parameters: &mut FxHashSet<Parameter>,
 ) {
@@ -187,9 +187,7 @@ pub fn setup_constraining_predicates<'tcx>(
         for j in i..predicates.len() {
             // Note that we don't have to care about binders here,
             // as the impl trait ref never contains any late-bound regions.
-            if let ty::PredicateKind::Clause(ty::ClauseKind::Projection(projection)) =
-                predicates[j].0.kind().skip_binder()
-            {
+            if let ty::ClauseKind::Projection(projection) = predicates[j].0.kind().skip_binder() {
                 // Special case: watch out for some kind of sneaky attempt
                 // to project out an associated type defined by this very
                 // trait.
@@ -235,10 +235,8 @@ fn unconstrained_parent_impl_substs<'tcx>(
     // what we want here. We want only a list of constrained parameters while
     // the functions in `cgp` add the constrained parameters to a list of
     // unconstrained parameters.
-    for (predicate, _) in impl_generic_predicates.predicates.iter() {
-        if let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) =
-            predicate.kind().skip_binder()
-        {
+    for (clause, _) in impl_generic_predicates.predicates.iter() {
+        if let ty::ClauseKind::Projection(proj) = clause.kind().skip_binder() {
             let projection_ty = proj.projection_ty;
             let projected_ty = proj.term;

@@ -340,8 +338,11 @@ fn check_predicates<'tcx>(
     impl2_substs: SubstsRef<'tcx>,
     span: Span,
 ) {
-    let instantiated = tcx.predicates_of(impl1_def_id).instantiate(tcx, impl1_substs);
-    let impl1_predicates: Vec<_> = traits::elaborate(tcx, instantiated.into_iter()).collect();
+    let impl1_predicates: Vec<_> = traits::elaborate(
+        tcx,
+        tcx.predicates_of(impl1_def_id).instantiate(tcx, impl1_substs).into_iter(),
+    )
+    .collect();

     let mut impl2_predicates = if impl2_node.is_from_trait() {
         // Always applicable traits have to be always applicable without any
@@ -352,8 +353,8 @@ fn check_predicates<'tcx>(
             tcx,
             tcx.predicates_of(impl2_node.def_id())
                 .instantiate(tcx, impl2_substs)
-                .predicates
-                .into_iter(),
+                .into_iter()
+                .map(|(c, _s)| c.as_predicate()),
         )
         .collect()
     };
@@ -377,13 +378,13 @@ fn check_predicates<'tcx>(
     let always_applicable_traits = impl1_predicates
         .iter()
         .copied()
-        .filter(|&(predicate, _)| {
+        .filter(|(clause, _span)| {
             matches!(
-                trait_predicate_kind(tcx, predicate),
+                trait_predicate_kind(tcx, clause.as_predicate()),
                 Some(TraitSpecializationKind::AlwaysApplicable)
             )
         })
-        .map(|(pred, _span)| pred);
+        .map(|(c, _span)| c.as_predicate());

     // Include the well-formed predicates of the type parameters of the impl.
     for arg in tcx.impl_trait_ref(impl1_def_id).unwrap().subst_identity().substs {
@@ -398,9 +399,12 @@ fn check_predicates<'tcx>(
     }
     impl2_predicates.extend(traits::elaborate(tcx, always_applicable_traits));

-    for (predicate, span) in impl1_predicates {
-        if !impl2_predicates.iter().any(|pred2| trait_predicates_eq(tcx, predicate, *pred2, span)) {
-            check_specialization_on(tcx, predicate, span)
+    for (clause, span) in impl1_predicates {
+        if !impl2_predicates
+            .iter()
+            .any(|pred2| trait_predicates_eq(tcx, clause.as_predicate(), *pred2, span))
+        {
+            check_specialization_on(tcx, clause.as_predicate(), span)
         }
     }
 }
@@ -550,6 +554,6 @@ fn trait_predicate_kind<'tcx>(
         | ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..))
         | ty::PredicateKind::ConstEquate(..)
         | ty::PredicateKind::Ambiguous
-        | ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
+        | ty::PredicateKind::Clause(ty::ClauseKind::TypeWellFormedFromEnv(..)) => None,
     }
 }
@@ -30,40 +30,31 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> {
             // process predicates and convert to `RequiredPredicates` entry, see below
             for &(predicate, span) in predicates.predicates {
                 match predicate.kind().skip_binder() {
-                    ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(OutlivesPredicate(
-                        ty,
-                        reg,
-                    ))) => insert_outlives_predicate(
-                        tcx,
-                        ty.into(),
-                        reg,
-                        span,
-                        &mut required_predicates,
-                    ),
+                    ty::ClauseKind::TypeOutlives(OutlivesPredicate(ty, reg)) => {
+                        insert_outlives_predicate(
+                            tcx,
+                            ty.into(),
+                            reg,
+                            span,
+                            &mut required_predicates,
+                        )
+                    }

-                    ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(
-                        OutlivesPredicate(reg1, reg2),
-                    )) => insert_outlives_predicate(
-                        tcx,
-                        reg1.into(),
-                        reg2,
-                        span,
-                        &mut required_predicates,
-                    ),
+                    ty::ClauseKind::RegionOutlives(OutlivesPredicate(reg1, reg2)) => {
+                        insert_outlives_predicate(
+                            tcx,
+                            reg1.into(),
+                            reg2,
+                            span,
+                            &mut required_predicates,
+                        )
+                    }
-
-                    ty::PredicateKind::Clause(ty::ClauseKind::Trait(..))
-                    | ty::PredicateKind::Clause(ty::ClauseKind::Projection(..))
-                    | ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(..))
-                    | ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(..))
-                    | ty::PredicateKind::AliasRelate(..)
-                    | ty::PredicateKind::ObjectSafe(..)
-                    | ty::PredicateKind::ClosureKind(..)
-                    | ty::PredicateKind::Subtype(..)
-                    | ty::PredicateKind::Coerce(..)
-                    | ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..))
-                    | ty::PredicateKind::ConstEquate(..)
-                    | ty::PredicateKind::Ambiguous
-                    | ty::PredicateKind::TypeWellFormedFromEnv(..) => (),
+                    ty::ClauseKind::Trait(_)
+                    | ty::ClauseKind::Projection(_)
+                    | ty::ClauseKind::ConstArgHasType(_, _)
+                    | ty::ClauseKind::WellFormed(_)
+                    | ty::ClauseKind::ConstEvaluatable(_)
+                    | ty::ClauseKind::TypeWellFormedFromEnv(_) => {}
                 }
             }

@@ -1554,6 +1554,11 @@ impl<'a> State<'a> {
                 self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
             }
         }
+        hir::ExprKind::Become(result) => {
+            self.word("become");
+            self.word(" ");
+            self.print_expr_maybe_paren(result, parser::PREC_JUMP);
+        }
         hir::ExprKind::InlineAsm(asm) => {
             self.word("asm!");
             self.print_inline_asm(asm);
@@ -78,8 +78,8 @@ hir_typeck_note_edition_guide = for more on editions, read https://doc.rust-lang
 hir_typeck_op_trait_generic_params = `{$method_name}` must not have any generic parameters

 hir_typeck_return_stmt_outside_of_fn_body =
-    return statement outside of function body
-    .encl_body_label = the return is part of this body...
+    {$statement_kind} statement outside of function body
+    .encl_body_label = the {$statement_kind} is part of this body...
     .encl_fn_label = ...not the enclosing function body

 hir_typeck_struct_expr_non_exhaustive =
@@ -83,6 +83,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }

         self.annotate_expected_due_to_let_ty(err, expr, error);
+
+        if self.is_destruct_assignment_desugaring(expr) {
+            return;
+        }
         self.emit_type_mismatch_suggestions(err, expr, expr_ty, expected, expected_ty_expr, error);
         self.note_type_is_not_clone(err, expected, expr_ty, expr);
         self.note_internal_mutation_in_method(err, expr, Some(expected), expr_ty);
@@ -1253,6 +1257,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         false
     }

+    // Returns whether the given expression is a destruct assignment desugaring.
+    // For example, `(a, b) = (1, &2);`
+    // Here we try to find the pattern binding of the expression,
+    // `default_binding_modes` is false only for destruct assignment desugaring.
+    pub(crate) fn is_destruct_assignment_desugaring(&self, expr: &hir::Expr<'_>) -> bool {
+        if let hir::ExprKind::Path(hir::QPath::Resolved(
+            _,
+            hir::Path { res: hir::def::Res::Local(bind_hir_id), .. },
+        )) = expr.kind
+        {
+            let bind = self.tcx.hir().find(*bind_hir_id);
+            let parent = self.tcx.hir().find(self.tcx.hir().parent_id(*bind_hir_id));
+            if let Some(hir::Node::Pat(hir::Pat { kind: hir::PatKind::Binding(_, _hir_id, _, _), .. })) = bind &&
+               let Some(hir::Node::Pat(hir::Pat { default_binding_modes: false, .. })) = parent {
+                return true;
+            }
+        }
+        return false;
+    }
+
     /// This function is used to determine potential "simple" improvements or users' errors and
     /// provide them useful help. For example:
     ///
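For context, the desugaring this helper detects is ordinary destructuring assignment, exactly the `(a, b) = (1, &2);` shape mentioned in the comment above. A small, runnable user-level illustration (nothing here depends on rustc internals):

```rust
fn main() {
    let (mut a, mut b) = (0i32, &0i32);
    // Destructuring assignment: the compiler desugars this into assignments
    // through temporary bindings whose patterns have
    // `default_binding_modes == false`, which is what the new check above
    // keys on before skipping the borrow-removal suggestion.
    (a, b) = (1, &2);
    assert_eq!((a, b), (1, &2));
}
```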
@@ -1443,6 +1467,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 _,
                 &ty::Ref(_, checked, _),
             ) if self.can_sub(self.param_env, checked, expected) => {
+                let make_sugg = |start: Span, end: BytePos| {
+                    // skip `(` for tuples such as `(c) = (&123)`.
+                    // make sure we won't suggest like `(c) = 123)` which is incorrect.
+                    let sp = sm.span_extend_while(start.shrink_to_lo(), |c| c == '(' || c.is_whitespace())
+                        .map_or(start, |s| s.shrink_to_hi());
+                    Some((
+                        vec![(sp.with_hi(end), String::new())],
+                        "consider removing the borrow".to_string(),
+                        Applicability::MachineApplicable,
+                        true,
+                        true,
+                    ))
+                };
+
                 // We have `&T`, check if what was expected was `T`. If so,
                 // we may want to suggest removing a `&`.
                 if sm.is_imported(expr.span) {
@@ -1456,24 +1494,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         .find(|&s| sp.contains(s))
                         && sm.is_span_accessible(call_span)
                     {
-                        return Some((
-                            vec![(sp.with_hi(call_span.lo()), String::new())],
-                            "consider removing the borrow".to_string(),
-                            Applicability::MachineApplicable,
-                            true,
-                            true,
-                        ));
+                        return make_sugg(sp, call_span.lo())
                     }
                     return None;
                 }
                 if sp.contains(expr.span) && sm.is_span_accessible(expr.span) {
-                    return Some((
-                        vec![(sp.with_hi(expr.span.lo()), String::new())],
-                        "consider removing the borrow".to_string(),
-                        Applicability::MachineApplicable,
-                        true,
-                        true,
-                    ));
+                    return make_sugg(sp, expr.span.lo())
                 }
             }
             (
@@ -2,7 +2,10 @@
 use std::borrow::Cow;

 use crate::fluent_generated as fluent;
-use rustc_errors::{AddToDiagnostic, Applicability, Diagnostic, MultiSpan, SubdiagnosticMessage};
+use rustc_errors::{
+    AddToDiagnostic, Applicability, Diagnostic, DiagnosticArgValue, IntoDiagnosticArg, MultiSpan,
+    SubdiagnosticMessage,
+};
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_middle::ty::Ty;
 use rustc_span::{
@@ -31,6 +34,24 @@ pub struct ReturnStmtOutsideOfFnBody {
     pub encl_body_span: Option<Span>,
     #[label(hir_typeck_encl_fn_label)]
     pub encl_fn_span: Option<Span>,
+    pub statement_kind: ReturnLikeStatementKind,
+}
+
+pub enum ReturnLikeStatementKind {
+    Return,
+    Become,
+}
+
+impl IntoDiagnosticArg for ReturnLikeStatementKind {
+    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+        let kind = match self {
+            Self::Return => "return",
+            Self::Become => "become",
+        }
+        .into();
+
+        DiagnosticArgValue::Str(kind)
+    }
 }

 #[derive(Diagnostic)]
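The `{$statement_kind}` placeholder added to the Fluent message earlier is filled from this new `IntoDiagnosticArg` impl. A minimal standalone analogue of that mapping, in plain Rust without the rustc_errors/Fluent machinery (the `render_message` helper is invented for the example):

```rust
// Standalone sketch of how the enum feeds the diagnostic placeholder.
#[derive(Clone, Copy)]
enum ReturnLikeStatementKind {
    Return,
    Become,
}

impl ReturnLikeStatementKind {
    // Counterpart of `into_diagnostic_arg` above, minus DiagnosticArgValue.
    fn as_str(self) -> &'static str {
        match self {
            Self::Return => "return",
            Self::Become => "become",
        }
    }
}

fn render_message(kind: ReturnLikeStatementKind) -> String {
    // Mirrors the Fluent template
    // `{$statement_kind} statement outside of function body`.
    format!("{} statement outside of function body", kind.as_str())
}

fn main() {
    assert_eq!(
        render_message(ReturnLikeStatementKind::Return),
        "return statement outside of function body"
    );
    assert_eq!(
        render_message(ReturnLikeStatementKind::Become),
        "become statement outside of function body"
    );
}
```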
@@ -5,6 +5,7 @@
 use crate::cast;
 use crate::coercion::CoerceMany;
 use crate::coercion::DynamicCoerceMany;
+use crate::errors::ReturnLikeStatementKind;
 use crate::errors::TypeMismatchFruTypo;
 use crate::errors::{AddressOfTemporaryTaken, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive};
 use crate::errors::{
@@ -324,6 +325,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 }
             }
             ExprKind::Ret(ref expr_opt) => self.check_expr_return(expr_opt.as_deref(), expr),
+            ExprKind::Become(call) => self.check_expr_become(call, expr),
             ExprKind::Let(let_expr) => self.check_expr_let(let_expr),
             ExprKind::Loop(body, _, source, _) => {
                 self.check_expr_loop(body, source, expected, expr)
@@ -735,47 +737,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         expr: &'tcx hir::Expr<'tcx>,
     ) -> Ty<'tcx> {
         if self.ret_coercion.is_none() {
-            let mut err = ReturnStmtOutsideOfFnBody {
-                span: expr.span,
-                encl_body_span: None,
-                encl_fn_span: None,
-            };
-
-            let encl_item_id = self.tcx.hir().get_parent_item(expr.hir_id);
-
-            if let Some(hir::Node::Item(hir::Item {
-                kind: hir::ItemKind::Fn(..),
-                span: encl_fn_span,
-                ..
-            }))
-            | Some(hir::Node::TraitItem(hir::TraitItem {
-                kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)),
-                span: encl_fn_span,
-                ..
-            }))
-            | Some(hir::Node::ImplItem(hir::ImplItem {
-                kind: hir::ImplItemKind::Fn(..),
-                span: encl_fn_span,
-                ..
-            })) = self.tcx.hir().find_by_def_id(encl_item_id.def_id)
-            {
-                // We are inside a function body, so reporting "return statement
-                // outside of function body" needs an explanation.
-
-                let encl_body_owner_id = self.tcx.hir().enclosing_body_owner(expr.hir_id);
-
-                // If this didn't hold, we would not have to report an error in
-                // the first place.
-                assert_ne!(encl_item_id.def_id, encl_body_owner_id);
-
-                let encl_body_id = self.tcx.hir().body_owned_by(encl_body_owner_id);
-                let encl_body = self.tcx.hir().body(encl_body_id);
-
-                err.encl_body_span = Some(encl_body.value.span);
-                err.encl_fn_span = Some(*encl_fn_span);
-            }
-
-            self.tcx.sess.emit_err(err);
-
+            self.emit_return_outside_of_fn_body(expr, ReturnLikeStatementKind::Return);
             if let Some(e) = expr_opt {
                 // We still have to type-check `e` (issue #86188), but calling
@@ -815,6 +777,38 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         self.tcx.types.never
     }

+    fn check_expr_become(
+        &self,
+        call: &'tcx hir::Expr<'tcx>,
+        expr: &'tcx hir::Expr<'tcx>,
+    ) -> Ty<'tcx> {
+        match &self.ret_coercion {
+            Some(ret_coercion) => {
+                let ret_ty = ret_coercion.borrow().expected_ty();
+                let call_expr_ty = self.check_expr_with_hint(call, ret_ty);
+
+                // N.B. don't coerce here, as tail calls can't support most/all coercions
+                // FIXME(explicit_tail_calls): add a diagnostic note that `become` doesn't allow coercions
+                self.demand_suptype(expr.span, ret_ty, call_expr_ty);
+            }
+            None => {
+                self.emit_return_outside_of_fn_body(expr, ReturnLikeStatementKind::Become);
+
+                // Fallback to simply type checking `call` without hint/demanding the right types.
+                // Best effort to highlight more errors.
+                self.check_expr(call);
+            }
+        }
+
+        self.tcx.types.never
+    }
+
+    /// Check an expression that _is being returned_.
+    /// For example, this is called with `return_expr: $expr` when `return $expr`
+    /// is encountered.
+    ///
+    /// Note that this function must only be called in function bodies.
+    ///
     /// `explicit_return` is `true` if we're checking an explicit `return expr`,
     /// and `false` if we're checking a trailing expression.
     pub(super) fn check_return_expr(
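What `check_expr_become` type-checks is the unstable `become` tail-call expression. A hedged, user-facing sketch of the surface syntax follows; it assumes a nightly toolchain that carries this change with the `explicit_tail_calls` feature gate enabled, and since the MIR/codegen side of tail calls landed separately, treat it as a type-checking illustration rather than something guaranteed to build end to end at this commit:

```rust
#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

fn gcd(a: u64, b: u64) -> u64 {
    if b == 0 {
        a
    } else {
        // The call is checked against `gcd`'s return type via `demand_suptype`,
        // i.e. without the usual return-position coercions.
        become gcd(b, a % b)
    }
}

fn main() {
    assert_eq!(gcd(48, 18), 6);
}
```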
@@ -831,10 +825,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             let mut span = return_expr.span;
             // Use the span of the trailing expression for our cause,
             // not the span of the entire function
-            if !explicit_return {
-                if let ExprKind::Block(body, _) = return_expr.kind && let Some(last_expr) = body.expr {
+            if !explicit_return
+                && let ExprKind::Block(body, _) = return_expr.kind
+                && let Some(last_expr) = body.expr
+            {
                 span = last_expr.span;
-                }
             }
             ret_coercion.borrow_mut().coerce(
                 self,
@@ -854,6 +849,55 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }
     }

+    /// Emit an error because `return` or `become` is used outside of a function body.
+    ///
+    /// `expr` is the `return` (`become`) "statement", `kind` is the kind of the statement
+    /// either `Return` or `Become`.
+    fn emit_return_outside_of_fn_body(&self, expr: &hir::Expr<'_>, kind: ReturnLikeStatementKind) {
+        let mut err = ReturnStmtOutsideOfFnBody {
+            span: expr.span,
+            encl_body_span: None,
+            encl_fn_span: None,
+            statement_kind: kind,
+        };
+
+        let encl_item_id = self.tcx.hir().get_parent_item(expr.hir_id);
+
+        if let Some(hir::Node::Item(hir::Item {
+            kind: hir::ItemKind::Fn(..),
+            span: encl_fn_span,
+            ..
+        }))
+        | Some(hir::Node::TraitItem(hir::TraitItem {
+            kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(_)),
+            span: encl_fn_span,
+            ..
+        }))
+        | Some(hir::Node::ImplItem(hir::ImplItem {
+            kind: hir::ImplItemKind::Fn(..),
+            span: encl_fn_span,
+            ..
+        })) = self.tcx.hir().find_by_def_id(encl_item_id.def_id)
+        {
+            // We are inside a function body, so reporting "return statement
+            // outside of function body" needs an explanation.
+
+            let encl_body_owner_id = self.tcx.hir().enclosing_body_owner(expr.hir_id);
+
+            // If this didn't hold, we would not have to report an error in
+            // the first place.
+            assert_ne!(encl_item_id.def_id, encl_body_owner_id);
+
+            let encl_body_id = self.tcx.hir().body_owned_by(encl_body_owner_id);
+            let encl_body = self.tcx.hir().body(encl_body_id);
+
+            err.encl_body_span = Some(encl_body.value.span);
+            err.encl_fn_span = Some(*encl_fn_span);
+        }
+
+        self.tcx.sess.emit_err(err);
+    }
+
     fn point_at_return_for_opaque_ty_error(
         &self,
         errors: &mut Vec<traits::FulfillmentError<'tcx>>,
@@ -326,6 +326,10 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
                 }
             }

+            hir::ExprKind::Become(call) => {
+                self.consume_expr(call);
+            }
+
             hir::ExprKind::Assign(lhs, rhs, _) => {
                 self.mutate_expr(lhs);
                 self.consume_expr(rhs);
@@ -683,7 +683,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     // inference variable.
                     | ty::PredicateKind::ClosureKind(..)
                     | ty::PredicateKind::Ambiguous
-                    | ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
+                    | ty::PredicateKind::Clause(ty::ClauseKind::TypeWellFormedFromEnv(..)) => None,
                 },
             )
         }
@@ -25,16 +25,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

         let generics = self.tcx.generics_of(def_id);
         let predicate_substs = match unsubstituted_pred.kind().skip_binder() {
-            ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) => {
-                pred.trait_ref.substs.to_vec()
-            }
-            ty::PredicateKind::Clause(ty::ClauseKind::Projection(pred)) => {
-                pred.projection_ty.substs.to_vec()
-            }
-            ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(arg, ty)) => {
+            ty::ClauseKind::Trait(pred) => pred.trait_ref.substs.to_vec(),
+            ty::ClauseKind::Projection(pred) => pred.projection_ty.substs.to_vec(),
+            ty::ClauseKind::ConstArgHasType(arg, ty) => {
                 vec![ty.into(), arg.into()]
             }
-            ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(e)) => vec![e.into()],
+            ty::ClauseKind::ConstEvaluatable(e) => vec![e.into()],
             _ => return false,
         };

@@ -512,11 +508,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             // This shouldn't happen, but since this is only a diagnostic improvement, avoid breaking things.
             return Err(expr);
         }
-        let relevant_broken_predicate: ty::PredicateKind<'tcx> =
-            impl_predicates.predicates[impl_predicate_index].0.kind().skip_binder();

-        match relevant_broken_predicate {
-            ty::PredicateKind::Clause(ty::ClauseKind::Trait(broken_trait)) => {
+        match impl_predicates.predicates[impl_predicate_index].0.kind().skip_binder() {
+            ty::ClauseKind::Trait(broken_trait) => {
                 // ...
                 self.blame_specific_part_of_expr_corresponding_to_generic_param(
                     broken_trait.trait_ref.self_ty().into(),
@@ -955,9 +955,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 // - f(0, 1,)
                 // + f()
                 if only_extras_so_far
-                    && errors
+                    && !errors
                         .peek()
-                        .map_or(true, |next_error| !matches!(next_error, Error::Extra(_)))
+                        .is_some_and(|next_error| matches!(next_error, Error::Extra(_)))
                 {
                     let next = provided_arg_tys
                         .get(arg_idx + 1)
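The `.peek()` change just above is a pure logic inversion: `x.map_or(true, |e| !p(e))` says the same thing as `!x.is_some_and(|e| p(e))`. A tiny self-contained check of that equivalence, with a plain integer predicate standing in for the `matches!(next_error, Error::Extra(_))` test:

```rust
fn main() {
    // Stand-in for the `Error::Extra(_)` check in the diff.
    let is_extra = |n: &i32| *n % 2 == 0;

    for next in [None, Some(1), Some(2)] {
        // Old form: "there is no next element, or the next one is not Extra".
        let old_form = next.as_ref().map_or(true, |n| !is_extra(n));
        // New form: negate "the next element exists and is Extra".
        let new_form = !next.as_ref().is_some_and(|n| is_extra(n));
        assert_eq!(old_form, new_form);
    }
}
```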
@@ -1948,7 +1948,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // do that, so it's OK.
         for (predicate, span) in instantiated
         {
-            if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(pred)) = predicate.kind().skip_binder()
+            if let ty::ClauseKind::Trait(pred) = predicate.kind().skip_binder()
                 && pred.self_ty().peel_refs() == callee_ty
                 && self.tcx.is_fn_trait(pred.def_id())
             {
@@ -226,9 +226,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
             predicates: tcx.arena.alloc_from_iter(
                 self.param_env.caller_bounds().iter().filter_map(|predicate| {
                     match predicate.kind().skip_binder() {
-                        ty::PredicateKind::Clause(ty::ClauseKind::Trait(data))
-                            if data.self_ty().is_param(index) =>
-                        {
+                        ty::ClauseKind::Trait(data) if data.self_ty().is_param(index) => {
                             // HACK(eddyb) should get the original `Span`.
                             let span = tcx.def_span(def_id);
                             Some((predicate, span))
@@ -214,6 +214,7 @@ impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
             | ExprKind::Break(..)
             | ExprKind::Continue(..)
             | ExprKind::Ret(..)
+            | ExprKind::Become(..)
             | ExprKind::InlineAsm(..)
             | ExprKind::OffsetOf(..)
             | ExprKind::Struct(..)
@@ -451,6 +452,8 @@ impl<'a, 'tcx> Visitor<'tcx> for DropRangeVisitor<'a, 'tcx> {
                 }
             }

+            ExprKind::Become(_call) => bug!("encountered a tail-call inside a generator"),
+
             ExprKind::Call(f, args) => {
                 self.visit_expr(f);
                 for arg in args {
@@ -361,6 +361,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
             | hir::ExprKind::AssignOp(..)
             | hir::ExprKind::Closure { .. }
             | hir::ExprKind::Ret(..)
+            | hir::ExprKind::Become(..)
             | hir::ExprKind::Unary(..)
             | hir::ExprKind::Yield(..)
             | hir::ExprKind::MethodCall(..)
@@ -606,9 +606,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
         traits::elaborate(self.tcx, predicates.predicates.iter().copied())
             // We don't care about regions here.
             .filter_map(|pred| match pred.kind().skip_binder() {
-                ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred))
-                    if trait_pred.def_id() == sized_def_id =>
-                {
+                ty::ClauseKind::Trait(trait_pred) if trait_pred.def_id() == sized_def_id => {
                     let span = predicates
                         .iter()
                         .find_map(|(p, span)| if p == pred { Some(span) } else { None })
@@ -834,7 +834,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
         let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| {
             let bound_predicate = predicate.kind();
             match bound_predicate.skip_binder() {
-                ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_predicate)) => {
+                ty::ClauseKind::Trait(trait_predicate) => {
                     match *trait_predicate.trait_ref.self_ty().kind() {
                         ty::Param(p) if p == param_ty => {
                             Some(bound_predicate.rebind(trait_predicate.trait_ref))
@@ -842,20 +842,13 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
                         _ => None,
                     }
                 }
-                ty::PredicateKind::Subtype(..)
-                | ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(..))
-                | ty::PredicateKind::Coerce(..)
-                | ty::PredicateKind::Clause(ty::ClauseKind::Projection(..))
-                | ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(..))
-                | ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(..))
-                | ty::PredicateKind::ObjectSafe(..)
-                | ty::PredicateKind::ClosureKind(..)
-                | ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(..))
-                | ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..))
-                | ty::PredicateKind::ConstEquate(..)
-                | ty::PredicateKind::Ambiguous
-                | ty::PredicateKind::AliasRelate(..)
-                | ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
+                ty::ClauseKind::RegionOutlives(_)
+                | ty::ClauseKind::TypeOutlives(_)
+                | ty::ClauseKind::Projection(_)
+                | ty::ClauseKind::ConstArgHasType(_, _)
+                | ty::ClauseKind::WellFormed(_)
+                | ty::ClauseKind::ConstEvaluatable(_)
+                | ty::ClauseKind::TypeWellFormedFromEnv(_) => None,
             }
         });

@@ -393,8 +393,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // They can denote both statically and dynamically-sized byte arrays.
         let mut pat_ty = ty;
         if let hir::ExprKind::Lit(Spanned { node: ast::LitKind::ByteStr(..), .. }) = lt.kind {
-            if let ty::Ref(_, inner_ty, _) = *self.structurally_resolved_type(span, expected).kind()
-                && self.structurally_resolved_type(span, inner_ty).is_slice()
+            let expected = self.structurally_resolved_type(span, expected);
+            if let ty::Ref(_, inner_ty, _) = expected.kind()
+                && matches!(inner_ty.kind(), ty::Slice(_))
             {
                 let tcx = self.tcx;
                 trace!(?lt.hir_id.local_id, "polymorphic byte string lit");
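The pattern-checking hunk above handles byte-string literal patterns matched against a dynamically sized `&[u8]` slice: it now looks through the reference and accepts a `ty::Slice` directly instead of re-resolving the inner type. A small user-level illustration, assuming a toolchain where byte-string literals are accepted in slice-typed patterns (the `classify` helper is invented for the example):

```rust
fn classify(data: &[u8]) -> &'static str {
    // `b"ping"` is a byte-string literal pattern matched against `&[u8]`,
    // the "dynamically-sized byte array" case the modified check covers.
    match data {
        b"ping" => "ping frame",
        [] => "empty frame",
        _ => "other frame",
    }
}

fn main() {
    assert_eq!(classify(b"ping"), "ping frame");
    assert_eq!(classify(&[]), "empty frame");
}
```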
@@ -739,10 +739,8 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> {
         self.obligations.push(Obligation {
             cause: self.cause.clone(),
             param_env: self.param_env,
-            predicate: ty::Binder::dummy(ty::PredicateKind::Clause(
-                ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(sup, sub)),
-            ))
-            .to_predicate(self.infcx.tcx),
+            predicate: ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(sup, sub))
+                .to_predicate(self.infcx.tcx),
             recursion_depth: 0,
         });
     }
@@ -139,7 +139,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 tcx,
                 generics,
                 diag,
-                &format!("{}", proj.self_ty()),
+                &proj.self_ty().to_string(),
                 &path,
                 None,
                 matching_span,
@@ -153,7 +153,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 tcx,
                 generics,
                 diag,
-                &format!("{}", proj.self_ty()),
+                &proj.self_ty().to_string(),
                 &path,
                 None,
                 matching_span,
@@ -1474,6 +1474,7 @@ impl<'tcx> InferCtxt<'tcx> {
     /// universes. Updates `self.universe` to that new universe.
     pub fn create_next_universe(&self) -> ty::UniverseIndex {
         let u = self.universe.get().next_universe();
+        debug!("create_next_universe {u:?}");
         self.universe.set(u);
         u
     }
@@ -20,27 +20,19 @@ pub fn explicit_outlives_bounds<'tcx>(
     param_env
         .caller_bounds()
         .into_iter()
-        .map(ty::Predicate::kind)
+        .map(ty::Clause::kind)
         .filter_map(ty::Binder::no_bound_vars)
         .filter_map(move |kind| match kind {
-            ty::PredicateKind::Clause(ty::ClauseKind::Projection(..))
-            | ty::PredicateKind::Clause(ty::ClauseKind::Trait(..))
-            | ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(..))
-            | ty::PredicateKind::AliasRelate(..)
-            | ty::PredicateKind::Coerce(..)
-            | ty::PredicateKind::Subtype(..)
-            | ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(..))
-            | ty::PredicateKind::ObjectSafe(..)
-            | ty::PredicateKind::ClosureKind(..)
-            | ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(..))
-            | ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(..))
-            | ty::PredicateKind::ConstEquate(..)
-            | ty::PredicateKind::Ambiguous
-            | ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
-            ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(
-                r_a,
-                r_b,
-            ))) => Some(OutlivesBound::RegionSubRegion(r_b, r_a)),
+            ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(r_a, r_b)) => {
+                Some(OutlivesBound::RegionSubRegion(r_b, r_a))
+            }
+            ty::ClauseKind::Trait(_)
+            | ty::ClauseKind::TypeOutlives(_)
+            | ty::ClauseKind::Projection(_)
+            | ty::ClauseKind::ConstArgHasType(_, _)
+            | ty::ClauseKind::WellFormed(_)
+            | ty::ClauseKind::ConstEvaluatable(_)
+            | ty::ClauseKind::TypeWellFormedFromEnv(_) => None,
         })
 }

@@ -223,7 +223,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
         // parameter environments are already elaborated, so we don't
         // have to worry about that.
         let c_b = self.param_env.caller_bounds();
-        let param_bounds = self.collect_outlives_from_predicate_list(erased_ty, c_b.into_iter());
+        let param_bounds = self.collect_outlives_from_clause_list(erased_ty, c_b.into_iter());

         // Next, collect regions we scraped from the well-formedness
         // constraints in the fn signature. To do that, we walk the list
@@ -307,15 +307,15 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
     /// when comparing `ty` for equality, so `ty` must be something
     /// that does not involve inference variables and where you
     /// otherwise want a precise match.
-    fn collect_outlives_from_predicate_list(
+    fn collect_outlives_from_clause_list(
         &self,
         erased_ty: Ty<'tcx>,
-        predicates: impl Iterator<Item = ty::Predicate<'tcx>>,
+        clauses: impl Iterator<Item = ty::Clause<'tcx>>,
     ) -> impl Iterator<Item = ty::Binder<'tcx, ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>>>
     {
         let tcx = self.tcx;
         let param_env = self.param_env;
-        predicates.filter_map(|p| p.to_opt_type_outlives()).filter(move |outlives_predicate| {
+        clauses.filter_map(|p| p.as_type_outlives_clause()).filter(move |outlives_predicate| {
             super::test_type_match::can_match_erased_ty(
                 tcx,
                 param_env,
@@ -258,7 +258,8 @@ impl<'tcx, O: Elaboratable<'tcx>> Elaborator<'tcx, O> {
                     pred = pred.without_const(tcx);
                 }
                 elaboratable.child_with_derived_cause(
-                    pred.subst_supertrait(tcx, &bound_predicate.rebind(data.trait_ref)),
+                    pred.subst_supertrait(tcx, &bound_predicate.rebind(data.trait_ref))
+                        .as_predicate(),
                     span,
                     bound_predicate.rebind(data),
                     index,
@@ -367,7 +368,7 @@ impl<'tcx, O: Elaboratable<'tcx>> Elaborator<'tcx, O> {
                     .map(|predicate| elaboratable.child(predicate)),
             );
         }
-        ty::PredicateKind::TypeWellFormedFromEnv(..) => {
+        ty::PredicateKind::Clause(ty::ClauseKind::TypeWellFormedFromEnv(..)) => {
             // Nothing to elaborate
         }
         ty::PredicateKind::Ambiguous => {}
|
ty::PredicateKind::Ambiguous => {}
|
||||||
@ -440,7 +441,7 @@ pub fn transitive_bounds_that_define_assoc_item<'tcx>(
|
|||||||
tcx.super_predicates_that_define_assoc_item((trait_ref.def_id(), assoc_name));
|
tcx.super_predicates_that_define_assoc_item((trait_ref.def_id(), assoc_name));
|
||||||
for (super_predicate, _) in super_predicates.predicates {
|
for (super_predicate, _) in super_predicates.predicates {
|
||||||
let subst_predicate = super_predicate.subst_supertrait(tcx, &trait_ref);
|
let subst_predicate = super_predicate.subst_supertrait(tcx, &trait_ref);
|
||||||
if let Some(binder) = subst_predicate.to_opt_poly_trait_pred() {
|
if let Some(binder) = subst_predicate.as_trait_clause() {
|
||||||
stack.push(binder.map_bound(|t| t.trait_ref));
|
stack.push(binder.map_bound(|t| t.trait_ref));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -14,12 +14,11 @@ use rustc_middle::{bug, ty};
|
|||||||
use rustc_parse::maybe_new_parser_from_source_str;
|
use rustc_parse::maybe_new_parser_from_source_str;
|
||||||
use rustc_query_impl::QueryCtxt;
|
use rustc_query_impl::QueryCtxt;
|
||||||
use rustc_query_system::query::print_query_stack;
|
use rustc_query_system::query::print_query_stack;
|
||||||
use rustc_session::config::{self, ErrorOutputType, Input, OutFileName, OutputFilenames};
|
use rustc_session::config::{self, CheckCfg, ExpectedValues, Input, OutFileName, OutputFilenames};
|
||||||
use rustc_session::config::{CheckCfg, ExpectedValues};
|
|
||||||
use rustc_session::lint;
|
|
||||||
use rustc_session::parse::{CrateConfig, ParseSess};
|
use rustc_session::parse::{CrateConfig, ParseSess};
|
||||||
|
use rustc_session::CompilerIO;
|
||||||
use rustc_session::Session;
|
use rustc_session::Session;
|
||||||
use rustc_session::{early_error, CompilerIO};
|
use rustc_session::{lint, EarlyErrorHandler};
|
||||||
use rustc_span::source_map::{FileLoader, FileName};
|
use rustc_span::source_map::{FileLoader, FileName};
|
||||||
use rustc_span::symbol::sym;
|
use rustc_span::symbol::sym;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
@ -66,7 +65,10 @@ pub fn set_thread_safe_mode(sopts: &config::UnstableOptions) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
|
/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
|
||||||
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
|
pub fn parse_cfgspecs(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
|
cfgspecs: Vec<String>,
|
||||||
|
) -> FxHashSet<(String, Option<String>)> {
|
||||||
rustc_span::create_default_session_if_not_set_then(move |_| {
|
rustc_span::create_default_session_if_not_set_then(move |_| {
|
||||||
let cfg = cfgspecs
|
let cfg = cfgspecs
|
||||||
.into_iter()
|
.into_iter()
|
||||||
@ -78,10 +80,10 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String
|
|||||||
|
|
||||||
macro_rules! error {
|
macro_rules! error {
|
||||||
($reason: expr) => {
|
($reason: expr) => {
|
||||||
early_error(
|
handler.early_error(format!(
|
||||||
ErrorOutputType::default(),
|
concat!("invalid `--cfg` argument: `{}` (", $reason, ")"),
|
||||||
format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s),
|
s
|
||||||
);
|
));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -125,7 +127,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`.
|
/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`.
|
||||||
pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
|
pub fn parse_check_cfg(handler: &EarlyErrorHandler, specs: Vec<String>) -> CheckCfg {
|
||||||
rustc_span::create_default_session_if_not_set_then(move |_| {
|
rustc_span::create_default_session_if_not_set_then(move |_| {
|
||||||
let mut check_cfg = CheckCfg::default();
|
let mut check_cfg = CheckCfg::default();
|
||||||
|
|
||||||
@ -137,10 +139,10 @@ pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
|
|||||||
|
|
||||||
macro_rules! error {
|
macro_rules! error {
|
||||||
($reason: expr) => {
|
($reason: expr) => {
|
||||||
early_error(
|
handler.early_error(format!(
|
||||||
ErrorOutputType::default(),
|
concat!("invalid `--check-cfg` argument: `{}` (", $reason, ")"),
|
||||||
format!(concat!("invalid `--check-cfg` argument: `{}` (", $reason, ")"), s),
|
s
|
||||||
)
|
))
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -294,8 +296,11 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
|
|||||||
|
|
||||||
let registry = &config.registry;
|
let registry = &config.registry;
|
||||||
|
|
||||||
|
let handler = EarlyErrorHandler::new(config.opts.error_format);
|
||||||
|
|
||||||
let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
|
let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
|
||||||
let (mut sess, codegen_backend) = util::create_session(
|
let (mut sess, codegen_backend) = util::create_session(
|
||||||
|
&handler,
|
||||||
config.opts,
|
config.opts,
|
||||||
config.crate_cfg,
|
config.crate_cfg,
|
||||||
config.crate_check_cfg,
|
config.crate_check_cfg,
|
||||||
|
@ -21,8 +21,8 @@ use rustc_session::config::{InstrumentCoverage, Passes};
|
|||||||
use rustc_session::lint::Level;
|
use rustc_session::lint::Level;
|
||||||
use rustc_session::search_paths::SearchPath;
|
use rustc_session::search_paths::SearchPath;
|
||||||
use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
|
use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
|
||||||
use rustc_session::CompilerIO;
|
|
||||||
use rustc_session::{build_session, getopts, Session};
|
use rustc_session::{build_session, getopts, Session};
|
||||||
|
use rustc_session::{CompilerIO, EarlyErrorHandler};
|
||||||
use rustc_span::edition::{Edition, DEFAULT_EDITION};
|
use rustc_span::edition::{Edition, DEFAULT_EDITION};
|
||||||
use rustc_span::symbol::sym;
|
use rustc_span::symbol::sym;
|
||||||
use rustc_span::FileName;
|
use rustc_span::FileName;
|
||||||
@ -36,15 +36,18 @@ use std::path::{Path, PathBuf};
|
|||||||
|
|
||||||
type CfgSpecs = FxHashSet<(String, Option<String>)>;
|
type CfgSpecs = FxHashSet<(String, Option<String>)>;
|
||||||
|
|
||||||
fn build_session_options_and_crate_config(matches: getopts::Matches) -> (Options, CfgSpecs) {
|
fn build_session_options_and_crate_config(
|
||||||
let sessopts = build_session_options(&matches);
|
handler: &mut EarlyErrorHandler,
|
||||||
let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
|
matches: getopts::Matches,
|
||||||
|
) -> (Options, CfgSpecs) {
|
||||||
|
let sessopts = build_session_options(handler, &matches);
|
||||||
|
let cfg = parse_cfgspecs(handler, matches.opt_strs("cfg"));
|
||||||
(sessopts, cfg)
|
(sessopts, cfg)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn mk_session(matches: getopts::Matches) -> (Session, CfgSpecs) {
|
fn mk_session(handler: &mut EarlyErrorHandler, matches: getopts::Matches) -> (Session, CfgSpecs) {
|
||||||
let registry = registry::Registry::new(&[]);
|
let registry = registry::Registry::new(&[]);
|
||||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
let (sessopts, cfg) = build_session_options_and_crate_config(handler, matches);
|
||||||
let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
|
let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
|
||||||
let io = CompilerIO {
|
let io = CompilerIO {
|
||||||
input: Input::Str { name: FileName::Custom(String::new()), input: String::new() },
|
input: Input::Str { name: FileName::Custom(String::new()), input: String::new() },
|
||||||
@ -52,8 +55,18 @@ fn mk_session(matches: getopts::Matches) -> (Session, CfgSpecs) {
|
|||||||
output_file: None,
|
output_file: None,
|
||||||
temps_dir,
|
temps_dir,
|
||||||
};
|
};
|
||||||
let sess =
|
let sess = build_session(
|
||||||
build_session(sessopts, io, None, registry, vec![], Default::default(), None, None, "");
|
handler,
|
||||||
|
sessopts,
|
||||||
|
io,
|
||||||
|
None,
|
||||||
|
registry,
|
||||||
|
vec![],
|
||||||
|
Default::default(),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
"",
|
||||||
|
);
|
||||||
(sess, cfg)
|
(sess, cfg)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -120,7 +133,8 @@ fn assert_non_crate_hash_different(x: &Options, y: &Options) {
|
|||||||
fn test_switch_implies_cfg_test() {
|
fn test_switch_implies_cfg_test() {
|
||||||
rustc_span::create_default_session_globals_then(|| {
|
rustc_span::create_default_session_globals_then(|| {
|
||||||
let matches = optgroups().parse(&["--test".to_string()]).unwrap();
|
let matches = optgroups().parse(&["--test".to_string()]).unwrap();
|
||||||
let (sess, cfg) = mk_session(matches);
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
let (sess, cfg) = mk_session(&mut handler, matches);
|
||||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||||
assert!(cfg.contains(&(sym::test, None)));
|
assert!(cfg.contains(&(sym::test, None)));
|
||||||
});
|
});
|
||||||
@ -131,7 +145,8 @@ fn test_switch_implies_cfg_test() {
|
|||||||
fn test_switch_implies_cfg_test_unless_cfg_test() {
|
fn test_switch_implies_cfg_test_unless_cfg_test() {
|
||||||
rustc_span::create_default_session_globals_then(|| {
|
rustc_span::create_default_session_globals_then(|| {
|
||||||
let matches = optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]).unwrap();
|
let matches = optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]).unwrap();
|
||||||
let (sess, cfg) = mk_session(matches);
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
let (sess, cfg) = mk_session(&mut handler, matches);
|
||||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||||
let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
|
let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
|
||||||
assert!(test_items.next().is_some());
|
assert!(test_items.next().is_some());
|
||||||
@ -143,20 +158,23 @@ fn test_switch_implies_cfg_test_unless_cfg_test() {
|
|||||||
fn test_can_print_warnings() {
|
fn test_can_print_warnings() {
|
||||||
rustc_span::create_default_session_globals_then(|| {
|
rustc_span::create_default_session_globals_then(|| {
|
||||||
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
|
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
|
||||||
let (sess, _) = mk_session(matches);
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
let (sess, _) = mk_session(&mut handler, matches);
|
||||||
assert!(!sess.diagnostic().can_emit_warnings());
|
assert!(!sess.diagnostic().can_emit_warnings());
|
||||||
});
|
});
|
||||||
|
|
||||||
rustc_span::create_default_session_globals_then(|| {
|
rustc_span::create_default_session_globals_then(|| {
|
||||||
let matches =
|
let matches =
|
||||||
optgroups().parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()]).unwrap();
|
optgroups().parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()]).unwrap();
|
||||||
let (sess, _) = mk_session(matches);
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
let (sess, _) = mk_session(&mut handler, matches);
|
||||||
assert!(sess.diagnostic().can_emit_warnings());
|
assert!(sess.diagnostic().can_emit_warnings());
|
||||||
});
|
});
|
||||||
|
|
||||||
rustc_span::create_default_session_globals_then(|| {
|
rustc_span::create_default_session_globals_then(|| {
|
||||||
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
|
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
|
||||||
let (sess, _) = mk_session(matches);
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
let (sess, _) = mk_session(&mut handler, matches);
|
||||||
assert!(sess.diagnostic().can_emit_warnings());
|
assert!(sess.diagnostic().can_emit_warnings());
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@ -302,35 +320,36 @@ fn test_search_paths_tracking_hash_different_order() {
|
|||||||
let mut v3 = Options::default();
|
let mut v3 = Options::default();
|
||||||
let mut v4 = Options::default();
|
let mut v4 = Options::default();
|
||||||
|
|
||||||
|
let handler = EarlyErrorHandler::new(JSON);
|
||||||
const JSON: ErrorOutputType = ErrorOutputType::Json {
|
const JSON: ErrorOutputType = ErrorOutputType::Json {
|
||||||
pretty: false,
|
pretty: false,
|
||||||
json_rendered: HumanReadableErrorType::Default(ColorConfig::Never),
|
json_rendered: HumanReadableErrorType::Default(ColorConfig::Never),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Reference
|
// Reference
|
||||||
v1.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
|
v1.search_paths.push(SearchPath::from_cli_opt(&handler, "native=abc"));
|
||||||
v1.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
|
v1.search_paths.push(SearchPath::from_cli_opt(&handler, "crate=def"));
|
||||||
v1.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
|
v1.search_paths.push(SearchPath::from_cli_opt(&handler, "dependency=ghi"));
|
||||||
v1.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
|
v1.search_paths.push(SearchPath::from_cli_opt(&handler, "framework=jkl"));
|
||||||
v1.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
|
v1.search_paths.push(SearchPath::from_cli_opt(&handler, "all=mno"));
|
||||||
|
|
||||||
v2.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
|
v2.search_paths.push(SearchPath::from_cli_opt(&handler, "native=abc"));
|
||||||
v2.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
|
v2.search_paths.push(SearchPath::from_cli_opt(&handler, "dependency=ghi"));
|
||||||
v2.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
|
v2.search_paths.push(SearchPath::from_cli_opt(&handler, "crate=def"));
|
||||||
v2.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
|
v2.search_paths.push(SearchPath::from_cli_opt(&handler, "framework=jkl"));
|
||||||
v2.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
|
v2.search_paths.push(SearchPath::from_cli_opt(&handler, "all=mno"));
|
||||||
|
|
||||||
v3.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
|
v3.search_paths.push(SearchPath::from_cli_opt(&handler, "crate=def"));
|
||||||
v3.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
|
v3.search_paths.push(SearchPath::from_cli_opt(&handler, "framework=jkl"));
|
||||||
v3.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
|
v3.search_paths.push(SearchPath::from_cli_opt(&handler, "native=abc"));
|
||||||
v3.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
|
v3.search_paths.push(SearchPath::from_cli_opt(&handler, "dependency=ghi"));
|
||||||
v3.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
|
v3.search_paths.push(SearchPath::from_cli_opt(&handler, "all=mno"));
|
||||||
|
|
||||||
v4.search_paths.push(SearchPath::from_cli_opt("all=mno", JSON));
|
v4.search_paths.push(SearchPath::from_cli_opt(&handler, "all=mno"));
|
||||||
v4.search_paths.push(SearchPath::from_cli_opt("native=abc", JSON));
|
v4.search_paths.push(SearchPath::from_cli_opt(&handler, "native=abc"));
|
||||||
v4.search_paths.push(SearchPath::from_cli_opt("crate=def", JSON));
|
v4.search_paths.push(SearchPath::from_cli_opt(&handler, "crate=def"));
|
||||||
v4.search_paths.push(SearchPath::from_cli_opt("dependency=ghi", JSON));
|
v4.search_paths.push(SearchPath::from_cli_opt(&handler, "dependency=ghi"));
|
||||||
v4.search_paths.push(SearchPath::from_cli_opt("framework=jkl", JSON));
|
v4.search_paths.push(SearchPath::from_cli_opt(&handler, "framework=jkl"));
|
||||||
|
|
||||||
assert_same_hash(&v1, &v2);
|
assert_same_hash(&v1, &v2);
|
||||||
assert_same_hash(&v1, &v3);
|
assert_same_hash(&v1, &v3);
|
||||||
@ -851,7 +870,9 @@ fn test_edition_parsing() {
|
|||||||
let options = Options::default();
|
let options = Options::default();
|
||||||
assert!(options.edition == DEFAULT_EDITION);
|
assert!(options.edition == DEFAULT_EDITION);
|
||||||
|
|
||||||
|
let mut handler = EarlyErrorHandler::new(ErrorOutputType::default());
|
||||||
|
|
||||||
let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap();
|
let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap();
|
||||||
let (sessopts, _) = build_session_options_and_crate_config(matches);
|
let (sessopts, _) = build_session_options_and_crate_config(&mut handler, matches);
|
||||||
assert!(sessopts.edition == Edition::Edition2018)
|
assert!(sessopts.edition == Edition::Edition2018)
|
||||||
}
|
}
|
||||||
|
@ -11,16 +11,16 @@ use rustc_parse::validate_attr;
|
|||||||
use rustc_session as session;
|
use rustc_session as session;
|
||||||
use rustc_session::config::CheckCfg;
|
use rustc_session::config::CheckCfg;
|
||||||
use rustc_session::config::{self, CrateType};
|
use rustc_session::config::{self, CrateType};
|
||||||
use rustc_session::config::{ErrorOutputType, OutFileName, OutputFilenames, OutputTypes};
|
use rustc_session::config::{OutFileName, OutputFilenames, OutputTypes};
|
||||||
use rustc_session::filesearch::sysroot_candidates;
|
use rustc_session::filesearch::sysroot_candidates;
|
||||||
use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
|
use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
|
||||||
use rustc_session::parse::CrateConfig;
|
use rustc_session::parse::CrateConfig;
|
||||||
use rustc_session::{early_error, filesearch, output, Session};
|
use rustc_session::{filesearch, output, Session};
|
||||||
use rustc_span::edit_distance::find_best_match_for_name;
|
use rustc_span::edit_distance::find_best_match_for_name;
|
||||||
use rustc_span::edition::Edition;
|
use rustc_span::edition::Edition;
|
||||||
use rustc_span::source_map::FileLoader;
|
use rustc_span::source_map::FileLoader;
|
||||||
use rustc_span::symbol::{sym, Symbol};
|
use rustc_span::symbol::{sym, Symbol};
|
||||||
use session::CompilerIO;
|
use session::{CompilerIO, EarlyErrorHandler};
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
|
use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
|
||||||
use std::mem;
|
use std::mem;
|
||||||
@ -58,6 +58,7 @@ pub fn add_configuration(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn create_session(
|
pub fn create_session(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
sopts: config::Options,
|
sopts: config::Options,
|
||||||
cfg: FxHashSet<(String, Option<String>)>,
|
cfg: FxHashSet<(String, Option<String>)>,
|
||||||
check_cfg: CheckCfg,
|
check_cfg: CheckCfg,
|
||||||
@ -73,7 +74,11 @@ pub fn create_session(
|
|||||||
let codegen_backend = if let Some(make_codegen_backend) = make_codegen_backend {
|
let codegen_backend = if let Some(make_codegen_backend) = make_codegen_backend {
|
||||||
make_codegen_backend(&sopts)
|
make_codegen_backend(&sopts)
|
||||||
} else {
|
} else {
|
||||||
get_codegen_backend(&sopts.maybe_sysroot, sopts.unstable_opts.codegen_backend.as_deref())
|
get_codegen_backend(
|
||||||
|
handler,
|
||||||
|
&sopts.maybe_sysroot,
|
||||||
|
sopts.unstable_opts.codegen_backend.as_deref(),
|
||||||
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
// target_override is documented to be called before init(), so this is okay
|
// target_override is documented to be called before init(), so this is okay
|
||||||
@ -88,7 +93,7 @@ pub fn create_session(
|
|||||||
) {
|
) {
|
||||||
Ok(bundle) => bundle,
|
Ok(bundle) => bundle,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
early_error(sopts.error_format, format!("failed to load fluent bundle: {e}"));
|
handler.early_error(format!("failed to load fluent bundle: {e}"));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -96,6 +101,7 @@ pub fn create_session(
|
|||||||
locale_resources.push(codegen_backend.locale_resource());
|
locale_resources.push(codegen_backend.locale_resource());
|
||||||
|
|
||||||
let mut sess = session::build_session(
|
let mut sess = session::build_session(
|
||||||
|
handler,
|
||||||
sopts,
|
sopts,
|
||||||
io,
|
io,
|
||||||
bundle,
|
bundle,
|
||||||
@ -218,16 +224,16 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_backend_from_dylib(path: &Path) -> MakeBackendFn {
|
fn load_backend_from_dylib(handler: &EarlyErrorHandler, path: &Path) -> MakeBackendFn {
|
||||||
let lib = unsafe { Library::new(path) }.unwrap_or_else(|err| {
|
let lib = unsafe { Library::new(path) }.unwrap_or_else(|err| {
|
||||||
let err = format!("couldn't load codegen backend {path:?}: {err}");
|
let err = format!("couldn't load codegen backend {path:?}: {err}");
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
});
|
});
|
||||||
|
|
||||||
let backend_sym = unsafe { lib.get::<MakeBackendFn>(b"__rustc_codegen_backend") }
|
let backend_sym = unsafe { lib.get::<MakeBackendFn>(b"__rustc_codegen_backend") }
|
||||||
.unwrap_or_else(|e| {
|
.unwrap_or_else(|e| {
|
||||||
let err = format!("couldn't load codegen backend: {e}");
|
let err = format!("couldn't load codegen backend: {e}");
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Intentionally leak the dynamic library. We can't ever unload it
|
// Intentionally leak the dynamic library. We can't ever unload it
|
||||||
@ -242,6 +248,7 @@ fn load_backend_from_dylib(path: &Path) -> MakeBackendFn {
|
|||||||
///
|
///
|
||||||
/// A name of `None` indicates that the default backend should be used.
|
/// A name of `None` indicates that the default backend should be used.
|
||||||
pub fn get_codegen_backend(
|
pub fn get_codegen_backend(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
maybe_sysroot: &Option<PathBuf>,
|
maybe_sysroot: &Option<PathBuf>,
|
||||||
backend_name: Option<&str>,
|
backend_name: Option<&str>,
|
||||||
) -> Box<dyn CodegenBackend> {
|
) -> Box<dyn CodegenBackend> {
|
||||||
@ -251,10 +258,12 @@ pub fn get_codegen_backend(
|
|||||||
let default_codegen_backend = option_env!("CFG_DEFAULT_CODEGEN_BACKEND").unwrap_or("llvm");
|
let default_codegen_backend = option_env!("CFG_DEFAULT_CODEGEN_BACKEND").unwrap_or("llvm");
|
||||||
|
|
||||||
match backend_name.unwrap_or(default_codegen_backend) {
|
match backend_name.unwrap_or(default_codegen_backend) {
|
||||||
filename if filename.contains('.') => load_backend_from_dylib(filename.as_ref()),
|
filename if filename.contains('.') => {
|
||||||
|
load_backend_from_dylib(handler, filename.as_ref())
|
||||||
|
}
|
||||||
#[cfg(feature = "llvm")]
|
#[cfg(feature = "llvm")]
|
||||||
"llvm" => rustc_codegen_llvm::LlvmCodegenBackend::new,
|
"llvm" => rustc_codegen_llvm::LlvmCodegenBackend::new,
|
||||||
backend_name => get_codegen_sysroot(maybe_sysroot, backend_name),
|
backend_name => get_codegen_sysroot(handler, maybe_sysroot, backend_name),
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -286,7 +295,11 @@ fn get_rustc_path_inner(bin_path: &str) -> Option<PathBuf> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> MakeBackendFn {
|
fn get_codegen_sysroot(
|
||||||
|
handler: &EarlyErrorHandler,
|
||||||
|
maybe_sysroot: &Option<PathBuf>,
|
||||||
|
backend_name: &str,
|
||||||
|
) -> MakeBackendFn {
|
||||||
// For now we only allow this function to be called once as it'll dlopen a
|
// For now we only allow this function to be called once as it'll dlopen a
|
||||||
// few things, which seems to work best if we only do that once. In
|
// few things, which seems to work best if we only do that once. In
|
||||||
// general this assertion never trips due to the once guard in `get_codegen_backend`,
|
// general this assertion never trips due to the once guard in `get_codegen_backend`,
|
||||||
@ -321,7 +334,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
|
|||||||
"failed to find a `codegen-backends` folder \
|
"failed to find a `codegen-backends` folder \
|
||||||
in the sysroot candidates:\n* {candidates}"
|
in the sysroot candidates:\n* {candidates}"
|
||||||
);
|
);
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
});
|
});
|
||||||
info!("probing {} for a codegen backend", sysroot.display());
|
info!("probing {} for a codegen backend", sysroot.display());
|
||||||
|
|
||||||
@ -332,7 +345,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
|
|||||||
sysroot.display(),
|
sysroot.display(),
|
||||||
e
|
e
|
||||||
);
|
);
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut file: Option<PathBuf> = None;
|
let mut file: Option<PathBuf> = None;
|
||||||
@ -360,16 +373,16 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
|
|||||||
prev.display(),
|
prev.display(),
|
||||||
path.display()
|
path.display()
|
||||||
);
|
);
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
}
|
}
|
||||||
file = Some(path.clone());
|
file = Some(path.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
match file {
|
match file {
|
||||||
Some(ref s) => load_backend_from_dylib(s),
|
Some(ref s) => load_backend_from_dylib(handler, s),
|
||||||
None => {
|
None => {
|
||||||
let err = format!("unsupported builtin codegen backend `{backend_name}`");
|
let err = format!("unsupported builtin codegen backend `{backend_name}`");
|
||||||
early_error(ErrorOutputType::default(), err);
|
handler.early_error(err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1593,33 +1593,25 @@ declare_lint_pass!(
|
|||||||
impl<'tcx> LateLintPass<'tcx> for TrivialConstraints {
|
impl<'tcx> LateLintPass<'tcx> for TrivialConstraints {
|
||||||
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
|
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
|
||||||
use rustc_middle::ty::ClauseKind;
|
use rustc_middle::ty::ClauseKind;
|
||||||
use rustc_middle::ty::PredicateKind::*;
|
|
||||||
|
|
||||||
if cx.tcx.features().trivial_bounds {
|
if cx.tcx.features().trivial_bounds {
|
||||||
let predicates = cx.tcx.predicates_of(item.owner_id);
|
let predicates = cx.tcx.predicates_of(item.owner_id);
|
||||||
for &(predicate, span) in predicates.predicates {
|
for &(predicate, span) in predicates.predicates {
|
||||||
let predicate_kind_name = match predicate.kind().skip_binder() {
|
let predicate_kind_name = match predicate.kind().skip_binder() {
|
||||||
Clause(ClauseKind::Trait(..)) => "trait",
|
ClauseKind::Trait(..) => "trait",
|
||||||
Clause(ClauseKind::TypeOutlives(..)) |
|
ClauseKind::TypeOutlives(..) |
|
||||||
Clause(ClauseKind::RegionOutlives(..)) => "lifetime",
|
ClauseKind::RegionOutlives(..) => "lifetime",
|
||||||
|
|
||||||
// `ConstArgHasType` is never global as `ct` is always a param
|
// `ConstArgHasType` is never global as `ct` is always a param
|
||||||
Clause(ClauseKind::ConstArgHasType(..)) |
|
ClauseKind::ConstArgHasType(..)
|
||||||
// Ignore projections, as they can only be global
|
// Ignore projections, as they can only be global
|
||||||
// if the trait bound is global
|
// if the trait bound is global
|
||||||
Clause(ClauseKind::Projection(..)) |
|
| ClauseKind::Projection(..)
|
||||||
// Ignore bounds that a user can't type
|
// Ignore bounds that a user can't type
|
||||||
Clause(ClauseKind::WellFormed(..)) |
|
| ClauseKind::WellFormed(..)
|
||||||
// FIXME(generic_const_exprs): `ConstEvaluatable` can be written
|
// FIXME(generic_const_exprs): `ConstEvaluatable` can be written
|
||||||
Clause(ClauseKind::ConstEvaluatable(..)) |
|
| ClauseKind::ConstEvaluatable(..)
|
||||||
AliasRelate(..) |
|
| ClauseKind::TypeWellFormedFromEnv(_) => continue,
|
||||||
ObjectSafe(..) |
|
|
||||||
ClosureKind(..) |
|
|
||||||
Subtype(..) |
|
|
||||||
Coerce(..) |
|
|
||||||
ConstEquate(..) |
|
|
||||||
Ambiguous |
|
|
||||||
TypeWellFormedFromEnv(..) => continue,
|
|
||||||
};
|
};
|
||||||
if predicate.is_global() {
|
if predicate.is_global() {
|
||||||
cx.emit_spanned_lint(
|
cx.emit_spanned_lint(
|
||||||
|
@ -10,7 +10,7 @@ use rustc_errors::{
|
|||||||
use rustc_hir::def_id::DefId;
|
use rustc_hir::def_id::DefId;
|
||||||
use rustc_macros::{LintDiagnostic, Subdiagnostic};
|
use rustc_macros::{LintDiagnostic, Subdiagnostic};
|
||||||
use rustc_middle::ty::{
|
use rustc_middle::ty::{
|
||||||
inhabitedness::InhabitedPredicate, PolyExistentialTraitRef, Predicate, Ty, TyCtxt,
|
inhabitedness::InhabitedPredicate, Clause, PolyExistentialTraitRef, Ty, TyCtxt,
|
||||||
};
|
};
|
||||||
use rustc_session::parse::ParseSess;
|
use rustc_session::parse::ParseSess;
|
||||||
use rustc_span::{edition::Edition, sym, symbol::Ident, Span, Symbol};
|
use rustc_span::{edition::Edition, sym, symbol::Ident, Span, Symbol};
|
||||||
@ -352,7 +352,7 @@ impl AddToDiagnostic for BuiltinTypeAliasGenericBoundsSuggestion {
|
|||||||
#[diag(lint_builtin_trivial_bounds)]
|
#[diag(lint_builtin_trivial_bounds)]
|
||||||
pub struct BuiltinTrivialBounds<'a> {
|
pub struct BuiltinTrivialBounds<'a> {
|
||||||
pub predicate_kind_name: &'a str,
|
pub predicate_kind_name: &'a str,
|
||||||
pub predicate: Predicate<'a>,
|
pub predicate: Clause<'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(LintDiagnostic)]
|
#[derive(LintDiagnostic)]
|
||||||
@ -1262,7 +1262,7 @@ pub struct RedundantSemicolonsDiag {
|
|||||||
|
|
||||||
// traits.rs
|
// traits.rs
|
||||||
pub struct DropTraitConstraintsDiag<'a> {
|
pub struct DropTraitConstraintsDiag<'a> {
|
||||||
pub predicate: Predicate<'a>,
|
pub predicate: Clause<'a>,
|
||||||
pub tcx: TyCtxt<'a>,
|
pub tcx: TyCtxt<'a>,
|
||||||
pub def_id: DefId,
|
pub def_id: DefId,
|
||||||
}
|
}
|
||||||
|
@ -45,7 +45,7 @@ impl<'tcx> LateLintPass<'tcx> for MultipleSupertraitUpcastable {
|
|||||||
.super_predicates_of(def_id)
|
.super_predicates_of(def_id)
|
||||||
.predicates
|
.predicates
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(pred, _)| pred.to_opt_poly_trait_pred());
|
.filter_map(|(pred, _)| pred.as_trait_clause());
|
||||||
if direct_super_traits_iter.count() > 1 {
|
if direct_super_traits_iter.count() > 1 {
|
||||||
cx.emit_spanned_lint(
|
cx.emit_spanned_lint(
|
||||||
MULTIPLE_SUPERTRAIT_UPCASTABLE,
|
MULTIPLE_SUPERTRAIT_UPCASTABLE,
|
||||||
|
@ -88,11 +88,10 @@ declare_lint_pass!(
|
|||||||
impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints {
|
impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints {
|
||||||
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
|
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
|
||||||
use rustc_middle::ty::ClauseKind;
|
use rustc_middle::ty::ClauseKind;
|
||||||
use rustc_middle::ty::PredicateKind::*;
|
|
||||||
|
|
||||||
let predicates = cx.tcx.explicit_predicates_of(item.owner_id);
|
let predicates = cx.tcx.explicit_predicates_of(item.owner_id);
|
||||||
for &(predicate, span) in predicates.predicates {
|
for &(predicate, span) in predicates.predicates {
|
||||||
let Clause(ClauseKind::Trait(trait_predicate)) = predicate.kind().skip_binder() else {
|
let ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() else {
|
||||||
continue
|
continue
|
||||||
};
|
};
|
||||||
let def_id = trait_predicate.trait_ref.def_id;
|
let def_id = trait_predicate.trait_ref.def_id;
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
#![deny(unused_must_use)]
|
#![deny(unused_must_use)]
|
||||||
|
|
||||||
|
use std::cell::RefCell;
|
||||||
|
|
||||||
use crate::diagnostics::diagnostic_builder::{DiagnosticDeriveBuilder, DiagnosticDeriveKind};
|
use crate::diagnostics::diagnostic_builder::{DiagnosticDeriveBuilder, DiagnosticDeriveKind};
|
||||||
use crate::diagnostics::error::{span_err, DiagnosticDeriveError};
|
use crate::diagnostics::error::{span_err, DiagnosticDeriveError};
|
||||||
use crate::diagnostics::utils::SetOnce;
|
use crate::diagnostics::utils::SetOnce;
|
||||||
@ -28,6 +30,7 @@ impl<'a> DiagnosticDerive<'a> {
|
|||||||
pub(crate) fn into_tokens(self) -> TokenStream {
|
pub(crate) fn into_tokens(self) -> TokenStream {
|
||||||
let DiagnosticDerive { mut structure, mut builder } = self;
|
let DiagnosticDerive { mut structure, mut builder } = self;
|
||||||
|
|
||||||
|
let slugs = RefCell::new(Vec::new());
|
||||||
let implementation = builder.each_variant(&mut structure, |mut builder, variant| {
|
let implementation = builder.each_variant(&mut structure, |mut builder, variant| {
|
||||||
let preamble = builder.preamble(variant);
|
let preamble = builder.preamble(variant);
|
||||||
let body = builder.body(variant);
|
let body = builder.body(variant);
|
||||||
@ -56,6 +59,7 @@ impl<'a> DiagnosticDerive<'a> {
|
|||||||
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
|
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
|
||||||
}
|
}
|
||||||
Some(slug) => {
|
Some(slug) => {
|
||||||
|
slugs.borrow_mut().push(slug.clone());
|
||||||
quote! {
|
quote! {
|
||||||
let mut #diag = #handler.struct_diagnostic(crate::fluent_generated::#slug);
|
let mut #diag = #handler.struct_diagnostic(crate::fluent_generated::#slug);
|
||||||
}
|
}
|
||||||
@ -73,7 +77,8 @@ impl<'a> DiagnosticDerive<'a> {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let DiagnosticDeriveKind::Diagnostic { handler } = &builder.kind else { unreachable!() };
|
let DiagnosticDeriveKind::Diagnostic { handler } = &builder.kind else { unreachable!() };
|
||||||
structure.gen_impl(quote! {
|
|
||||||
|
let mut imp = structure.gen_impl(quote! {
|
||||||
gen impl<'__diagnostic_handler_sess, G>
|
gen impl<'__diagnostic_handler_sess, G>
|
||||||
rustc_errors::IntoDiagnostic<'__diagnostic_handler_sess, G>
|
rustc_errors::IntoDiagnostic<'__diagnostic_handler_sess, G>
|
||||||
for @Self
|
for @Self
|
||||||
@ -89,7 +94,11 @@ impl<'a> DiagnosticDerive<'a> {
|
|||||||
#implementation
|
#implementation
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
});
|
||||||
|
for test in slugs.borrow().iter().map(|s| generate_test(s, &structure)) {
|
||||||
|
imp.extend(test);
|
||||||
|
}
|
||||||
|
imp
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -124,6 +133,7 @@ impl<'a> LintDiagnosticDerive<'a> {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
let slugs = RefCell::new(Vec::new());
|
||||||
let msg = builder.each_variant(&mut structure, |mut builder, variant| {
|
let msg = builder.each_variant(&mut structure, |mut builder, variant| {
|
||||||
// Collect the slug by generating the preamble.
|
// Collect the slug by generating the preamble.
|
||||||
let _ = builder.preamble(variant);
|
let _ = builder.preamble(variant);
|
||||||
@ -148,6 +158,7 @@ impl<'a> LintDiagnosticDerive<'a> {
|
|||||||
DiagnosticDeriveError::ErrorHandled.to_compile_error()
|
DiagnosticDeriveError::ErrorHandled.to_compile_error()
|
||||||
}
|
}
|
||||||
Some(slug) => {
|
Some(slug) => {
|
||||||
|
slugs.borrow_mut().push(slug.clone());
|
||||||
quote! {
|
quote! {
|
||||||
crate::fluent_generated::#slug.into()
|
crate::fluent_generated::#slug.into()
|
||||||
}
|
}
|
||||||
@ -156,7 +167,7 @@ impl<'a> LintDiagnosticDerive<'a> {
|
|||||||
});
|
});
|
||||||
|
|
||||||
let diag = &builder.diag;
|
let diag = &builder.diag;
|
||||||
structure.gen_impl(quote! {
|
let mut imp = structure.gen_impl(quote! {
|
||||||
gen impl<'__a> rustc_errors::DecorateLint<'__a, ()> for @Self {
|
gen impl<'__a> rustc_errors::DecorateLint<'__a, ()> for @Self {
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
fn decorate_lint<'__b>(
|
fn decorate_lint<'__b>(
|
||||||
@ -171,7 +182,12 @@ impl<'a> LintDiagnosticDerive<'a> {
|
|||||||
#msg
|
#msg
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
});
|
||||||
|
for test in slugs.borrow().iter().map(|s| generate_test(s, &structure)) {
|
||||||
|
imp.extend(test);
|
||||||
|
}
|
||||||
|
|
||||||
|
imp
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -198,3 +214,40 @@ impl Mismatch {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Generates a `#[test]` that verifies that all referenced variables
|
||||||
|
/// exist on this structure.
|
||||||
|
fn generate_test(slug: &syn::Path, structure: &Structure<'_>) -> TokenStream {
|
||||||
|
// FIXME: We can't identify variables in a subdiagnostic
|
||||||
|
for field in structure.variants().iter().flat_map(|v| v.ast().fields.iter()) {
|
||||||
|
for attr_name in field.attrs.iter().filter_map(|at| at.path().get_ident()) {
|
||||||
|
if attr_name == "subdiagnostic" {
|
||||||
|
return quote!();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||||
|
// We need to make sure that the same diagnostic slug can be used multiple times without causing an
|
||||||
|
// error, so just have a global counter here.
|
||||||
|
static COUNTER: AtomicUsize = AtomicUsize::new(0);
|
||||||
|
let slug = slug.get_ident().unwrap();
|
||||||
|
let ident = quote::format_ident!("verify_{slug}_{}", COUNTER.fetch_add(1, Ordering::Relaxed));
|
||||||
|
let ref_slug = quote::format_ident!("{slug}_refs");
|
||||||
|
let struct_name = &structure.ast().ident;
|
||||||
|
let variables: Vec<_> = structure
|
||||||
|
.variants()
|
||||||
|
.iter()
|
||||||
|
.flat_map(|v| v.ast().fields.iter().filter_map(|f| f.ident.as_ref().map(|i| i.to_string())))
|
||||||
|
.collect();
|
||||||
|
// tidy errors on `#[test]` outside of test files, so we use `#[test ]` to work around this
|
||||||
|
quote! {
|
||||||
|
#[cfg(test)]
|
||||||
|
#[test ]
|
||||||
|
fn #ident() {
|
||||||
|
let variables = [#(#variables),*];
|
||||||
|
for vref in crate::fluent_generated::#ref_slug {
|
||||||
|
assert!(variables.contains(vref), "{}: variable `{vref}` not found ({})", stringify!(#struct_name), stringify!(#slug));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -636,12 +636,6 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Symbol {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
|
|
||||||
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
|
|
||||||
ty::codec::RefDecodable::decode(d)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
|
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
|
||||||
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
|
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
|
||||||
ty::codec::RefDecodable::decode(d)
|
ty::codec::RefDecodable::decode(d)
|
||||||
|
@ -83,9 +83,9 @@ macro_rules! arena_types {
|
|||||||
rustc_middle::infer::canonical::Canonical<'tcx,
|
rustc_middle::infer::canonical::Canonical<'tcx,
|
||||||
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::FnSig<'tcx>>
|
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::FnSig<'tcx>>
|
||||||
>,
|
>,
|
||||||
[] type_op_normalize_predicate:
|
[] type_op_normalize_clause:
|
||||||
rustc_middle::infer::canonical::Canonical<'tcx,
|
rustc_middle::infer::canonical::Canonical<'tcx,
|
||||||
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Predicate<'tcx>>
|
rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Clause<'tcx>>
|
||||||
>,
|
>,
|
||||||
[] type_op_normalize_ty:
|
[] type_op_normalize_ty:
|
||||||
rustc_middle::infer::canonical::Canonical<'tcx,
|
rustc_middle::infer::canonical::Canonical<'tcx,
|
||||||
|
@ -388,10 +388,11 @@ pub fn struct_lint_level(
|
|||||||
// it'll become a hard error, so we have to emit *something*. Also,
|
// it'll become a hard error, so we have to emit *something*. Also,
|
||||||
// if this lint occurs in the expansion of a macro from an external crate,
|
// if this lint occurs in the expansion of a macro from an external crate,
|
||||||
// allow individual lints to opt-out from being reported.
|
// allow individual lints to opt-out from being reported.
|
||||||
let not_future_incompatible =
|
let incompatible = future_incompatible.is_some_and(|f| f.reason.edition().is_none());
|
||||||
future_incompatible.map(|f| f.reason.edition().is_some()).unwrap_or(true);
|
|
||||||
if not_future_incompatible && !lint.report_in_external_macro {
|
if !incompatible && !lint.report_in_external_macro {
|
||||||
err.cancel();
|
err.cancel();
|
||||||
|
|
||||||
// Don't continue further, since we don't want to have
|
// Don't continue further, since we don't want to have
|
||||||
// `diag_span_note_once` called for a diagnostic that isn't emitted.
|
// `diag_span_note_once` called for a diagnostic that isn't emitted.
|
||||||
return;
|
return;
|
||||||
|
@ -272,7 +272,8 @@ impl<'tcx> Debug for TerminatorKind<'tcx> {
|
|||||||
|
|
||||||
let unwind = match self.unwind() {
|
let unwind = match self.unwind() {
|
||||||
// Not needed or included in successors
|
// Not needed or included in successors
|
||||||
None | Some(UnwindAction::Continue) | Some(UnwindAction::Cleanup(_)) => None,
|
None | Some(UnwindAction::Cleanup(_)) => None,
|
||||||
|
Some(UnwindAction::Continue) => Some("unwind continue"),
|
||||||
Some(UnwindAction::Unreachable) => Some("unwind unreachable"),
|
Some(UnwindAction::Unreachable) => Some("unwind unreachable"),
|
||||||
Some(UnwindAction::Terminate) => Some("unwind terminate"),
|
Some(UnwindAction::Terminate) => Some("unwind terminate"),
|
||||||
};
|
};
|
||||||
|
@ -1133,13 +1133,12 @@ macro_rules! visit_place_fns {
|
|||||||
|
|
||||||
fn visit_projection_elem(
|
fn visit_projection_elem(
|
||||||
&mut self,
|
&mut self,
|
||||||
local: Local,
|
place_ref: PlaceRef<'tcx>,
|
||||||
proj_base: &[PlaceElem<'tcx>],
|
|
||||||
elem: PlaceElem<'tcx>,
|
elem: PlaceElem<'tcx>,
|
||||||
context: PlaceContext,
|
context: PlaceContext,
|
||||||
location: Location,
|
location: Location,
|
||||||
) {
|
) {
|
||||||
self.super_projection_elem(local, proj_base, elem, context, location);
|
self.super_projection_elem(place_ref, elem, context, location);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn super_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
|
fn super_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
|
||||||
@ -1168,15 +1167,13 @@ macro_rules! visit_place_fns {
|
|||||||
location: Location,
|
location: Location,
|
||||||
) {
|
) {
|
||||||
for (base, elem) in place_ref.iter_projections().rev() {
|
for (base, elem) in place_ref.iter_projections().rev() {
|
||||||
let base_proj = base.projection;
|
self.visit_projection_elem(base, elem, context, location);
|
||||||
self.visit_projection_elem(place_ref.local, base_proj, elem, context, location);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn super_projection_elem(
|
fn super_projection_elem(
|
||||||
&mut self,
|
&mut self,
|
||||||
_local: Local,
|
_place_ref: PlaceRef<'tcx>,
|
||||||
_proj_base: &[PlaceElem<'tcx>],
|
|
||||||
elem: PlaceElem<'tcx>,
|
elem: PlaceElem<'tcx>,
|
||||||
_context: PlaceContext,
|
_context: PlaceContext,
|
||||||
location: Location,
|
location: Location,
|
||||||
|
@ -420,7 +420,7 @@ impl<'tcx> Key for (Ty<'tcx>, Ty<'tcx>) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx> Key for &'tcx ty::List<ty::Predicate<'tcx>> {
|
impl<'tcx> Key for &'tcx ty::List<ty::Clause<'tcx>> {
|
||||||
type CacheSelector = DefaultCacheSelector<Self>;
|
type CacheSelector = DefaultCacheSelector<Self>;
|
||||||
|
|
||||||
fn default_span(&self, _: TyCtxt<'_>) -> Span {
|
fn default_span(&self, _: TyCtxt<'_>) -> Span {
|
||||||
|
@ -2031,10 +2031,10 @@ rustc_queries! {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Do not call this query directly: part of the `Normalize` type-op
|
/// Do not call this query directly: part of the `Normalize` type-op
|
||||||
query type_op_normalize_predicate(
|
query type_op_normalize_clause(
|
||||||
goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>
|
goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::Clause<'tcx>>
|
||||||
) -> Result<
|
) -> Result<
|
||||||
&'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ty::Predicate<'tcx>>>,
|
&'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ty::Clause<'tcx>>>,
|
||||||
NoSolution,
|
NoSolution,
|
||||||
> {
|
> {
|
||||||
desc { "normalizing `{:?}`", goal.value.value.value }
|
desc { "normalizing `{:?}`", goal.value.value.value }
|
||||||
@ -2125,7 +2125,7 @@ rustc_queries! {
|
|||||||
desc { "resolving instance `{}`", ty::Instance::new(key.value.0, key.value.1) }
|
desc { "resolving instance `{}`", ty::Instance::new(key.value.0, key.value.1) }
|
||||||
}
|
}
|
||||||
|
|
||||||
query reveal_opaque_types_in_bounds(key: &'tcx ty::List<ty::Predicate<'tcx>>) -> &'tcx ty::List<ty::Predicate<'tcx>> {
|
query reveal_opaque_types_in_bounds(key: &'tcx ty::List<ty::Clause<'tcx>>) -> &'tcx ty::List<ty::Clause<'tcx>> {
|
||||||
desc { "revealing opaque types in `{:?}`", key }
|
desc { "revealing opaque types in `{:?}`", key }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -791,13 +791,6 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
|
|
||||||
#[inline]
|
|
||||||
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
|
|
||||||
RefDecodable::decode(d)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
|
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
|
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
|
||||||
|
@ -410,6 +410,10 @@ pub enum ExprKind<'tcx> {
|
|||||||
Return {
|
Return {
|
||||||
value: Option<ExprId>,
|
value: Option<ExprId>,
|
||||||
},
|
},
|
||||||
|
/// A `become` expression.
|
||||||
|
Become {
|
||||||
|
value: ExprId,
|
||||||
|
},
|
||||||
/// An inline `const` block, e.g. `const {}`.
|
/// An inline `const` block, e.g. `const {}`.
|
||||||
ConstBlock {
|
ConstBlock {
|
||||||
did: DefId,
|
did: DefId,
|
||||||
|
@ -100,6 +100,7 @@ pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Exp
|
|||||||
visitor.visit_expr(&visitor.thir()[value])
|
visitor.visit_expr(&visitor.thir()[value])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Become { value } => visitor.visit_expr(&visitor.thir()[value]),
|
||||||
ConstBlock { did: _, substs: _ } => {}
|
ConstBlock { did: _, substs: _ } => {}
|
||||||
Repeat { value, count: _ } => {
|
Repeat { value, count: _ } => {
|
||||||
visitor.visit_expr(&visitor.thir()[value]);
|
visitor.visit_expr(&visitor.thir()[value]);
|
||||||
|
@ -385,7 +385,7 @@ impl<'tcx> chalk_ir::interner::HasInterner for RustInterner<'tcx> {
|
|||||||
/// A chalk environment and goal.
|
/// A chalk environment and goal.
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable, TypeFoldable, TypeVisitable)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable, TypeFoldable, TypeVisitable)]
|
||||||
pub struct ChalkEnvironmentAndGoal<'tcx> {
|
pub struct ChalkEnvironmentAndGoal<'tcx> {
|
||||||
pub environment: &'tcx ty::List<ty::Predicate<'tcx>>,
|
pub environment: &'tcx ty::List<ty::Clause<'tcx>>,
|
||||||
pub goal: ty::Predicate<'tcx>,
|
pub goal: ty::Predicate<'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -25,9 +25,7 @@ impl<'tcx> Elaborator<'tcx> {
|
|||||||
.super_predicates_of(trait_ref.def_id())
|
.super_predicates_of(trait_ref.def_id())
|
||||||
.predicates
|
.predicates
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flat_map(|(pred, _)| {
|
.flat_map(|(pred, _)| pred.subst_supertrait(self.tcx, &trait_ref).as_trait_clause())
|
||||||
pred.subst_supertrait(self.tcx, &trait_ref).to_opt_poly_trait_pred()
|
|
||||||
})
|
|
||||||
.map(|t| t.map_bound(|pred| pred.trait_ref))
|
.map(|t| t.map_bound(|pred| pred.trait_ref))
|
||||||
.filter(|supertrait_ref| self.visited.insert(*supertrait_ref));
|
.filter(|supertrait_ref| self.visited.insert(*supertrait_ref));
|
||||||
|
|
||||||
|
@ -368,16 +368,6 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for AdtDef<'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
|
|
||||||
for [(ty::Predicate<'tcx>, Span)]
|
|
||||||
{
|
|
||||||
fn decode(decoder: &mut D) -> &'tcx Self {
|
|
||||||
decoder.interner().arena.alloc_from_iter(
|
|
||||||
(0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for [(ty::Clause<'tcx>, Span)] {
|
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for [(ty::Clause<'tcx>, Span)] {
|
||||||
fn decode(decoder: &mut D) -> &'tcx Self {
|
fn decode(decoder: &mut D) -> &'tcx Self {
|
||||||
decoder.interner().arena.alloc_from_iter(
|
decoder.interner().arena.alloc_from_iter(
|
||||||
@ -406,11 +396,11 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for ty::List<ty
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for ty::List<ty::Predicate<'tcx>> {
|
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for ty::List<ty::Clause<'tcx>> {
|
||||||
fn decode(decoder: &mut D) -> &'tcx Self {
|
fn decode(decoder: &mut D) -> &'tcx Self {
|
||||||
let len = decoder.read_usize();
|
let len = decoder.read_usize();
|
||||||
decoder.interner().mk_predicates_from_iter(
|
decoder.interner().mk_clauses_from_iter(
|
||||||
(0..len).map::<ty::Predicate<'tcx>, _>(|_| Decodable::decode(decoder)),
|
(0..len).map::<ty::Clause<'tcx>, _>(|_| Decodable::decode(decoder)),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -434,7 +424,7 @@ impl_decodable_via_ref! {
|
|||||||
&'tcx mir::BorrowCheckResult<'tcx>,
|
&'tcx mir::BorrowCheckResult<'tcx>,
|
||||||
&'tcx mir::coverage::CodeRegion,
|
&'tcx mir::coverage::CodeRegion,
|
||||||
&'tcx ty::List<ty::BoundVariableKind>,
|
&'tcx ty::List<ty::BoundVariableKind>,
|
||||||
&'tcx ty::List<ty::Predicate<'tcx>>,
|
&'tcx ty::List<ty::Clause<'tcx>>,
|
||||||
&'tcx ty::List<FieldIdx>,
|
&'tcx ty::List<FieldIdx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -141,8 +141,6 @@ pub struct CtxtInterners<'tcx> {
|
|||||||
region: InternedSet<'tcx, RegionKind<'tcx>>,
|
region: InternedSet<'tcx, RegionKind<'tcx>>,
|
||||||
poly_existential_predicates: InternedSet<'tcx, List<PolyExistentialPredicate<'tcx>>>,
|
poly_existential_predicates: InternedSet<'tcx, List<PolyExistentialPredicate<'tcx>>>,
|
||||||
predicate: InternedSet<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
|
predicate: InternedSet<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
|
||||||
// FIXME(clause): remove this when all usages are moved to predicate
|
|
||||||
predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
|
|
||||||
clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
|
clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
|
||||||
projs: InternedSet<'tcx, List<ProjectionKind>>,
|
projs: InternedSet<'tcx, List<ProjectionKind>>,
|
||||||
place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
|
place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
|
||||||
@ -168,7 +166,6 @@ impl<'tcx> CtxtInterners<'tcx> {
|
|||||||
poly_existential_predicates: Default::default(),
|
poly_existential_predicates: Default::default(),
|
||||||
canonical_var_infos: Default::default(),
|
canonical_var_infos: Default::default(),
|
||||||
predicate: Default::default(),
|
predicate: Default::default(),
|
||||||
predicates: Default::default(),
|
|
||||||
clauses: Default::default(),
|
clauses: Default::default(),
|
||||||
projs: Default::default(),
|
projs: Default::default(),
|
||||||
place_elems: Default::default(),
|
place_elems: Default::default(),
|
||||||
@ -1260,10 +1257,11 @@ nop_lift! {region; Region<'a> => Region<'tcx>}
 nop_lift! {const_; Const<'a> => Const<'tcx>}
 nop_lift! {const_allocation; ConstAllocation<'a> => ConstAllocation<'tcx>}
 nop_lift! {predicate; Predicate<'a> => Predicate<'tcx>}
+nop_lift! {predicate; Clause<'a> => Clause<'tcx>}
 
 nop_list_lift! {type_lists; Ty<'a> => Ty<'tcx>}
 nop_list_lift! {poly_existential_predicates; PolyExistentialPredicate<'a> => PolyExistentialPredicate<'tcx>}
-nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>}
+nop_list_lift! {clauses; Clause<'a> => Clause<'tcx>}
 nop_list_lift! {canonical_var_infos; CanonicalVarInfo<'a> => CanonicalVarInfo<'tcx>}
 nop_list_lift! {projs; ProjectionKind => ProjectionKind}
 nop_list_lift! {bound_variable_kinds; ty::BoundVariableKind => ty::BoundVariableKind}
@ -1541,7 +1539,6 @@ slice_interners!(
     type_lists: pub mk_type_list(Ty<'tcx>),
     canonical_var_infos: pub mk_canonical_var_infos(CanonicalVarInfo<'tcx>),
     poly_existential_predicates: intern_poly_existential_predicates(PolyExistentialPredicate<'tcx>),
-    predicates: intern_predicates(Predicate<'tcx>),
     clauses: intern_clauses(Clause<'tcx>),
     projs: pub mk_projs(ProjectionKind),
     place_elems: pub mk_place_elems(PlaceElem<'tcx>),
@ -1597,9 +1594,7 @@ impl<'tcx> TyCtxt<'tcx> {
             let generic_predicates = self.super_predicates_of(trait_did);
 
             for (predicate, _) in generic_predicates.predicates {
-                if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(data)) =
-                    predicate.kind().skip_binder()
-                {
+                if let ty::ClauseKind::Trait(data) = predicate.kind().skip_binder() {
                     if set.insert(data.def_id()) {
                         stack.push(data.def_id());
                     }
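For context, the loop above is a de-duplicated worklist traversal over the supertrait graph: a `HashSet` records what has already been seen, a `Vec` holds what still needs visiting. A minimal standalone sketch of the same pattern, with a hypothetical `supertraits_of` lookup standing in for `super_predicates_of`:

use std::collections::HashSet;

// Hypothetical trait graph: each trait id maps to the ids of its supertraits.
fn supertraits_of(id: u32) -> Vec<u32> {
    match id {
        0 => vec![1, 2], // e.g. trait A: B + C
        1 => vec![2],    // trait B: C
        _ => vec![],
    }
}

/// Walk the supertrait graph from `root`, visiting every trait exactly once.
/// The `HashSet` deduplicates, the `Vec` is the work list.
fn all_supertraits(root: u32) -> Vec<u32> {
    let mut seen = HashSet::from([root]);
    let mut stack = vec![root];
    let mut order = Vec::new();
    while let Some(id) = stack.pop() {
        order.push(id);
        for sup in supertraits_of(id) {
            if seen.insert(sup) {
                stack.push(sup);
            }
        }
    }
    order
}

fn main() {
    // Visits 0 and its supertraits, each only once despite the diamond.
    println!("{:?}", all_supertraits(0));
}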
@ -2087,18 +2082,11 @@ impl<'tcx> TyCtxt<'tcx> {
         self.intern_poly_existential_predicates(eps)
     }
 
-    pub fn mk_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> {
+    pub fn mk_clauses(self, clauses: &[Clause<'tcx>]) -> &'tcx List<Clause<'tcx>> {
         // FIXME consider asking the input slice to be sorted to avoid
         // re-interning permutations, in which case that would be asserted
         // here.
-        self.intern_predicates(preds)
-    }
-
-    pub fn mk_clauses(self, preds: &[Clause<'tcx>]) -> &'tcx List<Clause<'tcx>> {
-        // FIXME consider asking the input slice to be sorted to avoid
-        // re-interning permutations, in which case that would be asserted
-        // here.
-        self.intern_clauses(preds)
+        self.intern_clauses(clauses)
     }
 
     pub fn mk_const_list_from_iter<I, T>(self, iter: I) -> T::Output
@ -2144,14 +2132,6 @@ impl<'tcx> TyCtxt<'tcx> {
         T::collect_and_apply(iter, |xs| self.mk_poly_existential_predicates(xs))
     }
 
-    pub fn mk_predicates_from_iter<I, T>(self, iter: I) -> T::Output
-    where
-        I: Iterator<Item = T>,
-        T: CollectAndApply<Predicate<'tcx>, &'tcx List<Predicate<'tcx>>>,
-    {
-        T::collect_and_apply(iter, |xs| self.mk_predicates(xs))
-    }
-
     pub fn mk_clauses_from_iter<I, T>(self, iter: I) -> T::Output
     where
         I: Iterator<Item = T>,
@ -287,7 +287,7 @@ impl FlagComputation {
                 self.add_const(expected);
                 self.add_const(found);
             }
-            ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
+            ty::PredicateKind::Clause(ty::ClauseKind::TypeWellFormedFromEnv(ty)) => {
                 self.add_ty(ty);
             }
             ty::PredicateKind::Ambiguous => {}
@ -6,7 +6,7 @@ use rustc_hir::def_id::DefId;
 use rustc_span::symbol::{kw, Symbol};
 use rustc_span::Span;
 
-use super::{EarlyBoundRegion, InstantiatedPredicates, ParamConst, ParamTy, Predicate, TyCtxt};
+use super::{Clause, EarlyBoundRegion, InstantiatedPredicates, ParamConst, ParamTy, TyCtxt};
 
 #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)]
 pub enum GenericParamDefKind {
@ -323,7 +323,7 @@ impl<'tcx> Generics {
 #[derive(Copy, Clone, Default, Debug, TyEncodable, TyDecodable, HashStable)]
 pub struct GenericPredicates<'tcx> {
     pub parent: Option<DefId>,
-    pub predicates: &'tcx [(Predicate<'tcx>, Span)],
+    pub predicates: &'tcx [(Clause<'tcx>, Span)],
 }
 
 impl<'tcx> GenericPredicates<'tcx> {
@ -341,8 +341,7 @@ impl<'tcx> GenericPredicates<'tcx> {
         &self,
         tcx: TyCtxt<'tcx>,
         substs: SubstsRef<'tcx>,
-    ) -> impl Iterator<Item = (Predicate<'tcx>, Span)> + DoubleEndedIterator + ExactSizeIterator
-    {
+    ) -> impl Iterator<Item = (Clause<'tcx>, Span)> + DoubleEndedIterator + ExactSizeIterator {
         EarlyBinder::bind(self.predicates).subst_iter_copied(tcx, substs)
     }
 
@ -62,7 +62,18 @@ impl<'tcx> InhabitedPredicate<'tcx> {
                 Some(1..) => Ok(false),
             },
             Self::NotInModule(id) => in_module(id).map(|in_mod| !in_mod),
-            Self::GenericType(_) => Ok(true),
+            // `t` may be a projection, for which `inhabited_predicate` returns a `GenericType`. As
+            // we have a param_env available, we can do better.
+            Self::GenericType(t) => {
+                let normalized_pred = tcx
+                    .try_normalize_erasing_regions(param_env, t)
+                    .map_or(self, |t| t.inhabited_predicate(tcx));
+                match normalized_pred {
+                    // We don't have more information than we started with, so consider inhabited.
+                    Self::GenericType(_) => Ok(true),
+                    pred => pred.apply_inner(tcx, param_env, in_module),
+                }
+            }
             Self::And([a, b]) => try_and(a, b, |x| x.apply_inner(tcx, param_env, in_module)),
             Self::Or([a, b]) => try_or(a, b, |x| x.apply_inner(tcx, param_env, in_module)),
         }
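The new `GenericType` arm tries to normalize the type first and only falls back to the conservative "assume inhabited" answer when normalization yields nothing new. A toy sketch of that fallback shape, using a hypothetical `Inhabited` enum and `normalize` function rather than the real `InhabitedPredicate` machinery:

/// Simplified stand-in for `InhabitedPredicate`: either a known answer or an
/// opaque generic we cannot decide locally.
enum Inhabited {
    Always,
    Never,
    Generic(&'static str),
}

/// Hypothetical normalization step: resolve a generic when the environment
/// knows more about it, otherwise return it unchanged.
fn normalize(p: Inhabited) -> Inhabited {
    match p {
        Inhabited::Generic("<T as Project>::Output") => Inhabited::Never,
        other => other,
    }
}

/// The fallback shape from the hunk above: normalize first, and only answer
/// "inhabited" conservatively if normalization taught us nothing new.
fn is_inhabited(p: Inhabited) -> bool {
    match normalize(p) {
        Inhabited::Always => true,
        Inhabited::Never => false,
        // Still generic after normalization: no extra information, assume inhabited.
        Inhabited::Generic(_) => true,
    }
}

fn main() {
    assert!(is_inhabited(Inhabited::Generic("T")));
    assert!(!is_inhabited(Inhabited::Generic("<T as Project>::Output")));
    println!("ok");
}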
@ -555,7 +555,7 @@ impl<'tcx> Predicate<'tcx> {
             | PredicateKind::Clause(ClauseKind::ConstEvaluatable(_))
             | PredicateKind::ConstEquate(_, _)
             | PredicateKind::Ambiguous
-            | PredicateKind::TypeWellFormedFromEnv(_) => true,
+            | PredicateKind::Clause(ClauseKind::TypeWellFormedFromEnv(_)) => true,
         }
     }
 }
@ -566,6 +566,12 @@ impl rustc_errors::IntoDiagnosticArg for Predicate<'_> {
     }
 }
 
+impl rustc_errors::IntoDiagnosticArg for Clause<'_> {
+    fn into_diagnostic_arg(self) -> rustc_errors::DiagnosticArgValue<'static> {
+        rustc_errors::DiagnosticArgValue::Str(std::borrow::Cow::Owned(self.to_string()))
+    }
+}
+
 /// A subset of predicates which can be assumed by the trait solver. They show up in
 /// an item's where clauses, hence the name `Clause`, and may either be user-written
 /// (such as traits) or may be inserted during lowering.
@ -620,6 +626,10 @@ impl<'tcx> Clause<'tcx> {
             None
         }
     }
+
+    pub fn without_const(self, tcx: TyCtxt<'tcx>) -> Clause<'tcx> {
+        self.as_predicate().without_const(tcx).expect_clause()
+    }
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
@ -651,6 +661,11 @@ pub enum ClauseKind<'tcx> {
 
     /// Constant initializer must evaluate successfully.
     ConstEvaluatable(ty::Const<'tcx>),
+
+    /// Represents a type found in the environment that we can use for implied bounds.
+    ///
+    /// Only used for Chalk.
+    TypeWellFormedFromEnv(Ty<'tcx>),
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
@ -687,11 +702,6 @@ pub enum PredicateKind<'tcx> {
     /// Constants must be equal. The first component is the const that is expected.
     ConstEquate(Const<'tcx>, Const<'tcx>),
 
-    /// Represents a type found in the environment that we can use for implied bounds.
-    ///
-    /// Only used for Chalk.
-    TypeWellFormedFromEnv(Ty<'tcx>),
-
     /// A marker predicate that is always ambiguous.
     /// Used for coherence to mark opaque types as possibly equal to each other but ambiguous.
     Ambiguous,
@ -730,11 +740,10 @@ pub struct CratePredicatesMap<'tcx> {
     /// For each struct with outlive bounds, maps to a vector of the
     /// predicate of its outlive bounds. If an item has no outlives
     /// bounds, it will have no entry.
-    // FIXME(clause): should this be a `Clause`?
     pub predicates: FxHashMap<DefId, &'tcx [(Clause<'tcx>, Span)]>,
 }
 
-impl<'tcx> Predicate<'tcx> {
+impl<'tcx> Clause<'tcx> {
     /// Performs a substitution suitable for going from a
     /// poly-trait-ref to supertraits that must hold if that
     /// poly-trait-ref holds. This is slightly different from a normal
@ -744,7 +753,7 @@ impl<'tcx> Predicate<'tcx> {
         self,
         tcx: TyCtxt<'tcx>,
         trait_ref: &ty::PolyTraitRef<'tcx>,
-    ) -> Predicate<'tcx> {
+    ) -> Clause<'tcx> {
         // The interaction between HRTB and supertraits is not entirely
         // obvious. Let me walk you (and myself) through an example.
         //
@ -830,7 +839,13 @@ impl<'tcx> Predicate<'tcx> {
         // 3) ['x] + ['b] -> ['x, 'b]
         let bound_vars =
             tcx.mk_bound_variable_kinds_from_iter(trait_bound_vars.iter().chain(pred_bound_vars));
-        tcx.reuse_or_mk_predicate(self, ty::Binder::bind_with_vars(new, bound_vars))
+
+        // FIXME: Is it really perf sensitive to use reuse_or_mk_predicate here?
+        tcx.reuse_or_mk_predicate(
+            self.as_predicate(),
+            ty::Binder::bind_with_vars(PredicateKind::Clause(new), bound_vars),
+        )
+        .expect_clause()
     }
 }
 
@ -1301,6 +1316,14 @@ impl<'tcx> ToPredicate<'tcx, Clause<'tcx>> for TraitRef<'tcx> {
     }
 }
 
+impl<'tcx> ToPredicate<'tcx, Clause<'tcx>> for TraitPredicate<'tcx> {
+    #[inline(always)]
+    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Clause<'tcx> {
+        let p: Predicate<'tcx> = self.to_predicate(tcx);
+        p.expect_clause()
+    }
+}
+
 impl<'tcx> ToPredicate<'tcx> for Binder<'tcx, TraitRef<'tcx>> {
     #[inline(always)]
     fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
@ -1402,7 +1425,7 @@ impl<'tcx> Predicate<'tcx> {
             | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..))
             | PredicateKind::ConstEquate(..)
             | PredicateKind::Ambiguous
-            | PredicateKind::TypeWellFormedFromEnv(..) => None,
+            | PredicateKind::Clause(ClauseKind::TypeWellFormedFromEnv(..)) => None,
         }
     }
 
@ -1423,7 +1446,7 @@ impl<'tcx> Predicate<'tcx> {
             | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..))
             | PredicateKind::ConstEquate(..)
             | PredicateKind::Ambiguous
-            | PredicateKind::TypeWellFormedFromEnv(..) => None,
+            | PredicateKind::Clause(ClauseKind::TypeWellFormedFromEnv(..)) => None,
         }
     }
 
@ -1444,7 +1467,7 @@ impl<'tcx> Predicate<'tcx> {
             | PredicateKind::Clause(ClauseKind::ConstEvaluatable(..))
             | PredicateKind::ConstEquate(..)
             | PredicateKind::Ambiguous
-            | PredicateKind::TypeWellFormedFromEnv(..) => None,
+            | PredicateKind::Clause(ClauseKind::TypeWellFormedFromEnv(..)) => None,
         }
     }
 
@ -1456,12 +1479,11 @@ impl<'tcx> Predicate<'tcx> {
         }
     }
 
-    /// Turns a predicate into a clause without checking that it is a `PredicateKind::Clause`
-    /// first. This will ICE when methods are called on `Clause`.
+    /// Assert that the predicate is a clause.
     pub fn expect_clause(self) -> Clause<'tcx> {
         match self.kind().skip_binder() {
             PredicateKind::Clause(..) => Clause(self.0),
-            _ => bug!(),
+            _ => bug!("{self} is not a clause"),
         }
     }
 }
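`expect_clause` is a checked downcast, and the new `bug!("{self} is not a clause")` message names the offending predicate instead of panicking without context. A standalone sketch of the same idea, using toy `Predicate` and `Clause` types rather than the interned rustc ones:

/// Toy mirror of the Predicate/Clause split: every clause is a predicate,
/// but not every predicate is a clause.
#[derive(Debug)]
enum Predicate {
    Clause(String),
    Ambiguous,
}

#[derive(Debug)]
struct Clause(String);

impl Predicate {
    /// Checked downcast: the panic message names the offending value, which is
    /// the point of `bug!("{self} is not a clause")` over a bare `bug!()`.
    fn expect_clause(self) -> Clause {
        match self {
            Predicate::Clause(c) => Clause(c),
            other => panic!("{other:?} is not a clause"),
        }
    }
}

fn main() {
    let p = Predicate::Clause("T: Sized".to_owned());
    println!("{:?}", p.expect_clause());

    let q = Predicate::Ambiguous;
    // q.expect_clause() would panic with "Ambiguous is not a clause".
    drop(q);
}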
@ -1487,7 +1509,7 @@ impl<'tcx> Predicate<'tcx> {
 /// [usize:Bar<isize>]]`.
 #[derive(Clone, Debug, TypeFoldable, TypeVisitable)]
 pub struct InstantiatedPredicates<'tcx> {
-    pub predicates: Vec<Predicate<'tcx>>,
+    pub predicates: Vec<Clause<'tcx>>,
     pub spans: Vec<Span>,
 }
 
@ -1506,9 +1528,9 @@ impl<'tcx> InstantiatedPredicates<'tcx> {
 }
 
 impl<'tcx> IntoIterator for InstantiatedPredicates<'tcx> {
-    type Item = (Predicate<'tcx>, Span);
+    type Item = (Clause<'tcx>, Span);
 
-    type IntoIter = std::iter::Zip<std::vec::IntoIter<Predicate<'tcx>>, std::vec::IntoIter<Span>>;
+    type IntoIter = std::iter::Zip<std::vec::IntoIter<Clause<'tcx>>, std::vec::IntoIter<Span>>;
 
     fn into_iter(self) -> Self::IntoIter {
         debug_assert_eq!(self.predicates.len(), self.spans.len());
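The `IntoIterator` impls above just zip two parallel vectors, now carrying `Clause` instead of `Predicate`. A self-contained sketch of that shape, with hypothetical field types but the same `debug_assert_eq!` length check:

/// Toy mirror of `InstantiatedPredicates`: two parallel vectors iterated as pairs.
struct Instantiated {
    clauses: Vec<String>,
    spans: Vec<usize>,
}

impl IntoIterator for Instantiated {
    type Item = (String, usize);
    type IntoIter = std::iter::Zip<std::vec::IntoIter<String>, std::vec::IntoIter<usize>>;

    fn into_iter(self) -> Self::IntoIter {
        // Parallel vectors must stay the same length for zip to be lossless.
        debug_assert_eq!(self.clauses.len(), self.spans.len());
        self.clauses.into_iter().zip(self.spans)
    }
}

fn main() {
    let inst = Instantiated {
        clauses: vec!["T: Clone".into(), "T: Send".into()],
        spans: vec![10, 42],
    };
    for (clause, span) in inst {
        println!("{clause} @ {span}");
    }
}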
@ -1517,10 +1539,10 @@ impl<'tcx> IntoIterator for InstantiatedPredicates<'tcx> {
     }
 
 impl<'a, 'tcx> IntoIterator for &'a InstantiatedPredicates<'tcx> {
-    type Item = (Predicate<'tcx>, Span);
+    type Item = (Clause<'tcx>, Span);
 
     type IntoIter = std::iter::Zip<
-        std::iter::Copied<std::slice::Iter<'a, Predicate<'tcx>>>,
+        std::iter::Copied<std::slice::Iter<'a, Clause<'tcx>>>,
         std::iter::Copied<std::slice::Iter<'a, Span>>,
     >;
 
@ -1670,7 +1692,7 @@ pub struct ParamEnv<'tcx> {
     /// want `Reveal::All`.
     ///
     /// Note: This is packed, use the reveal() method to access it.
-    packed: CopyTaggedPtr<&'tcx List<Predicate<'tcx>>, ParamTag, true>,
+    packed: CopyTaggedPtr<&'tcx List<Clause<'tcx>>, ParamTag, true>,
 }
 
 #[derive(Copy, Clone)]
@ -1736,7 +1758,7 @@ impl<'tcx> ParamEnv<'tcx> {
     }
 
     #[inline]
-    pub fn caller_bounds(self) -> &'tcx List<Predicate<'tcx>> {
+    pub fn caller_bounds(self) -> &'tcx List<Clause<'tcx>> {
         self.packed.pointer()
     }
 
@ -1770,7 +1792,7 @@ impl<'tcx> ParamEnv<'tcx> {
     /// Construct a trait environment with the given set of predicates.
     #[inline]
     pub fn new(
-        caller_bounds: &'tcx List<Predicate<'tcx>>,
+        caller_bounds: &'tcx List<Clause<'tcx>>,
         reveal: Reveal,
         constness: hir::Constness,
     ) -> Self {
@ -2864,20 +2864,38 @@ define_print_and_forward_display! {
         p!(print(binder))
     }
 
+    ty::Clause<'tcx> {
+        p!(print(self.kind()))
+    }
+
+    ty::ClauseKind<'tcx> {
+        match *self {
+            ty::ClauseKind::Trait(ref data) => {
+                p!(print(data))
+            }
+            ty::ClauseKind::RegionOutlives(predicate) => p!(print(predicate)),
+            ty::ClauseKind::TypeOutlives(predicate) => p!(print(predicate)),
+            ty::ClauseKind::Projection(predicate) => p!(print(predicate)),
+            ty::ClauseKind::ConstArgHasType(ct, ty) => {
+                p!("the constant `", print(ct), "` has type `", print(ty), "`")
+            },
+            ty::ClauseKind::WellFormed(arg) => p!(print(arg), " well-formed"),
+            ty::ClauseKind::ConstEvaluatable(ct) => {
+                p!("the constant `", print(ct), "` can be evaluated")
+            }
+            ty::ClauseKind::TypeWellFormedFromEnv(ty) => {
+                p!("the type `", print(ty), "` is found in the environment")
+            }
+        }
+    }
+
     ty::PredicateKind<'tcx> {
         match *self {
-            ty::PredicateKind::Clause(ty::ClauseKind::Trait(ref data)) => {
+            ty::PredicateKind::Clause(data) => {
                 p!(print(data))
             }
             ty::PredicateKind::Subtype(predicate) => p!(print(predicate)),
             ty::PredicateKind::Coerce(predicate) => p!(print(predicate)),
-            ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(predicate)) => p!(print(predicate)),
-            ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(predicate)) => p!(print(predicate)),
-            ty::PredicateKind::Clause(ty::ClauseKind::Projection(predicate)) => p!(print(predicate)),
-            ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => {
-                p!("the constant `", print(ct), "` has type `", print(ty), "`")
-            },
-            ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => p!(print(arg), " well-formed"),
             ty::PredicateKind::ObjectSafe(trait_def_id) => {
                 p!("the trait `", print_def_path(trait_def_id, &[]), "` is object-safe")
             }
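Printing is now split: `ty::ClauseKind` gets its own arm-per-variant formatter, and `PredicateKind::Clause` simply delegates to it. A minimal standalone `Display` impl showing the same per-variant pattern, over a cut-down hypothetical `ClauseKind` rather than rustc's printer macros:

use std::fmt;

/// Simplified clause kinds, enough to show the per-variant printing split.
enum ClauseKind {
    Trait(String),
    WellFormed(String),
    ConstEvaluatable(String),
}

impl fmt::Display for ClauseKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ClauseKind::Trait(data) => write!(f, "{data}"),
            ClauseKind::WellFormed(arg) => write!(f, "{arg} well-formed"),
            ClauseKind::ConstEvaluatable(ct) => {
                write!(f, "the constant `{ct}` can be evaluated")
            }
        }
    }
}

fn main() {
    println!("{}", ClauseKind::Trait("T: Ord".into()));
    println!("{}", ClauseKind::WellFormed("&'a T".into()));
    println!("{}", ClauseKind::ConstEvaluatable("N + 1".into()));
}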
@ -2886,15 +2904,9 @@ define_print_and_forward_display! {
                 print_value_path(closure_def_id, &[]),
                 write("` implements the trait `{}`", kind)
             ),
-            ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(ct)) => {
-                p!("the constant `", print(ct), "` can be evaluated")
-            }
             ty::PredicateKind::ConstEquate(c1, c2) => {
                 p!("the constant `", print(c1), "` equals `", print(c2), "`")
             }
-            ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
-                p!("the type `", print(ty), "` is found in the environment")
-            }
             ty::PredicateKind::Ambiguous => p!("ambiguous"),
             ty::PredicateKind::AliasRelate(t1, t2, dir) => p!(print(t1), write(" {} ", dir), print(t2)),
         }
@ -189,6 +189,9 @@ impl<'tcx> fmt::Debug for ty::ClauseKind<'tcx> {
             ty::ClauseKind::ConstEvaluatable(ct) => {
                 write!(f, "ConstEvaluatable({ct:?})")
             }
+            ty::ClauseKind::TypeWellFormedFromEnv(ty) => {
+                write!(f, "TypeWellFormedFromEnv({:?})", ty)
+            }
         }
     }
 }
@ -206,9 +209,6 @@ impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> {
                 write!(f, "ClosureKind({:?}, {:?}, {:?})", closure_def_id, closure_substs, kind)
             }
             ty::PredicateKind::ConstEquate(c1, c2) => write!(f, "ConstEquate({:?}, {:?})", c1, c2),
-            ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
-                write!(f, "TypeWellFormedFromEnv({:?})", ty)
-            }
             ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"),
             ty::PredicateKind::AliasRelate(t1, t2, dir) => {
                 write!(f, "AliasRelate({t1:?}, {dir:?}, {t2:?})")
@ -701,15 +701,6 @@ impl<'tcx> TypeSuperVisitable<TyCtxt<'tcx>> for ty::Predicate<'tcx> {
     }
 }
 
-impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for &'tcx ty::List<ty::Predicate<'tcx>> {
-    fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
-        self,
-        folder: &mut F,
-    ) -> Result<Self, F::Error> {
-        ty::util::fold_list(self, folder, |tcx, v| tcx.mk_predicates(v))
-    }
-}
-
 impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for &'tcx ty::List<ty::Clause<'tcx>> {
     fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
         self,
@ -715,7 +715,7 @@ impl<'tcx> PolyExistentialPredicate<'tcx> {
     /// Given an existential predicate like `?Self: PartialEq<u32>` (e.g., derived from `dyn PartialEq<u32>`),
     /// and a concrete type `self_ty`, returns a full predicate where the existentially quantified variable `?Self`
     /// has been replaced with `self_ty` (e.g., `self_ty: PartialEq<u32>`, in our example).
-    pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> {
+    pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Clause<'tcx> {
         use crate::ty::ToPredicate;
         match self.skip_binder() {
             ExistentialPredicate::Trait(tr) => {
Some files were not shown because too many files have changed in this diff.