Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-02 07:22:42 +00:00)

Commit ed29546a27: Merge from rustc

.github/workflows/dependencies.yml (vendored, 19 changed lines)
@@ -6,8 +6,6 @@ on:
schedule:
# Run weekly
- cron: '0 0 * * Sun'
# Re-bump deps every 4 hours
- cron: '0 */4 * * *'
workflow_dispatch:
# Needed so we can run it manually
permissions:
@@ -42,7 +40,7 @@ jobs:

# Exit with error if open and S-waiting-on-bors
if [[ "$STATE" == "OPEN" && "$WAITING_ON_BORS" == "true" ]]; then
gh run cancel ${{ github.run_id }}
exit 1
fi

update:
@@ -65,10 +63,7 @@ jobs:

- name: cargo update
# Remove first line that always just says "Updating crates.io index"
# If there are no changes, cancel the job here
run: |
cargo update 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log
git status --porcelain | grep -q Cargo.lock || gh run cancel ${{ github.run_id }}
run: cargo update 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log
- name: upload Cargo.lock artifact for use in PR
uses: actions/upload-artifact@v4
with:
@@ -95,11 +90,11 @@ jobs:
uses: actions/checkout@v4

- name: download Cargo.lock from update job
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: Cargo-lock
- name: download cargo-update log from update job
uses: actions/download-artifact@v3
uses: actions/download-artifact@v4
with:
name: cargo-updates

@@ -134,14 +129,14 @@ jobs:
# Exit with error if PR is closed
STATE=$(gh pr view cargo_update --repo $GITHUB_REPOSITORY --json state --jq '.state')
if [[ "$STATE" != "OPEN" ]]; then
gh run cancel ${{ github.run_id }}
exit 1
fi

gh pr edit cargo_update --title "${PR_TITLE}" --body-file body.md --repo $GITHUB_REPOSITORY

- name: open new pull request
# Only run if there wasn't an existing PR and if this is the weekly run
if: steps.edit.outcome != 'success' && github.event.schedule == '0 0 * * Sun'
# Only run if there wasn't an existing PR
if: steps.edit.outcome != 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh pr create --title "${PR_TITLE}" --body-file body.md --repo $GITHUB_REPOSITORY
.mailmap (1 changed line)

@@ -324,6 +324,7 @@ Katze <binary@benary.org>
Keegan McAllister <mcallister.keegan@gmail.com> <kmcallister@mozilla.com>
Kerem Kat <keremkat@gmail.com>
Kevin Butler <haqkrs@gmail.com>
Kevin Reid <kpreid@switchb.org> <kpreid@google.com>
Kevin Jiang <kwj2104@columbia.edu>
Kornel Lesiński <kornel@geekhood.net>
Krishna Sai Veera Reddy <veerareddy@email.arizona.edu>
@@ -151,8 +151,8 @@ toolchain.
directory and uncomment the line `MSYS2_PATH_TYPE=inherit`.

You could install and use MSYS2's version of git instead with `pacman`,
however this is not recommended as it's excrutiatingly slow, and not frequently
tested for compatability.
however this is not recommended as it's excruciatingly slow, and not frequently
tested for compatibility.

3. Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit
or 64-bit Rust) either from your start menu, or by running `mingw64.exe`
RELEASES.md (10 changed lines)

@@ -1,3 +1,13 @@
Version 1.77.1 (2024-03-28)
===========================

<a id="1.77.1"></a>

- [Revert stripping debuginfo by default for Windows](https://github.com/rust-lang/cargo/pull/13654)
This fixes a regression in 1.77 by reverting to the previous default.
Platforms other than Windows are not affected.
- Internal: [Fix heading anchor rendering in doc pages](https://github.com/rust-lang/rust/pull/122693)

Version 1.77.0 (2024-03-21)
==========================
@@ -75,7 +75,7 @@ pub(crate) struct FixupContext {
}

/// The default amount of fixing is minimal fixing. Fixups should be turned on
/// in a targetted fashion where needed.
/// in a targeted fashion where needed.
impl Default for FixupContext {
fn default() -> Self {
FixupContext {
@@ -4,6 +4,7 @@
#![allow(rustc::untranslatable_diagnostic)]

use either::Either;
use hir::ClosureKind;
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{codes::*, struct_span_code_err, Applicability, Diag, MultiSpan};
@@ -463,6 +464,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else if let UseSpans::FnSelfUse { kind: CallKind::Normal { .. }, .. } = move_spans
{
// We already suggest cloning for these cases in `explain_captures`.
} else if let UseSpans::ClosureUse {
closure_kind:
ClosureKind::Coroutine(CoroutineKind::Desugared(_, CoroutineSource::Block)),
args_span: _,
capture_kind_span: _,
path_span,
} = move_spans
{
self.suggest_cloning(err, ty, expr, path_span);
} else if self.suggest_hoisting_call_outside_loop(err, expr) {
// The place where the the type moves would be misleading to suggest clone.
// #121466
@@ -621,7 +631,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}

// FIXME: We make sure that this is a normal top-level binding,
// but we could suggest `todo!()` for all uninitalized bindings in the pattern pattern
// but we could suggest `todo!()` for all uninitialized bindings in the pattern pattern
if let hir::StmtKind::Let(hir::LetStmt { span, ty, init: None, pat, .. }) =
&ex.kind
&& let hir::PatKind::Binding(..) = pat.kind
@@ -749,7 +759,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
true
}

/// In a move error that occurs on a call wihtin a loop, we try to identify cases where cloning
/// In a move error that occurs on a call within a loop, we try to identify cases where cloning
/// the value would lead to a logic error. We infer these cases by seeing if the moved value is
/// part of the logic to break the loop, either through an explicit `break` or if the expression
/// is part of a `while let`.
@@ -950,7 +960,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
{
// FIXME: We could check that the call's *parent* takes `&mut val` to make the
// suggestion more targeted to the `mk_iter(val).next()` case. Maybe do that only to
// check for wheter to suggest `let value` or `let mut value`.
// check for whether to suggest `let value` or `let mut value`.

let span = in_loop.span;
if !finder.found_breaks.is_empty()
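The new `UseSpans::ClosureUse` arm above extends the existing clone suggestion to values moved into coroutine blocks (`async` blocks and friends). A minimal sketch of the kind of user code that hits this path — illustrative only, not taken from the commit:

    fn main() {
        let name = String::from("example");
        let fut = async move {
            // `name` is moved into the async block here.
            println!("hello {name}");
        };
        // A later use of `name` is a move error; with this change the
        // diagnostic can point at the use inside the block (`path_span`)
        // and suggest `name.clone()`.
        // println!("{name}"); // error[E0382]: borrow of moved value: `name`
        let _ = fut;
    }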
@@ -608,7 +608,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
}
self.cx.borrowck_context.constraints.outlives_constraints.push(constraint)
}
// If the region is live at at least one location in the promoted MIR,
// If the region is live at least one location in the promoted MIR,
// then add a liveness constraint to the main MIR for this region
// at the location provided as an argument to this method
//
@@ -3,17 +3,22 @@ use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_expand::base::{check_zero_tts, get_single_str_from_tts, parse_expr, resolve_path};
use rustc_data_structures::sync::Lrc;
use rustc_expand::base::{
check_zero_tts, get_single_str_from_tts, get_single_str_spanned_from_tts, parse_expr,
resolve_path,
};
use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt};
use rustc_expand::base::{MacEager, MacResult, MacroExpanderResult};
use rustc_expand::module::DirOwnership;
use rustc_parse::new_parser_from_file;
use rustc_parse::parser::{ForceCollect, Parser};
use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::Symbol;
use rustc_span::{Pos, Span};

use smallvec::SmallVec;
use std::path::{Path, PathBuf};
use std::rc::Rc;

// These macros all relate to the file system; they either return
@@ -182,35 +187,26 @@ pub fn expand_include_str(
tts: TokenStream,
) -> MacroExpanderResult<'static> {
let sp = cx.with_def_site_ctxt(sp);
let ExpandResult::Ready(mac) = get_single_str_from_tts(cx, sp, tts, "include_str!") else {
let ExpandResult::Ready(mac) = get_single_str_spanned_from_tts(cx, sp, tts, "include_str!")
else {
return ExpandResult::Retry(());
};
let file = match mac {
Ok(file) => file,
let (path, path_span) = match mac {
Ok(res) => res,
Err(guar) => return ExpandResult::Ready(DummyResult::any(sp, guar)),
};
let file = match resolve_path(&cx.sess, file.as_str(), sp) {
Ok(f) => f,
Err(err) => {
let guar = err.emit();
return ExpandResult::Ready(DummyResult::any(sp, guar));
}
};
ExpandResult::Ready(match cx.source_map().load_binary_file(&file) {
ExpandResult::Ready(match load_binary_file(cx, path.as_str().as_ref(), sp, path_span) {
Ok(bytes) => match std::str::from_utf8(&bytes) {
Ok(src) => {
let interned_src = Symbol::intern(src);
MacEager::expr(cx.expr_str(sp, interned_src))
}
Err(_) => {
let guar = cx.dcx().span_err(sp, format!("{} wasn't a utf-8 file", file.display()));
let guar = cx.dcx().span_err(sp, format!("`{path}` wasn't a utf-8 file"));
DummyResult::any(sp, guar)
}
},
Err(e) => {
let guar = cx.dcx().span_err(sp, format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp, guar)
}
Err(dummy) => dummy,
})
}
@@ -220,28 +216,127 @@ pub fn expand_include_bytes(
tts: TokenStream,
) -> MacroExpanderResult<'static> {
let sp = cx.with_def_site_ctxt(sp);
let ExpandResult::Ready(mac) = get_single_str_from_tts(cx, sp, tts, "include_bytes!") else {
let ExpandResult::Ready(mac) = get_single_str_spanned_from_tts(cx, sp, tts, "include_bytes!")
else {
return ExpandResult::Retry(());
};
let file = match mac {
Ok(file) => file,
let (path, path_span) = match mac {
Ok(res) => res,
Err(guar) => return ExpandResult::Ready(DummyResult::any(sp, guar)),
};
let file = match resolve_path(&cx.sess, file.as_str(), sp) {
Ok(f) => f,
Err(err) => {
let guar = err.emit();
return ExpandResult::Ready(DummyResult::any(sp, guar));
}
};
ExpandResult::Ready(match cx.source_map().load_binary_file(&file) {
ExpandResult::Ready(match load_binary_file(cx, path.as_str().as_ref(), sp, path_span) {
Ok(bytes) => {
let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes));
MacEager::expr(expr)
}
Err(e) => {
let guar = cx.dcx().span_err(sp, format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp, guar)
}
Err(dummy) => dummy,
})
}

fn load_binary_file(
cx: &mut ExtCtxt<'_>,
original_path: &Path,
macro_span: Span,
path_span: Span,
) -> Result<Lrc<[u8]>, Box<dyn MacResult>> {
let resolved_path = match resolve_path(&cx.sess, original_path, macro_span) {
Ok(path) => path,
Err(err) => {
let guar = err.emit();
return Err(DummyResult::any(macro_span, guar));
}
};
match cx.source_map().load_binary_file(&resolved_path) {
Ok(data) => Ok(data),
Err(io_err) => {
let mut err = cx.dcx().struct_span_err(
macro_span,
format!("couldn't read `{}`: {io_err}", resolved_path.display()),
);

if original_path.is_relative() {
let source_map = cx.sess.source_map();
let new_path = source_map
.span_to_filename(macro_span.source_callsite())
.into_local_path()
.and_then(|src| find_path_suggestion(source_map, src.parent()?, original_path))
.and_then(|path| path.into_os_string().into_string().ok());

if let Some(new_path) = new_path {
err.span_suggestion(
path_span,
"there is a file with the same name in a different directory",
format!("\"{}\"", new_path.replace('\\', "/").escape_debug()),
rustc_lint_defs::Applicability::MachineApplicable,
);
}
}
let guar = err.emit();
Err(DummyResult::any(macro_span, guar))
}
}
}

fn find_path_suggestion(
source_map: &SourceMap,
base_dir: &Path,
wanted_path: &Path,
) -> Option<PathBuf> {
// Fix paths that assume they're relative to cargo manifest dir
let mut base_c = base_dir.components();
let mut wanted_c = wanted_path.components();
let mut without_base = None;
while let Some(wanted_next) = wanted_c.next() {
if wanted_c.as_path().file_name().is_none() {
break;
}
// base_dir may be absolute
while let Some(base_next) = base_c.next() {
if base_next == wanted_next {
without_base = Some(wanted_c.as_path());
break;
}
}
}
let root_absolute = without_base.into_iter().map(PathBuf::from);

let base_dir_components = base_dir.components().count();
// Avoid going all the way to the root dir
let max_parent_components = if base_dir.is_relative() {
base_dir_components + 1
} else {
base_dir_components.saturating_sub(1)
};

// Try with additional leading ../
let mut prefix = PathBuf::new();
let add = std::iter::from_fn(|| {
prefix.push("..");
Some(prefix.join(wanted_path))
})
.take(max_parent_components.min(3));

// Try without leading directories
let mut trimmed_path = wanted_path;
let remove = std::iter::from_fn(|| {
let mut components = trimmed_path.components();
let removed = components.next()?;
trimmed_path = components.as_path();
let _ = trimmed_path.file_name()?; // ensure there is a file name left
Some([
Some(trimmed_path.to_path_buf()),
(removed != std::path::Component::ParentDir)
.then(|| Path::new("..").join(trimmed_path)),
])
})
.flatten()
.flatten()
.take(4);

for new_path in root_absolute.chain(add).chain(remove) {
if source_map.file_exists(&base_dir.join(&new_path)) {
return Some(new_path);
}
}
None
}
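For reference, a self-contained sketch of the candidate search that `find_path_suggestion` performs: try the wanted path with a few extra leading `..` components and with leading directories stripped, and keep the first candidate that exists relative to the including file's directory. This is a simplified illustration using only the standard library; `suggest_path` and its exact behaviour are assumptions for the example, not the compiler's API:

    use std::path::{Path, PathBuf};

    // Generate nearby variants of `wanted` and return the first that exists
    // under `base_dir`, mirroring the "../" and "strip leading dirs" ideas above.
    fn suggest_path(base_dir: &Path, wanted: &Path) -> Option<PathBuf> {
        let mut candidates = Vec::new();

        // Try with up to three additional leading "../" components.
        let mut prefix = PathBuf::new();
        for _ in 0..3 {
            prefix.push("..");
            candidates.push(prefix.join(wanted));
        }

        // Try with leading directories removed one at a time.
        let mut trimmed = wanted;
        loop {
            let mut comps = trimmed.components();
            if comps.next().is_none() {
                break;
            }
            let rest = comps.as_path();
            if rest.file_name().is_none() {
                break;
            }
            candidates.push(rest.to_path_buf());
            trimmed = rest;
        }

        candidates.into_iter().find(|c| base_dir.join(c).exists())
    }

    fn main() {
        let base = Path::new("src");
        if let Some(hit) = suggest_path(base, Path::new("data/input.txt")) {
            println!("did you mean {hit:?}?");
        }
    }

The real function additionally caps the number of `..` components based on how deep `base_dir` is and checks existence via `source_map.file_exists` rather than `Path::exists`.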
@@ -46,18 +46,18 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "cranelift-bforest"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9515fcc42b6cb5137f76b84c1a6f819782d0cf12473d145d3bc5cd67eedc8bc2"
checksum = "6a535eb1cf5a6003197dc569320c40c1cb2d2f97ef5d5348eebf067f20957381"
dependencies = [
"cranelift-entity",
]

[[package]]
name = "cranelift-codegen"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad827c6071bfe6d22de1bc331296a29f9ddc506ff926d8415b435ec6a6efce0"
checksum = "11b5066db32cec1492573827183af2142d2d88fe85a83cfc9e73f0f63d3788d4"
dependencies = [
"bumpalo",
"cranelift-bforest",
@@ -76,39 +76,39 @@ dependencies = [

[[package]]
name = "cranelift-codegen-meta"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10e6b36237a9ca2ce2fb4cc7741d418a080afa1327402138412ef85d5367bef1"
checksum = "64942e5774308e835fbad4dd25f253105412c90324631910e1ec27963147bddb"
dependencies = [
"cranelift-codegen-shared",
]

[[package]]
name = "cranelift-codegen-shared"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c36bf4bfb86898a94ccfa773a1f86e8a5346b1983ff72059bdd2db4600325251"
checksum = "c39c33db9a86dd6d8d04166a10c53deb477aeea3500eaaefca682e4eda9bb986"

[[package]]
name = "cranelift-control"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cbf36560e7a6bd1409ca91e7b43b2cc7ed8429f343d7605eadf9046e8fac0d0"
checksum = "4b7fc4937613aea3156a0538800a17bf56f345a5da2e79ae3df58488c93d867f"
dependencies = [
"arbitrary",
]

[[package]]
name = "cranelift-entity"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a71e11061a75b1184c09bea97c026a88f08b59ade96a7bb1f259d4ea0df2e942"
checksum = "f85575e79a153ce1ddbfb7fe1813519b4bfe1eb200cc9c8353b45ad123ae4d36"

[[package]]
name = "cranelift-frontend"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af5d4da63143ee3485c7bcedde0a818727d737d1083484a0ceedb8950c89e495"
checksum = "bbc31d6c0ab2249fe0c21e988256b42f5f401ab2673b4fc40076c82a698bdfb9"
dependencies = [
"cranelift-codegen",
"log",
@@ -118,15 +118,15 @@ dependencies = [

[[package]]
name = "cranelift-isle"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "457a9832b089e26f5eea70dcf49bed8ec6edafed630ce7c83161f24d46ab8085"
checksum = "dc14f37e3314c0e4c53779c2f46753bf242efff76ee9473757a1fff3b495ad37"

[[package]]
name = "cranelift-jit"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0af95fe68d5a10919012c8db82b1d59820405b8001c8c6d05f94b08031334fa9"
checksum = "cfdd1942f3233176a68c285380dbc84ff0440246a1bce308611c0a385b56ab18"
dependencies = [
"anyhow",
"cranelift-codegen",
@@ -144,9 +144,9 @@ dependencies = [

[[package]]
name = "cranelift-module"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11b0b201fa10a4014062d4c56c307c8d18fdf9a84cb5279efe6080241f42c7a7"
checksum = "121b2b5a16912554a1b9aace75b9b21eca49f28e33cbfbad4786dd9bc5361a5c"
dependencies = [
"anyhow",
"cranelift-codegen",
@@ -155,9 +155,9 @@ dependencies = [

[[package]]
name = "cranelift-native"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b490d579df1ce365e1ea359e24ed86d82289fa785153327c2f6a69a59a731e4"
checksum = "2ea5375f76ab31f9800a23fb2b440810286a6f669a3eb467cdd7ff255ea64268"
dependencies = [
"cranelift-codegen",
"libc",
@@ -166,9 +166,9 @@ dependencies = [

[[package]]
name = "cranelift-object"
version = "0.105.2"
version = "0.106.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb7e821ac6db471bcdbd004e5a4fa0d374f1046bd3a2ce278c332e0b0c01ca63"
checksum = "f34e04419ab41661e973d90a73aa7b12771455394dae7a69b101a9b7e7589db7"
dependencies = [
"anyhow",
"cranelift-codegen",
@@ -392,9 +392,9 @@ dependencies = [

[[package]]
name = "target-lexicon"
version = "0.12.12"
version = "0.12.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a"
checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f"

[[package]]
name = "unicode-ident"
@@ -410,9 +410,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

[[package]]
name = "wasmtime-jit-icache-coherence"
version = "18.0.2"
version = "19.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33f4121cb29dda08139b2824a734dd095d83ce843f2d613a84eb580b9cfc17ac"
checksum = "2796e4b4989db62899d2117e1e0258b839d088c044591b14e3a0396e7b3ae53a"
dependencies = [
"cfg-if",
"libc",
@@ -8,12 +8,12 @@ crate-type = ["dylib"]

[dependencies]
# These have to be in sync with each other
cranelift-codegen = { version = "0.105.2", default-features = false, features = ["std", "unwind", "all-arch"] }
cranelift-frontend = { version = "0.105.2" }
cranelift-module = { version = "0.105.2" }
cranelift-native = { version = "0.105.2" }
cranelift-jit = { version = "0.105.2", optional = true }
cranelift-object = { version = "0.105.2" }
cranelift-codegen = { version = "0.106.0", default-features = false, features = ["std", "unwind", "all-arch"] }
cranelift-frontend = { version = "0.106.0" }
cranelift-module = { version = "0.106.0" }
cranelift-native = { version = "0.106.0" }
cranelift-jit = { version = "0.106.0", optional = true }
cranelift-object = { version = "0.106.0" }
target-lexicon = "0.12.0"
gimli = { version = "0.28", default-features = false, features = ["write"]}
object = { version = "0.32", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
@@ -465,6 +465,36 @@ pub fn panic(_msg: &'static str) -> ! {
}
}

macro_rules! panic_const {
($($lang:ident = $message:expr,)+) => {
#[cfg(not(bootstrap))]
pub mod panic_const {
use super::*;

$(
#[track_caller]
#[lang = stringify!($lang)]
pub fn $lang() -> ! {
panic($message);
}
)+
}
}
}

panic_const! {
panic_const_add_overflow = "attempt to add with overflow",
panic_const_sub_overflow = "attempt to subtract with overflow",
panic_const_mul_overflow = "attempt to multiply with overflow",
panic_const_div_overflow = "attempt to divide with overflow",
panic_const_rem_overflow = "attempt to calculate the remainder with overflow",
panic_const_neg_overflow = "attempt to negate with overflow",
panic_const_shr_overflow = "attempt to shift right with overflow",
panic_const_shl_overflow = "attempt to shift left with overflow",
panic_const_div_by_zero = "attempt to divide by zero",
panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero",
}

#[lang = "panic_bounds_check"]
#[track_caller]
fn panic_bounds_check(index: usize, len: usize) -> ! {
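The `panic_const_*` lang items above let the compiler call a dedicated, argument-free panic function for each overflow and divide-by-zero case instead of materialising a message string at every call site. A standalone illustration of the user-visible behaviour behind those names — assuming a debug build, where overflow checks are enabled:

    fn main() {
        let a: u8 = 200;
        let b: u8 = 100;

        // In a debug build, a plain `a + b` would reach the
        // `panic_const_add_overflow` lang item and abort with
        // "attempt to add with overflow".
        match a.checked_add(b) {
            Some(sum) => println!("sum = {sum}"),
            None => println!("would panic: attempt to add with overflow"),
        }

        let d: u8 = 0;
        // A plain `a / d` would reach `panic_const_div_by_zero`
        // ("attempt to divide by zero").
        match a.checked_div(d) {
            Some(q) => println!("q = {q}"),
            None => println!("would panic: attempt to divide by zero"),
        }
    }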
@@ -39,6 +39,6 @@ index 42a26ae..5ac1042 100644
+#![cfg(test)]
#![feature(alloc_layout_extra)]
#![feature(array_chunks)]
#![feature(array_windows)]
#![feature(array_ptr_get)]
--
2.21.0 (Apple Git-122)
@@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2024-03-16"
channel = "nightly-2024-03-28"
components = ["rust-src", "rustc-dev", "llvm-tools"]
@@ -26,9 +26,10 @@ fn main() {
codegen_backend_arg.push(cg_clif_dylib_path);
args.push(codegen_backend_arg);
}
if !passed_args.iter().any(|arg| {
arg == "--sysroot" || arg.to_str().is_some_and(|s| s.starts_with("--sysroot="))
}) {
if !passed_args
.iter()
.any(|arg| arg == "--sysroot" || arg.to_str().is_some_and(|s| s.starts_with("--sysroot=")))
{
args.push(OsString::from("--sysroot"));
args.push(OsString::from(sysroot.to_str().unwrap()));
}
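The reformatted condition above only checks whether the caller already supplied a sysroot, either as a separate `--sysroot <path>` argument or in `--sysroot=<path>` form. A small standalone sketch of that check (std only; `has_sysroot_arg` is an illustrative name, not part of the commit):

    use std::ffi::OsString;

    fn has_sysroot_arg(passed_args: &[OsString]) -> bool {
        passed_args
            .iter()
            .any(|arg| arg == "--sysroot" || arg.to_str().is_some_and(|s| s.starts_with("--sysroot=")))
    }

    fn main() {
        let args = vec![OsString::from("--edition=2021"), OsString::from("--sysroot=/opt/rust")];
        assert!(has_sysroot_arg(&args));
        assert!(!has_sysroot_arg(&[OsString::from("main.rs")]));
        println!("sysroot detection ok");
    }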
@@ -26,12 +26,18 @@ fn main() {
codegen_backend_arg.push(cg_clif_dylib_path);
args.push(codegen_backend_arg);
}
if !passed_args.iter().any(|arg| {
arg == "--sysroot" || arg.to_str().is_some_and(|s| s.starts_with("--sysroot="))
}) {
if !passed_args
.iter()
.any(|arg| arg == "--sysroot" || arg.to_str().is_some_and(|s| s.starts_with("--sysroot=")))
{
args.push(OsString::from("--sysroot"));
args.push(OsString::from(sysroot.to_str().unwrap()));
}
if passed_args.is_empty() {
// Don't pass any arguments when the user didn't pass any arguments
// either to ensure the help message is shown.
args.clear();
}
args.extend(passed_args);

let rustdoc = if let Some(rustdoc) = option_env!("RUSTDOC") {
@@ -10,14 +10,6 @@ pushd rust

command -v rg >/dev/null 2>&1 || cargo install ripgrep

# FIXME(rust-lang/rust#122196) fix stage0 rmake.rs run-make tests and remove
# this workaround
for test in $(ls tests/run-make); do
if [[ -e "tests/run-make/$test/rmake.rs" ]]; then
rm -r "tests/run-make/$test"
fi
done

# FIXME remove this workaround once ICE tests no longer emit an outdated nightly message
for test in $(rg -i --files-with-matches "//@(\[.*\])? failure-status: 101" tests/ui); do
echo "rm $test"
@@ -42,7 +34,6 @@ rm tests/ui/parser/unclosed-delimiter-in-dep.rs # submodule contains //~ERROR
# ================

# vendor intrinsics
rm tests/ui/simd/array-type.rs # "Index argument for `simd_insert` is not a constant"
rm tests/ui/asm/x86_64/evex512-implicit-feature.rs # unimplemented AVX512 x86 vendor intrinsic

# exotic linkages
@@ -59,12 +50,9 @@ rm -r tests/run-make/c-link-to-rust-va-list-fn # requires callee side vararg sup
rm -r tests/run-pass-valgrind/unsized-locals

# misc unimplemented things
rm tests/ui/intrinsics/intrinsic-nearby.rs # unimplemented nearbyintf32 and nearbyintf64 intrinsics
rm tests/ui/target-feature/missing-plusminus.rs # error not implemented
rm -r tests/run-make/emit-named-files # requires full --emit support
rm -r tests/run-make/repr128-dwarf # debuginfo test
rm -r tests/run-make/split-debuginfo # same
rm -r tests/run-make/symbols-include-type-name # --emit=asm not supported
rm -r tests/run-make/target-specs # i686 not supported by Cranelift
rm -r tests/run-make/mismatching-target-triples # same
rm tests/ui/asm/x86_64/issue-96797.rs # const and sym inline asm operands don't work entirely correctly
@@ -102,6 +90,17 @@ rm tests/ui/abi/stack-protector.rs # requires stack protector support
rm -r tests/run-make/emit-stack-sizes # requires support for -Z emit-stack-sizes
rm -r tests/run-make/optimization-remarks-dir # remarks are LLVM specific

# requires asm, llvm-ir and/or llvm-bc emit support
# =============================================
rm -r tests/run-make/emit-named-files
rm -r tests/run-make/issue-30063
rm -r tests/run-make/multiple-emits
rm -r tests/run-make/output-type-permutations
rm -r tests/run-make/emit-to-stdout
rm -r tests/run-make/compressed-debuginfo
rm -r tests/run-make/symbols-include-type-name


# giving different but possibly correct results
# =============================================
rm tests/ui/mir/mir_misc_casts.rs # depends on deduplication of constants
@@ -109,35 +108,21 @@ rm tests/ui/mir/mir_raw_fat_ptr.rs # same
rm tests/ui/consts/issue-33537.rs # same
rm tests/ui/consts/const-mut-refs-crate.rs # same

# rustdoc-clif passes extra args, suppressing the help message when no args are passed
rm -r tests/run-make/issue-88756-default-output

# doesn't work due to the way the rustc test suite is invoked.
# should work when using ./x.py test the way it is intended
# ============================================================
rm -r tests/run-make/remap-path-prefix-dwarf # requires llvm-dwarfdump
rm -r tests/run-make/compiler-builtins # Expects lib/rustlib/src/rust to contains the standard library source

# genuine bugs
# ============
rm tests/incremental/spike-neg1.rs # errors out for some reason
rm tests/incremental/spike-neg2.rs # same

rm -r tests/run-make/issue-51671 # wrong filename given in case of --emit=obj
rm -r tests/run-make/issue-30063 # same
rm -r tests/run-make/multiple-emits # same
rm -r tests/run-make/output-type-permutations # same
rm -r tests/run-make/used # same
rm -r tests/run-make/no-alloc-shim
rm -r tests/run-make/emit-to-stdout
rm -r tests/run-make/compressed-debuginfo

rm -r tests/run-make/extern-fn-explicit-align # argument alignment not yet supported

rm tests/ui/codegen/subtyping-enforces-type-equality.rs # assert_assignable bug with Coroutine's
rm -r tests/run-make/panic-abort-eh_frame # .eh_frame emitted with panic=abort

# bugs in the test suite
# ======================
rm tests/ui/backtrace.rs # TODO warning
rm tests/ui/process/nofile-limit.rs # TODO some AArch64 linking issue

rm tests/ui/stdio-is-blocking.rs # really slow with unoptimized libstd
@@ -160,6 +145,19 @@ index ea06b620c4c..b969d0009c6 100644
ifdef RUSTC_LINKER
RUSTC := \$(RUSTC) -Clinker='\$(RUSTC_LINKER)'
RUSTDOC := \$(RUSTDOC) -Clinker='\$(RUSTC_LINKER)'
diff --git a/src/tools/run-make-support/src/rustdoc.rs b/src/tools/run-make-support/src/rustdoc.rs
index 9607ff02f96..b7d97caf9a2 100644
--- a/src/tools/run-make-support/src/rustdoc.rs
+++ b/src/tools/run-make-support/src/rustdoc.rs
@@ -34,8 +34,6 @@ pub fn bare() -> Self {
/// Construct a \`rustdoc\` invocation with \`-L \$(TARGET_RPATH_DIR)\` set.
pub fn new() -> Self {
let mut cmd = setup_common();
- let target_rpath_dir = env::var_os("TARGET_RPATH_DIR").unwrap();
- cmd.arg(format!("-L{}", target_rpath_dir.to_string_lossy()));
Self { cmd }
}

EOF

echo "[TEST] rustc test suite"
@@ -222,17 +222,15 @@ pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_
Spread(Vec<Option<CValue<'tcx>>>),
}

let fn_abi = fx.fn_abi.take().unwrap();

// FIXME implement variadics in cranelift
if fn_abi.c_variadic {
if fx.fn_abi.c_variadic {
fx.tcx.dcx().span_fatal(
fx.mir.span,
"Defining variadic functions is not yet supported by Cranelift",
);
}

let mut arg_abis_iter = fn_abi.args.iter();
let mut arg_abis_iter = fx.fn_abi.args.iter();

let func_params = fx
.mir
@@ -279,7 +277,6 @@ pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_
}

assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
fx.fn_abi = Some(fn_abi);
assert!(block_params_iter.next().is_none(), "arg_value left behind");

self::comments::add_locals_header_comment(fx);
@@ -12,27 +12,15 @@ pub(super) fn codegen_return_param<'tcx>(
ssa_analyzed: &rustc_index::IndexSlice<Local, crate::analyze::SsaKind>,
block_params_iter: &mut impl Iterator<Item = Value>,
) -> CPlace<'tcx> {
let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.ret.mode {
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast { .. } => {
let is_ssa =
ssa_analyzed[RETURN_PLACE].is_ssa(fx, fx.fn_abi.as_ref().unwrap().ret.layout.ty);
(
super::make_local_place(
fx,
RETURN_PLACE,
fx.fn_abi.as_ref().unwrap().ret.layout,
is_ssa,
),
smallvec![],
)
let is_ssa = ssa_analyzed[RETURN_PLACE].is_ssa(fx, fx.fn_abi.ret.layout.ty);
(super::make_local_place(fx, RETURN_PLACE, fx.fn_abi.ret.layout, is_ssa), smallvec![])
}
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
let ret_param = block_params_iter.next().unwrap();
assert_eq!(fx.bcx.func.dfg.value_type(ret_param), fx.pointer_type);
(
CPlace::for_ptr(Pointer::new(ret_param), fx.fn_abi.as_ref().unwrap().ret.layout),
smallvec![ret_param],
)
(CPlace::for_ptr(Pointer::new(ret_param), fx.fn_abi.ret.layout), smallvec![ret_param])
}
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
unreachable!("unsized return value")
@@ -45,8 +33,8 @@ pub(super) fn codegen_return_param<'tcx>(
Some(RETURN_PLACE),
None,
&ret_param,
&fx.fn_abi.as_ref().unwrap().ret.mode,
fx.fn_abi.as_ref().unwrap().ret.layout,
&fx.fn_abi.ret.mode,
fx.fn_abi.ret.layout,
);

ret_place
@@ -115,7 +103,7 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(

/// Codegen a return instruction with the right return value(s) if any.
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, '_>) {
match fx.fn_abi.as_ref().unwrap().ret.mode {
match fx.fn_abi.ret.mode {
PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
fx.bcx.ins().return_(&[]);
}
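Both hunks above are the same refactor: `FunctionCx::fn_abi` stops being an `Option` that is temporarily `take()`n, so readers no longer need `.as_ref().unwrap()`. A generic before/after sketch of that pattern, with illustrative names only:

    struct Config {
        verbose: bool,
    }

    // Before: the field is an Option purely so it can be taken out and put
    // back, and every reader pays with `.as_ref().unwrap()`.
    struct BeforeCtx {
        config: Option<Config>,
    }

    impl BeforeCtx {
        fn verbose(&self) -> bool {
            self.config.as_ref().unwrap().verbose
        }
    }

    // After: store the value (or a reference) directly; reads become plain
    // field accesses and the "forgot to put it back" failure mode disappears.
    struct AfterCtx<'a> {
        config: &'a Config,
    }

    impl AfterCtx<'_> {
        fn verbose(&self) -> bool {
            self.config.verbose
        }
    }

    fn main() {
        let cfg = Config { verbose: true };
        let before = BeforeCtx { config: Some(Config { verbose: true }) };
        let after = AfterCtx { config: &cfg };
        assert_eq!(before.verbose(), after.verbose());
    }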
@@ -11,7 +11,7 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_monomorphize::is_call_from_compiler_builtins_to_upstream_monomorphization;

use crate::constant::ConstantCx;
use crate::debuginfo::FunctionDebugContext;
use crate::debuginfo::{FunctionDebugContext, TypeDebugContext};
use crate::prelude::*;
use crate::pretty_clif::CommentWriter;

@@ -26,6 +26,7 @@ pub(crate) struct CodegenedFunction {
pub(crate) fn codegen_fn<'tcx>(
tcx: TyCtxt<'tcx>,
cx: &mut crate::CodegenCx,
type_dbg: &mut TypeDebugContext<'tcx>,
cached_func: Function,
module: &mut dyn Module,
instance: Instance<'tcx>,
@@ -69,8 +70,10 @@ pub(crate) fn codegen_fn<'tcx>(
let pointer_type = target_config.pointer_type();
let clif_comments = crate::pretty_clif::CommentWriter::new(tcx, instance);

let fn_abi = RevealAllLayoutCx(tcx).fn_abi_of_instance(instance, ty::List::empty());

let func_debug_cx = if let Some(debug_context) = &mut cx.debug_context {
Some(debug_context.define_function(tcx, &symbol_name, mir.span))
Some(debug_context.define_function(tcx, type_dbg, instance, fn_abi, &symbol_name, mir.span))
} else {
None
};
@@ -87,7 +90,7 @@ pub(crate) fn codegen_fn<'tcx>(
instance,
symbol_name,
mir,
fn_abi: Some(RevealAllLayoutCx(tcx).fn_abi_of_instance(instance, ty::List::empty())),
fn_abi,

bcx,
block_map,
@@ -95,7 +98,6 @@ pub(crate) fn codegen_fn<'tcx>(
caller_location: None, // set by `codegen_fn_prelude`

clif_comments,
last_source_file: None,
next_ssa_var: 0,
};

@@ -370,8 +372,14 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
);
}
_ => {
let msg_str = msg.description();
codegen_panic(fx, msg_str, source_info);
let location = fx.get_caller_location(source_info).load_scalar(fx);

codegen_panic_inner(
fx,
msg.panic_function(),
&[location],
Some(source_info.span),
);
}
}
}
@@ -955,20 +963,6 @@ pub(crate) fn codegen_operand<'tcx>(
}
}

pub(crate) fn codegen_panic<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
msg_str: &str,
source_info: mir::SourceInfo,
) {
let location = fx.get_caller_location(source_info).load_scalar(fx);

let msg_ptr = fx.anonymous_str(msg_str);
let msg_len = fx.bcx.ins().iconst(fx.pointer_type, i64::try_from(msg_str.len()).unwrap());
let args = [msg_ptr, msg_len, location];

codegen_panic_inner(fx, rustc_hir::LangItem::Panic, &args, Some(source_info.span));
}

pub(crate) fn codegen_panic_nounwind<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
msg_str: &str,
@@ -1,12 +1,9 @@
use cranelift_codegen::isa::TargetFrontendConfig;
use gimli::write::FileId;
use rustc_data_structures::sync::Lrc;
use rustc_index::IndexVec;
use rustc_middle::ty::layout::{
FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers,
};
use rustc_span::source_map::Spanned;
use rustc_span::SourceFile;
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::{Integer, Primitive};
use rustc_target::spec::{HasTargetSpec, Target};
@@ -294,7 +291,7 @@ pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> {
pub(crate) instance: Instance<'tcx>,
pub(crate) symbol_name: String,
pub(crate) mir: &'tcx Body<'tcx>,
pub(crate) fn_abi: Option<&'tcx FnAbi<'tcx, Ty<'tcx>>>,
pub(crate) fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

pub(crate) bcx: FunctionBuilder<'clif>,
pub(crate) block_map: IndexVec<BasicBlock, Block>,
@@ -305,11 +302,6 @@ pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> {

pub(crate) clif_comments: crate::pretty_clif::CommentWriter,

/// Last accessed source file and it's debuginfo file id.
///
/// For optimization purposes only
pub(crate) last_source_file: Option<(Lrc<SourceFile>, FileId)>,

/// This should only be accessed by `CPlace::new_var`.
pub(crate) next_ssa_var: u32,
}
@@ -419,25 +411,8 @@ impl<'tcx> FunctionCx<'_, '_, 'tcx> {

pub(crate) fn set_debug_loc(&mut self, source_info: mir::SourceInfo) {
if let Some(debug_context) = &mut self.cx.debug_context {
let (file, line, column) =
DebugContext::get_span_loc(self.tcx, self.mir.span, source_info.span);

// add_source_file is very slow.
// Optimize for the common case of the current file not being changed.
let mut cached_file_id = None;
if let Some((ref last_source_file, last_file_id)) = self.last_source_file {
// If the allocations are not equal, the files may still be equal, but that
// doesn't matter, as this is just an optimization.
if rustc_data_structures::sync::Lrc::ptr_eq(last_source_file, &file) {
cached_file_id = Some(last_file_id);
}
}

let file_id = if let Some(file_id) = cached_file_id {
file_id
} else {
debug_context.add_source_file(&file)
};
let (file_id, line, column) =
debug_context.get_span_loc(self.tcx, self.mir.span, source_info.span);

let source_loc =
self.func_debug_cx.as_mut().unwrap().add_dbg_loc(file_id, line, column);
|
@ -6,17 +6,16 @@ use cranelift_module::*;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::mir::interpret::{read_target_uint, AllocId, GlobalAlloc, Scalar};
|
||||
use rustc_middle::ty::ScalarInt;
|
||||
use rustc_middle::ty::{Binder, ExistentialTraitRef, ScalarInt};
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
pub(crate) struct ConstantCx {
|
||||
todo: Vec<TodoItem>,
|
||||
done: FxHashSet<DataId>,
|
||||
anon_allocs: FxHashMap<AllocId, DataId>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
enum TodoItem {
|
||||
Alloc(AllocId),
|
||||
Static(DefId),
|
||||
@ -24,19 +23,24 @@ enum TodoItem {
|
||||
|
||||
impl ConstantCx {
|
||||
pub(crate) fn new() -> Self {
|
||||
ConstantCx { todo: vec![], done: FxHashSet::default(), anon_allocs: FxHashMap::default() }
|
||||
ConstantCx { todo: vec![], anon_allocs: FxHashMap::default() }
|
||||
}
|
||||
|
||||
pub(crate) fn finalize(mut self, tcx: TyCtxt<'_>, module: &mut dyn Module) {
|
||||
define_all_allocs(tcx, module, &mut self);
|
||||
self.done.clear();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_static(tcx: TyCtxt<'_>, module: &mut dyn Module, def_id: DefId) {
|
||||
pub(crate) fn codegen_static(tcx: TyCtxt<'_>, module: &mut dyn Module, def_id: DefId) -> DataId {
|
||||
let mut constants_cx = ConstantCx::new();
|
||||
constants_cx.todo.push(TodoItem::Static(def_id));
|
||||
constants_cx.finalize(tcx, module);
|
||||
|
||||
data_id_for_static(
|
||||
tcx, module, def_id, false,
|
||||
// For a declaration the stated mutability doesn't matter.
|
||||
false,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn codegen_tls_ref<'tcx>(
|
||||
@ -153,14 +157,12 @@ pub(crate) fn codegen_const_value<'tcx>(
|
||||
fx.bcx.ins().func_addr(fx.pointer_type, local_func_id)
|
||||
}
|
||||
GlobalAlloc::VTable(ty, trait_ref) => {
|
||||
let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
|
||||
let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
|
||||
// FIXME: factor this common code with the `Memory` arm into a function?
|
||||
let data_id = data_id_for_alloc_id(
|
||||
let data_id = data_id_for_vtable(
|
||||
fx.tcx,
|
||||
&mut fx.constants_cx,
|
||||
fx.module,
|
||||
alloc_id,
|
||||
alloc.inner().mutability,
|
||||
ty,
|
||||
trait_ref,
|
||||
);
|
||||
let local_data_id =
|
||||
fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
|
||||
@ -208,12 +210,8 @@ fn pointer_for_allocation<'tcx>(
|
||||
alloc_id: AllocId,
|
||||
) -> crate::pointer::Pointer {
|
||||
let alloc = fx.tcx.global_alloc(alloc_id).unwrap_memory();
|
||||
let data_id = data_id_for_alloc_id(
|
||||
&mut fx.constants_cx,
|
||||
&mut *fx.module,
|
||||
alloc_id,
|
||||
alloc.inner().mutability,
|
||||
);
|
||||
let data_id =
|
||||
data_id_for_alloc_id(&mut fx.constants_cx, fx.module, alloc_id, alloc.inner().mutability);
|
||||
|
||||
let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
|
||||
if fx.clif_comments.enabled() {
|
||||
@ -235,6 +233,17 @@ pub(crate) fn data_id_for_alloc_id(
|
||||
.or_insert_with(|| module.declare_anonymous_data(mutability.is_mut(), false).unwrap())
|
||||
}
|
||||
|
||||
pub(crate) fn data_id_for_vtable<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
cx: &mut ConstantCx,
|
||||
module: &mut dyn Module,
|
||||
ty: Ty<'tcx>,
|
||||
trait_ref: Option<Binder<'tcx, ExistentialTraitRef<'tcx>>>,
|
||||
) -> DataId {
|
||||
let alloc_id = tcx.vtable_allocation((ty, trait_ref));
|
||||
data_id_for_alloc_id(cx, module, alloc_id, Mutability::Not)
|
||||
}
|
||||
|
||||
fn data_id_for_static(
|
||||
tcx: TyCtxt<'_>,
|
||||
module: &mut dyn Module,
|
||||
@ -327,7 +336,12 @@ fn data_id_for_static(
|
||||
}
|
||||
|
||||
fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut ConstantCx) {
|
||||
let mut done = FxHashSet::default();
|
||||
while let Some(todo_item) = cx.todo.pop() {
|
||||
if !done.insert(todo_item) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let (data_id, alloc, section_name) = match todo_item {
|
||||
TodoItem::Alloc(alloc_id) => {
|
||||
let alloc = match tcx.global_alloc(alloc_id) {
|
||||
@ -358,10 +372,6 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
|
||||
}
|
||||
};
|
||||
|
||||
if cx.done.contains(&data_id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut data = DataDescription::new();
|
||||
let alloc = alloc.inner();
|
||||
data.set_align(alloc.align.bytes());
|
||||
@ -384,13 +394,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
|
||||
}
|
||||
|
||||
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
|
||||
if bytes.is_empty() {
|
||||
// FIXME(bytecodealliance/wasmtime#7918) cranelift-jit has a bug where it causes UB on
|
||||
// empty data objects
|
||||
data.define(Box::new([0]));
|
||||
} else {
|
||||
data.define(bytes.into_boxed_slice());
|
||||
}
|
||||
data.define(bytes.into_boxed_slice());
|
||||
|
||||
for &(offset, prov) in alloc.provenance().ptrs().iter() {
|
||||
let alloc_id = prov.alloc_id();
|
||||
@ -418,8 +422,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
|
||||
data_id_for_alloc_id(cx, module, alloc_id, target_alloc.inner().mutability)
|
||||
}
|
||||
GlobalAlloc::VTable(ty, trait_ref) => {
|
||||
let alloc_id = tcx.vtable_allocation((ty, trait_ref));
|
||||
data_id_for_alloc_id(cx, module, alloc_id, Mutability::Not)
|
||||
data_id_for_vtable(tcx, cx, module, ty, trait_ref)
|
||||
}
|
||||
GlobalAlloc::Static(def_id) => {
|
||||
if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
|
||||
@ -446,7 +449,6 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
|
||||
}
|
||||
|
||||
module.define_data(data_id, &data).unwrap();
|
||||
cx.done.insert(data_id);
|
||||
}
|
||||
|
||||
assert!(cx.todo.is_empty(), "{:?}", cx.todo);
|
||||
|
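With the `done` set moved out of `ConstantCx`, `define_all_allocs` above is a plain worklist loop: pop an item, skip it if already seen, otherwise define it and possibly enqueue more work. A minimal standalone sketch of that shape, with illustrative names:

    use std::collections::HashSet;

    fn process_worklist(roots: Vec<u32>) -> Vec<u32> {
        let mut todo = roots;
        let mut done = HashSet::new();
        let mut order = Vec::new();

        while let Some(item) = todo.pop() {
            // Skip already-processed items, like `if !done.insert(todo_item) { continue; }`.
            if !done.insert(item) {
                continue;
            }
            order.push(item);
            // Processing may discover more work (e.g. relocations to other
            // allocations); duplicates are fine, the set filters them out.
            if item > 0 {
                todo.push(item - 1);
            }
        }
        order
    }

    fn main() {
        assert_eq!(process_worklist(vec![3, 3, 1]), vec![1, 0, 3, 2]);
        println!("worklist ok");
    }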
@@ -1,5 +1,6 @@
//! Write the debuginfo into an object file.

use cranelift_module::{DataId, FuncId};
use cranelift_object::ObjectProduct;
use gimli::write::{Address, AttributeValue, EndianVec, Result, Sections, Writer};
use gimli::{RunTimeEndian, SectionId};
@@ -8,6 +9,18 @@ use rustc_data_structures::fx::FxHashMap;
use super::object::WriteDebugInfo;
use super::DebugContext;

pub(super) fn address_for_func(func_id: FuncId) -> Address {
let symbol = func_id.as_u32();
assert!(symbol & 1 << 31 == 0);
Address::Symbol { symbol: symbol as usize, addend: 0 }
}

pub(super) fn address_for_data(data_id: DataId) -> Address {
let symbol = data_id.as_u32();
assert!(symbol & 1 << 31 == 0);
Address::Symbol { symbol: (symbol | 1 << 31) as usize, addend: 0 }
}

impl DebugContext {
pub(crate) fn emit(&mut self, product: &mut ObjectProduct) {
let unit_range_list_id = self.dwarf.unit.ranges.add(self.unit_range_list.clone());
@@ -171,6 +184,7 @@ impl Writer for WriterRelocate {
gimli::DW_EH_PE_pcrel => {
let size = match eh_pe.format() {
gimli::DW_EH_PE_sdata4 => 4,
gimli::DW_EH_PE_sdata8 => 8,
_ => return Err(gimli::write::Error::UnsupportedPointerEncoding(eh_pe)),
};
self.relocs.push(DebugReloc {
||||
|
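`address_for_func` and `address_for_data` above pack two ID spaces into one symbol index by reserving bit 31 as a tag: function IDs keep the bit clear, data IDs set it. A standalone sketch of that encoding and the matching decode (std only; the enum and function names are illustrative):

    const DATA_TAG: u32 = 1 << 31;

    #[derive(Debug, PartialEq)]
    enum SymbolKind {
        Func(u32),
        Data(u32),
    }

    fn encode_func(func_id: u32) -> usize {
        assert!(func_id & DATA_TAG == 0, "function id must fit in 31 bits");
        func_id as usize
    }

    fn encode_data(data_id: u32) -> usize {
        assert!(data_id & DATA_TAG == 0, "data id must fit in 31 bits");
        (data_id | DATA_TAG) as usize
    }

    fn decode(symbol: usize) -> SymbolKind {
        let raw = symbol as u32;
        if raw & DATA_TAG == 0 {
            SymbolKind::Func(raw)
        } else {
            SymbolKind::Data(raw & !DATA_TAG)
        }
    }

    fn main() {
        assert_eq!(decode(encode_func(7)), SymbolKind::Func(7));
        assert_eq!(decode(encode_data(7)), SymbolKind::Data(7));
        println!("symbol tagging ok");
    }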
@@ -5,14 +5,12 @@ use std::path::{Component, Path};

use cranelift_codegen::binemit::CodeOffset;
use cranelift_codegen::MachSrcLoc;
use gimli::write::{
Address, AttributeValue, FileId, FileInfo, LineProgram, LineString, LineStringTable,
};
use rustc_data_structures::sync::Lrc;
use gimli::write::{AttributeValue, FileId, FileInfo, LineProgram, LineString, LineStringTable};
use rustc_span::{
FileName, Pos, SourceFile, SourceFileAndLine, SourceFileHash, SourceFileHashAlgorithm,
};

use crate::debuginfo::emit::address_for_func;
use crate::debuginfo::FunctionDebugContext;
use crate::prelude::*;

@@ -60,10 +58,11 @@ fn make_file_info(hash: SourceFileHash) -> Option<FileInfo> {

impl DebugContext {
pub(crate) fn get_span_loc(
&mut self,
tcx: TyCtxt<'_>,
function_span: Span,
span: Span,
) -> (Lrc<SourceFile>, u64, u64) {
) -> (FileId, u64, u64) {
// Based on https://github.com/rust-lang/rust/blob/e369d87b015a84653343032833d65d0545fd3f26/src/librustc_codegen_ssa/mir/mod.rs#L116-L131
// In order to have a good line stepping behavior in debugger, we overwrite debug
// locations of macro expansions with that of the outermost expansion site (when the macro is
@@ -71,61 +70,66 @@ impl DebugContext {
let span = tcx.collapsed_debuginfo(span, function_span);
match tcx.sess.source_map().lookup_line(span.lo()) {
Ok(SourceFileAndLine { sf: file, line }) => {
let file_id = self.add_source_file(&file);
let line_pos = file.lines()[line];
let col = file.relative_position(span.lo()) - line_pos;

(file, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)
(file_id, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)
}
Err(file) => (file, 0, 0),
Err(file) => (self.add_source_file(&file), 0, 0),
}
}

pub(crate) fn add_source_file(&mut self, source_file: &SourceFile) -> FileId {
let line_program: &mut LineProgram = &mut self.dwarf.unit.line_program;
let line_strings: &mut LineStringTable = &mut self.dwarf.line_strings;
let cache_key = (source_file.stable_id, source_file.src_hash);
*self.created_files.entry(cache_key).or_insert_with(|| {
let line_program: &mut LineProgram = &mut self.dwarf.unit.line_program;
let line_strings: &mut LineStringTable = &mut self.dwarf.line_strings;

match &source_file.name {
FileName::Real(path) => {
let (dir_path, file_name) =
split_path_dir_and_file(if self.should_remap_filepaths {
path.remapped_path_if_available()
} else {
path.local_path_if_available()
});
let dir_name = osstr_as_utf8_bytes(dir_path.as_os_str());
let file_name = osstr_as_utf8_bytes(file_name);

let dir_id = if !dir_name.is_empty() {
let dir_name = LineString::new(dir_name, line_program.encoding(), line_strings);
line_program.add_directory(dir_name)
} else {
line_program.default_directory()
};
let file_name = LineString::new(file_name, line_program.encoding(), line_strings);

let info = make_file_info(source_file.src_hash);

line_program.file_has_md5 &= info.is_some();
line_program.add_file(file_name, dir_id, info)
}
// FIXME give more appropriate file names
filename => {
let dir_id = line_program.default_directory();
let dummy_file_name = LineString::new(
filename
.display(if self.should_remap_filepaths {
FileNameDisplayPreference::Remapped
match &source_file.name {
FileName::Real(path) => {
let (dir_path, file_name) =
split_path_dir_and_file(if self.should_remap_filepaths {
path.remapped_path_if_available()
} else {
FileNameDisplayPreference::Local
})
.to_string()
.into_bytes(),
line_program.encoding(),
line_strings,
);
line_program.add_file(dummy_file_name, dir_id, None)
path.local_path_if_available()
});
let dir_name = osstr_as_utf8_bytes(dir_path.as_os_str());
let file_name = osstr_as_utf8_bytes(file_name);

let dir_id = if !dir_name.is_empty() {
let dir_name =
LineString::new(dir_name, line_program.encoding(), line_strings);
line_program.add_directory(dir_name)
} else {
line_program.default_directory()
};
let file_name =
LineString::new(file_name, line_program.encoding(), line_strings);

let info = make_file_info(source_file.src_hash);

line_program.file_has_md5 &= info.is_some();
line_program.add_file(file_name, dir_id, info)
}
filename => {
let dir_id = line_program.default_directory();
let dummy_file_name = LineString::new(
filename
.display(if self.should_remap_filepaths {
FileNameDisplayPreference::Remapped
} else {
FileNameDisplayPreference::Local
})
.to_string()
.into_bytes(),
line_program.encoding(),
line_strings,
);
line_program.add_file(dummy_file_name, dir_id, None)
}
}
}
})
}
}

@@ -138,7 +142,7 @@ impl FunctionDebugContext {
pub(super) fn create_debug_lines(
&mut self,
debug_context: &mut DebugContext,
symbol: usize,
func_id: FuncId,
context: &Context,
) -> CodeOffset {
let create_row_for_span =
@@ -151,11 +155,7 @@ impl FunctionDebugContext {
debug_context.dwarf.unit.line_program.generate_row();
};

debug_context
.dwarf
.unit
.line_program
.begin_sequence(Some(Address::Symbol { symbol, addend: 0 }));
debug_context.dwarf.unit.line_program.begin_sequence(Some(address_for_func(func_id)));

let mut func_end = 0;

@@ -178,10 +178,7 @@ impl FunctionDebugContext {
assert_ne!(func_end, 0);

let entry = debug_context.dwarf.unit.get_mut(self.entry_id);
entry.set(
gimli::DW_AT_low_pc,
AttributeValue::Address(Address::Symbol { symbol, addend: 0 }),
);
entry.set(gimli::DW_AT_low_pc, AttributeValue::Address(address_for_func(func_id)));
entry.set(gimli::DW_AT_high_pc, AttributeValue::Udata(u64::from(func_end)));

func_end
||||
|
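`add_source_file` above now memoises the DWARF `FileId` per source file, keyed by the file's stable id and source hash, so repeated debug locations in the same file skip the slow registration path entirely. A small standalone sketch of that cache shape (std only; all names are illustrative):

    use std::collections::HashMap;

    type FileId = u32;

    struct FileTable {
        created_files: HashMap<(u64, u64), FileId>, // (stable_id, src_hash) -> FileId
        next_id: FileId,
        registrations: u32, // how often the slow path actually ran
    }

    impl FileTable {
        fn new() -> Self {
            FileTable { created_files: HashMap::new(), next_id: 0, registrations: 0 }
        }

        // Mirrors `*self.created_files.entry(cache_key).or_insert_with(|| ...)`:
        // the closure (the expensive registration) only runs on a cache miss.
        fn add_source_file(&mut self, stable_id: u64, src_hash: u64) -> FileId {
            let next_id = &mut self.next_id;
            let registrations = &mut self.registrations;
            *self.created_files.entry((stable_id, src_hash)).or_insert_with(|| {
                *registrations += 1;
                let id = *next_id;
                *next_id += 1;
                id
            })
        }
    }

    fn main() {
        let mut table = FileTable::new();
        let a = table.add_source_file(1, 0xabcd);
        let b = table.add_source_file(1, 0xabcd); // cache hit, no new registration
        let c = table.add_source_file(2, 0x1234);
        assert_eq!(a, b);
        assert_ne!(a, c);
        assert_eq!(table.registrations, 2);
        println!("file id cache ok");
    }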
@ -3,20 +3,29 @@
mod emit;
mod line_info;
mod object;
mod types;
mod unwind;

use cranelift_codegen::ir::Endianness;
use cranelift_codegen::isa::TargetIsa;
use cranelift_module::DataId;
use gimli::write::{
Address, AttributeValue, DwarfUnit, FileId, LineProgram, LineString, Range, RangeList,
UnitEntryId,
Address, AttributeValue, DwarfUnit, Expression, FileId, LineProgram, LineString, Range,
RangeList, UnitEntryId,
};
use gimli::{Encoding, Format, LineEncoding, RunTimeEndian};
use gimli::{AArch64, Encoding, Format, LineEncoding, Register, RiscV, RunTimeEndian, X86_64};
use indexmap::IndexSet;
use rustc_codegen_ssa::debuginfo::type_names;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefIdMap;
use rustc_session::Session;
use rustc_span::{SourceFileHash, StableSourceFileId};
use rustc_target::abi::call::FnAbi;

pub(crate) use self::emit::{DebugReloc, DebugRelocName};
pub(crate) use self::types::TypeDebugContext;
pub(crate) use self::unwind::UnwindContext;
use crate::debuginfo::emit::{address_for_data, address_for_func};
use crate::prelude::*;

pub(crate) fn producer(sess: &Session) -> String {
@ -28,6 +37,10 @@ pub(crate) struct DebugContext {

dwarf: DwarfUnit,
unit_range_list: RangeList,
created_files: FxHashMap<(StableSourceFileId, SourceFileHash), FileId>,
stack_pointer_register: Register,
namespace_map: DefIdMap<UnitEntryId>,
array_size_type: UnitEntryId,

should_remap_filepaths: bool,
}
@ -39,7 +52,7 @@ pub(crate) struct FunctionDebugContext {
}

impl DebugContext {
pub(crate) fn new(tcx: TyCtxt<'_>, isa: &dyn TargetIsa) -> Self {
pub(crate) fn new(tcx: TyCtxt<'_>, isa: &dyn TargetIsa, cgu_name: &str) -> Self {
let encoding = Encoding {
format: Format::Dwarf32,
// FIXME this should be configurable
@ -60,6 +73,15 @@ impl DebugContext {
Endianness::Big => RunTimeEndian::Big,
};

let stack_pointer_register = match isa.triple().architecture {
target_lexicon::Architecture::Aarch64(_) => AArch64::SP,
target_lexicon::Architecture::Riscv64(_) => RiscV::SP,
target_lexicon::Architecture::X86_64 | target_lexicon::Architecture::X86_64h => {
X86_64::RSP
}
_ => Register(u16::MAX),
};

let mut dwarf = DwarfUnit::new(encoding);

let should_remap_filepaths = tcx.sess.should_prefer_remapped_for_codegen();
@ -95,7 +117,7 @@ impl DebugContext {
dwarf.unit.line_program = line_program;

{
let name = dwarf.strings.add(name);
let name = dwarf.strings.add(format!("{name}/@/{cgu_name}"));
let comp_dir = dwarf.strings.add(comp_dir);

let root = dwarf.unit.root();
@ -103,41 +125,134 @@ impl DebugContext {
root.set(gimli::DW_AT_producer, AttributeValue::StringRef(dwarf.strings.add(producer)));
root.set(gimli::DW_AT_language, AttributeValue::Language(gimli::DW_LANG_Rust));
root.set(gimli::DW_AT_name, AttributeValue::StringRef(name));

// This will be replaced when emitting the debuginfo. It is only
// defined here to ensure that the order of the attributes matches
// rustc.
root.set(gimli::DW_AT_stmt_list, AttributeValue::Udata(0));

root.set(gimli::DW_AT_comp_dir, AttributeValue::StringRef(comp_dir));
root.set(gimli::DW_AT_low_pc, AttributeValue::Address(Address::Constant(0)));
}

let array_size_type = dwarf.unit.add(dwarf.unit.root(), gimli::DW_TAG_base_type);
let array_size_type_entry = dwarf.unit.get_mut(array_size_type);
array_size_type_entry.set(
gimli::DW_AT_name,
AttributeValue::StringRef(dwarf.strings.add("__ARRAY_SIZE_TYPE__")),
);
array_size_type_entry
.set(gimli::DW_AT_encoding, AttributeValue::Encoding(gimli::DW_ATE_unsigned));
array_size_type_entry.set(
gimli::DW_AT_byte_size,
AttributeValue::Udata(isa.frontend_config().pointer_bytes().into()),
);

DebugContext {
endian,
dwarf,
unit_range_list: RangeList(Vec::new()),
created_files: FxHashMap::default(),
stack_pointer_register,
namespace_map: DefIdMap::default(),
array_size_type,
should_remap_filepaths,
}
}

pub(crate) fn define_function(
fn item_namespace(&mut self, tcx: TyCtxt<'_>, def_id: DefId) -> UnitEntryId {
if let Some(&scope) = self.namespace_map.get(&def_id) {
return scope;
}

let def_key = tcx.def_key(def_id);
let parent_scope = def_key
.parent
.map(|parent| self.item_namespace(tcx, DefId { krate: def_id.krate, index: parent }))
.unwrap_or(self.dwarf.unit.root());

let namespace_name = {
let mut output = String::new();
type_names::push_item_name(tcx, def_id, false, &mut output);
output
};
let namespace_name_id = self.dwarf.strings.add(namespace_name);

let scope = self.dwarf.unit.add(parent_scope, gimli::DW_TAG_namespace);
let scope_entry = self.dwarf.unit.get_mut(scope);
scope_entry.set(gimli::DW_AT_name, AttributeValue::StringRef(namespace_name_id));

self.namespace_map.insert(def_id, scope);
scope
}

pub(crate) fn define_function<'tcx>(
&mut self,
tcx: TyCtxt<'_>,
name: &str,
tcx: TyCtxt<'tcx>,
type_dbg: &mut TypeDebugContext<'tcx>,
instance: Instance<'tcx>,
fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
linkage_name: &str,
function_span: Span,
) -> FunctionDebugContext {
let (file, line, column) = DebugContext::get_span_loc(tcx, function_span, function_span);
let (file_id, line, column) = self.get_span_loc(tcx, function_span, function_span);

let file_id = self.add_source_file(&file);
let scope = self.item_namespace(tcx, tcx.parent(instance.def_id()));

// FIXME: add to appropriate scope instead of root
let scope = self.dwarf.unit.root();
let mut name = String::new();
type_names::push_item_name(tcx, instance.def_id(), false, &mut name);

// Find the enclosing function, in case this is a closure.
let enclosing_fn_def_id = tcx.typeck_root_def_id(instance.def_id());

// We look up the generics of the enclosing function and truncate the args
// to their length in order to cut off extra stuff that might be in there for
// closures or coroutines.
let generics = tcx.generics_of(enclosing_fn_def_id);
let args = instance.args.truncate_to(tcx, generics);

type_names::push_generic_params(
tcx,
tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), args),
enclosing_fn_def_id,
&mut name,
);

let entry_id = self.dwarf.unit.add(scope, gimli::DW_TAG_subprogram);
let entry = self.dwarf.unit.get_mut(entry_id);
let linkage_name_id =
if name != linkage_name { Some(self.dwarf.strings.add(linkage_name)) } else { None };
let name_id = self.dwarf.strings.add(name);

// These will be replaced in FunctionDebugContext::finalize. They are
// only defined here to ensure that the order of the attributes matches
// rustc.
entry.set(gimli::DW_AT_low_pc, AttributeValue::Udata(0));
entry.set(gimli::DW_AT_high_pc, AttributeValue::Udata(0));

let mut frame_base_expr = Expression::new();
frame_base_expr.op_reg(self.stack_pointer_register);
entry.set(gimli::DW_AT_frame_base, AttributeValue::Exprloc(frame_base_expr));

if let Some(linkage_name_id) = linkage_name_id {
entry.set(gimli::DW_AT_linkage_name, AttributeValue::StringRef(linkage_name_id));
}
// Gdb requires DW_AT_name. Otherwise the DW_TAG_subprogram is skipped.
entry.set(gimli::DW_AT_name, AttributeValue::StringRef(name_id));
entry.set(gimli::DW_AT_linkage_name, AttributeValue::StringRef(name_id));

entry.set(gimli::DW_AT_decl_file, AttributeValue::FileIndex(Some(file_id)));
entry.set(gimli::DW_AT_decl_line, AttributeValue::Udata(line));
entry.set(gimli::DW_AT_decl_column, AttributeValue::Udata(column));

if !fn_abi.ret.is_ignore() {
let return_dw_ty = self.debug_type(tcx, type_dbg, fn_abi.ret.layout.ty);
let entry = self.dwarf.unit.get_mut(entry_id);
entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(return_dw_ty));
}

if tcx.is_reachable_non_generic(instance.def_id()) {
let entry = self.dwarf.unit.get_mut(entry_id);
entry.set(gimli::DW_AT_external, AttributeValue::FlagPresent);
}

FunctionDebugContext {
entry_id,
@ -145,6 +260,62 @@ impl DebugContext {
source_loc_set: IndexSet::new(),
}
}

// Adapted from https://github.com/rust-lang/rust/blob/10a7aa14fed9b528b74b0f098c4899c37c09a9c7/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs#L1288-L1346
pub(crate) fn define_static<'tcx>(
&mut self,
tcx: TyCtxt<'tcx>,
type_dbg: &mut TypeDebugContext<'tcx>,
def_id: DefId,
data_id: DataId,
) {
let DefKind::Static { nested, .. } = tcx.def_kind(def_id) else { bug!() };
if nested {
return;
}

let scope = self.item_namespace(tcx, tcx.parent(def_id));

let span = tcx.def_span(def_id);
let (file_id, line, _column) = self.get_span_loc(tcx, span, span);

let static_type = Instance::mono(tcx, def_id).ty(tcx, ty::ParamEnv::reveal_all());
let static_layout = tcx.layout_of(ty::ParamEnv::reveal_all().and(static_type)).unwrap();
// FIXME use the actual type layout
let type_id = self.debug_type(tcx, type_dbg, static_type);

let name = tcx.item_name(def_id);
let linkage_name = tcx.symbol_name(Instance::mono(tcx, def_id)).name;

let entry_id = self.dwarf.unit.add(scope, gimli::DW_TAG_variable);
let entry = self.dwarf.unit.get_mut(entry_id);
let linkage_name_id = if name.as_str() != linkage_name {
Some(self.dwarf.strings.add(linkage_name))
} else {
None
};
let name_id = self.dwarf.strings.add(name.as_str());

entry.set(gimli::DW_AT_name, AttributeValue::StringRef(name_id));
entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(type_id));

if tcx.is_reachable_non_generic(def_id) {
entry.set(gimli::DW_AT_external, AttributeValue::FlagPresent);
}

entry.set(gimli::DW_AT_decl_file, AttributeValue::FileIndex(Some(file_id)));
entry.set(gimli::DW_AT_decl_line, AttributeValue::Udata(line));

entry.set(gimli::DW_AT_alignment, AttributeValue::Udata(static_layout.align.pref.bytes()));

let mut expr = Expression::new();
expr.op_addr(address_for_data(data_id));
entry.set(gimli::DW_AT_location, AttributeValue::Exprloc(expr));

if let Some(linkage_name_id) = linkage_name_id {
entry.set(gimli::DW_AT_linkage_name, AttributeValue::StringRef(linkage_name_id));
}
}
}

impl FunctionDebugContext {
@ -154,21 +325,16 @@ impl FunctionDebugContext {
func_id: FuncId,
context: &Context,
) {
let symbol = func_id.as_u32() as usize;
let end = self.create_debug_lines(debug_context, func_id, context);

let end = self.create_debug_lines(debug_context, symbol, context);

debug_context.unit_range_list.0.push(Range::StartLength {
begin: Address::Symbol { symbol, addend: 0 },
length: u64::from(end),
});
debug_context
.unit_range_list
.0
.push(Range::StartLength { begin: address_for_func(func_id), length: u64::from(end) });

let func_entry = debug_context.dwarf.unit.get_mut(self.entry_id);
// Gdb requires both DW_AT_low_pc and DW_AT_high_pc. Otherwise the DW_TAG_subprogram is skipped.
func_entry.set(
gimli::DW_AT_low_pc,
AttributeValue::Address(Address::Symbol { symbol, addend: 0 }),
);
func_entry.set(gimli::DW_AT_low_pc, AttributeValue::Address(address_for_func(func_id)));
// Using Udata for DW_AT_high_pc requires at least DWARF4
func_entry.set(gimli::DW_AT_high_pc, AttributeValue::Udata(u64::from(end)));
}

@ -1,4 +1,4 @@
use cranelift_module::FuncId;
use cranelift_module::{DataId, FuncId};
use cranelift_object::ObjectProduct;
use gimli::SectionId;
use object::write::{Relocation, StandardSegment};
@ -57,10 +57,13 @@ impl WriteDebugInfo for ObjectProduct {
let (symbol, symbol_offset) = match reloc.name {
DebugRelocName::Section(id) => (section_map.get(&id).unwrap().1, 0),
DebugRelocName::Symbol(id) => {
let symbol_id = self.function_symbol(FuncId::from_u32(id.try_into().unwrap()));
self.object
.symbol_section_and_offset(symbol_id)
.expect("Debug reloc for undef sym???")
let id = id.try_into().unwrap();
let symbol_id = if id & 1 << 31 == 0 {
self.function_symbol(FuncId::from_u32(id))
} else {
self.data_symbol(DataId::from_u32(id & !(1 << 31)))
};
self.object.symbol_section_and_offset(symbol_id).unwrap_or((symbol_id, 0))
}
};
self.object

204
compiler/rustc_codegen_cranelift/src/debuginfo/types.rs
Normal file
@ -0,0 +1,204 @@
|
||||
// Adapted from https://github.com/rust-lang/rust/blob/10a7aa14fed9b528b74b0f098c4899c37c09a9c7/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
|
||||
|
||||
use gimli::write::{AttributeValue, UnitEntryId};
|
||||
use rustc_codegen_ssa::debuginfo::type_names;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_middle::ty::layout::LayoutOf;
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt};
|
||||
|
||||
use crate::{has_ptr_meta, DebugContext, RevealAllLayoutCx};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct TypeDebugContext<'tcx> {
|
||||
type_map: FxHashMap<Ty<'tcx>, UnitEntryId>,
|
||||
}
|
||||
|
||||
/// Returns from the enclosing function if the type debuginfo node with the given
|
||||
/// unique ID can be found in the type map.
|
||||
macro_rules! return_if_type_created_in_meantime {
|
||||
($type_dbg:expr, $ty:expr) => {
|
||||
if let Some(&type_id) = $type_dbg.type_map.get(&$ty) {
|
||||
return type_id;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl DebugContext {
|
||||
pub(crate) fn debug_type<'tcx>(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
type_dbg: &mut TypeDebugContext<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
) -> UnitEntryId {
|
||||
if let Some(&type_id) = type_dbg.type_map.get(&ty) {
|
||||
return type_id;
|
||||
}
|
||||
|
||||
let type_id = match ty.kind() {
|
||||
ty::Never | ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) => {
|
||||
self.basic_type(tcx, ty)
|
||||
}
|
||||
ty::Tuple(elems) if elems.is_empty() => self.basic_type(tcx, ty),
|
||||
ty::Array(elem_ty, len) => self.array_type(
|
||||
tcx,
|
||||
type_dbg,
|
||||
ty,
|
||||
*elem_ty,
|
||||
len.eval_target_usize(tcx, ty::ParamEnv::reveal_all()),
|
||||
),
|
||||
// ty::Slice(_) | ty::Str
|
||||
// ty::Dynamic
|
||||
// ty::Foreign
|
||||
ty::RawPtr(pointee_type, _) | ty::Ref(_, pointee_type, _) => {
|
||||
self.pointer_type(tcx, type_dbg, ty, *pointee_type)
|
||||
}
|
||||
// ty::Adt(def, args) if def.is_box() && args.get(1).map_or(true, |arg| cx.layout_of(arg.expect_ty()).is_1zst())
|
||||
// ty::FnDef(..) | ty::FnPtr(..)
|
||||
// ty::Closure(..)
|
||||
// ty::Adt(def, ..)
|
||||
ty::Tuple(components) => self.tuple_type(tcx, type_dbg, ty, *components),
|
||||
// ty::Param(_)
|
||||
// FIXME implement remaining types and add unreachable!() to the fallback branch
|
||||
_ => self.placeholder_for_type(tcx, type_dbg, ty),
|
||||
};
|
||||
|
||||
type_dbg.type_map.insert(ty, type_id);
|
||||
|
||||
type_id
|
||||
}
|
||||
|
||||
fn basic_type<'tcx>(&mut self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> UnitEntryId {
|
||||
let (name, encoding) = match ty.kind() {
|
||||
ty::Never => ("!", gimli::DW_ATE_unsigned),
|
||||
ty::Tuple(elems) if elems.is_empty() => ("()", gimli::DW_ATE_unsigned),
|
||||
ty::Bool => ("bool", gimli::DW_ATE_boolean),
|
||||
ty::Char => ("char", gimli::DW_ATE_UTF),
|
||||
ty::Int(int_ty) => (int_ty.name_str(), gimli::DW_ATE_signed),
|
||||
ty::Uint(uint_ty) => (uint_ty.name_str(), gimli::DW_ATE_unsigned),
|
||||
ty::Float(float_ty) => (float_ty.name_str(), gimli::DW_ATE_float),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let type_id = self.dwarf.unit.add(self.dwarf.unit.root(), gimli::DW_TAG_base_type);
|
||||
let type_entry = self.dwarf.unit.get_mut(type_id);
|
||||
type_entry.set(gimli::DW_AT_name, AttributeValue::StringRef(self.dwarf.strings.add(name)));
|
||||
type_entry.set(gimli::DW_AT_encoding, AttributeValue::Encoding(encoding));
|
||||
type_entry.set(
|
||||
gimli::DW_AT_byte_size,
|
||||
AttributeValue::Udata(RevealAllLayoutCx(tcx).layout_of(ty).size.bytes()),
|
||||
);
|
||||
|
||||
type_id
|
||||
}
|
||||
|
||||
fn array_type<'tcx>(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
type_dbg: &mut TypeDebugContext<'tcx>,
|
||||
array_ty: Ty<'tcx>,
|
||||
elem_ty: Ty<'tcx>,
|
||||
len: u64,
|
||||
) -> UnitEntryId {
|
||||
let elem_dw_ty = self.debug_type(tcx, type_dbg, elem_ty);
|
||||
|
||||
return_if_type_created_in_meantime!(type_dbg, array_ty);
|
||||
|
||||
let array_type_id = self.dwarf.unit.add(self.dwarf.unit.root(), gimli::DW_TAG_array_type);
|
||||
let array_type_entry = self.dwarf.unit.get_mut(array_type_id);
|
||||
array_type_entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(elem_dw_ty));
|
||||
|
||||
let subrange_id = self.dwarf.unit.add(array_type_id, gimli::DW_TAG_subrange_type);
|
||||
let subrange_entry = self.dwarf.unit.get_mut(subrange_id);
|
||||
subrange_entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(self.array_size_type));
|
||||
subrange_entry.set(gimli::DW_AT_lower_bound, AttributeValue::Udata(0));
|
||||
subrange_entry.set(gimli::DW_AT_count, AttributeValue::Udata(len));
|
||||
|
||||
array_type_id
|
||||
}
|
||||
|
||||
fn pointer_type<'tcx>(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
type_dbg: &mut TypeDebugContext<'tcx>,
|
||||
ptr_type: Ty<'tcx>,
|
||||
pointee_type: Ty<'tcx>,
|
||||
) -> UnitEntryId {
|
||||
let pointee_dw_ty = self.debug_type(tcx, type_dbg, pointee_type);
|
||||
|
||||
return_if_type_created_in_meantime!(type_dbg, ptr_type);
|
||||
|
||||
let name = type_names::compute_debuginfo_type_name(tcx, ptr_type, true);
|
||||
|
||||
if !has_ptr_meta(tcx, ptr_type) {
|
||||
let pointer_type_id =
|
||||
self.dwarf.unit.add(self.dwarf.unit.root(), gimli::DW_TAG_pointer_type);
|
||||
let pointer_entry = self.dwarf.unit.get_mut(pointer_type_id);
|
||||
pointer_entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(pointee_dw_ty));
|
||||
pointer_entry
|
||||
.set(gimli::DW_AT_name, AttributeValue::StringRef(self.dwarf.strings.add(name)));
|
||||
|
||||
pointer_type_id
|
||||
} else {
|
||||
// FIXME implement debuginfo for fat pointers
|
||||
self.placeholder_for_type(tcx, type_dbg, ptr_type)
|
||||
}
|
||||
}
|
||||
|
||||
fn tuple_type<'tcx>(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
type_dbg: &mut TypeDebugContext<'tcx>,
|
||||
tuple_type: Ty<'tcx>,
|
||||
components: &'tcx [Ty<'tcx>],
|
||||
) -> UnitEntryId {
|
||||
let components = components
|
||||
.into_iter()
|
||||
.map(|&ty| (ty, self.debug_type(tcx, type_dbg, ty)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
return_if_type_created_in_meantime!(type_dbg, tuple_type);
|
||||
|
||||
let name = type_names::compute_debuginfo_type_name(tcx, tuple_type, false);
|
||||
let layout = RevealAllLayoutCx(tcx).layout_of(tuple_type);
|
||||
|
||||
let tuple_type_id =
|
||||
self.dwarf.unit.add(self.dwarf.unit.root(), gimli::DW_TAG_structure_type);
|
||||
let tuple_entry = self.dwarf.unit.get_mut(tuple_type_id);
|
||||
tuple_entry.set(gimli::DW_AT_name, AttributeValue::StringRef(self.dwarf.strings.add(name)));
|
||||
tuple_entry.set(gimli::DW_AT_byte_size, AttributeValue::Udata(layout.size.bytes()));
|
||||
tuple_entry.set(gimli::DW_AT_alignment, AttributeValue::Udata(layout.align.pref.bytes()));
|
||||
|
||||
for (i, (ty, dw_ty)) in components.into_iter().enumerate() {
|
||||
let member_id = self.dwarf.unit.add(tuple_type_id, gimli::DW_TAG_member);
|
||||
let member_entry = self.dwarf.unit.get_mut(member_id);
|
||||
member_entry.set(
|
||||
gimli::DW_AT_name,
|
||||
AttributeValue::StringRef(self.dwarf.strings.add(format!("__{i}"))),
|
||||
);
|
||||
member_entry.set(gimli::DW_AT_type, AttributeValue::UnitRef(dw_ty));
|
||||
member_entry.set(
|
||||
gimli::DW_AT_alignment,
|
||||
AttributeValue::Udata(RevealAllLayoutCx(tcx).layout_of(ty).align.pref.bytes()),
|
||||
);
|
||||
member_entry.set(
|
||||
gimli::DW_AT_data_member_location,
|
||||
AttributeValue::Udata(layout.fields.offset(i).bytes()),
|
||||
);
|
||||
}
|
||||
|
||||
tuple_type_id
|
||||
}
|
||||
|
||||
fn placeholder_for_type<'tcx>(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
type_dbg: &mut TypeDebugContext<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
) -> UnitEntryId {
|
||||
self.debug_type(
|
||||
tcx,
|
||||
type_dbg,
|
||||
Ty::new_array(tcx, tcx.types.u8, RevealAllLayoutCx(tcx).layout_of(ty).size.bytes()),
|
||||
)
|
||||
}
|
||||
}
|
@ -3,9 +3,10 @@
use cranelift_codegen::ir::Endianness;
use cranelift_codegen::isa::{unwind::UnwindInfo, TargetIsa};
use cranelift_object::ObjectProduct;
use gimli::write::{Address, CieId, EhFrame, FrameTable, Section};
use gimli::write::{CieId, EhFrame, FrameTable, Section};
use gimli::RunTimeEndian;

use super::emit::address_for_func;
use super::object::WriteDebugInfo;
use crate::prelude::*;

@ -47,11 +48,8 @@ impl UnwindContext {

match unwind_info {
UnwindInfo::SystemV(unwind_info) => {
self.frame_table.add_fde(
self.cie_id.unwrap(),
unwind_info
.to_fde(Address::Symbol { symbol: func_id.as_u32() as usize, addend: 0 }),
);
self.frame_table
.add_fde(self.cie_id.unwrap(), unwind_info.to_fde(address_for_func(func_id)));
}
UnwindInfo::WindowsX64(_) => {
// FIXME implement this

@ -1,25 +1,29 @@
|
||||
//! The AOT driver uses [`cranelift_object`] to write object files suitable for linking into a
|
||||
//! standalone executable.
|
||||
|
||||
use std::fs::File;
|
||||
use std::path::PathBuf;
|
||||
use std::fs::{self, File};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::thread::JoinHandle;
|
||||
|
||||
use cranelift_object::{ObjectBuilder, ObjectModule};
|
||||
use rustc_codegen_ssa::assert_module_sources::CguReuse;
|
||||
use rustc_codegen_ssa::back::link::ensure_removed;
|
||||
use rustc_codegen_ssa::back::metadata::create_compressed_metadata_file;
|
||||
use rustc_codegen_ssa::base::determine_cgu_reuse;
|
||||
use rustc_codegen_ssa::errors as ssa_errors;
|
||||
use rustc_codegen_ssa::{CodegenResults, CompiledModule, CrateInfo, ModuleKind};
|
||||
use rustc_data_structures::profiling::SelfProfilerRef;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_metadata::fs::copy_to_stdout;
|
||||
use rustc_metadata::EncodedMetadata;
|
||||
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
|
||||
use rustc_middle::mir::mono::{CodegenUnit, MonoItem};
|
||||
use rustc_session::config::{DebugInfo, OutputFilenames, OutputType};
|
||||
use rustc_session::config::{DebugInfo, OutFileName, OutputFilenames, OutputType};
|
||||
use rustc_session::Session;
|
||||
|
||||
use crate::concurrency_limiter::{ConcurrencyLimiter, ConcurrencyLimiterToken};
|
||||
use crate::debuginfo::TypeDebugContext;
|
||||
use crate::global_asm::GlobalAsmConfig;
|
||||
use crate::{prelude::*, BackendConfig};
|
||||
|
||||
@ -53,6 +57,7 @@ impl OngoingCodegen {
|
||||
pub(crate) fn join(
|
||||
self,
|
||||
sess: &Session,
|
||||
outputs: &OutputFilenames,
|
||||
backend_config: &BackendConfig,
|
||||
) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
|
||||
let mut work_products = FxIndexMap::default();
|
||||
@ -110,19 +115,185 @@ impl OngoingCodegen {
|
||||
|
||||
sess.dcx().abort_if_errors();
|
||||
|
||||
(
|
||||
CodegenResults {
|
||||
modules,
|
||||
allocator_module: self.allocator_module,
|
||||
metadata_module: self.metadata_module,
|
||||
metadata: self.metadata,
|
||||
crate_info: self.crate_info,
|
||||
},
|
||||
work_products,
|
||||
)
|
||||
let codegen_results = CodegenResults {
|
||||
modules,
|
||||
allocator_module: self.allocator_module,
|
||||
metadata_module: self.metadata_module,
|
||||
metadata: self.metadata,
|
||||
crate_info: self.crate_info,
|
||||
};
|
||||
|
||||
produce_final_output_artifacts(sess, &codegen_results, outputs);
|
||||
|
||||
(codegen_results, work_products)
|
||||
}
|
||||
}
|
||||
|
||||
// Adapted from https://github.com/rust-lang/rust/blob/73476d49904751f8d90ce904e16dfbc278083d2c/compiler/rustc_codegen_ssa/src/back/write.rs#L547C1-L706C2
|
||||
fn produce_final_output_artifacts(
|
||||
sess: &Session,
|
||||
codegen_results: &CodegenResults,
|
||||
crate_output: &OutputFilenames,
|
||||
) {
|
||||
let user_wants_bitcode = false;
|
||||
let mut user_wants_objects = false;
|
||||
|
||||
// Produce final compile outputs.
|
||||
let copy_gracefully = |from: &Path, to: &OutFileName| match to {
|
||||
OutFileName::Stdout => {
|
||||
if let Err(e) = copy_to_stdout(from) {
|
||||
sess.dcx().emit_err(ssa_errors::CopyPath::new(from, to.as_path(), e));
|
||||
}
|
||||
}
|
||||
OutFileName::Real(path) => {
|
||||
if let Err(e) = fs::copy(from, path) {
|
||||
sess.dcx().emit_err(ssa_errors::CopyPath::new(from, path, e));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let copy_if_one_unit = |output_type: OutputType, keep_numbered: bool| {
|
||||
if codegen_results.modules.len() == 1 {
|
||||
// 1) Only one codegen unit. In this case it's no difficulty
|
||||
// to copy `foo.0.x` to `foo.x`.
|
||||
let module_name = Some(&codegen_results.modules[0].name[..]);
|
||||
let path = crate_output.temp_path(output_type, module_name);
|
||||
let output = crate_output.path(output_type);
|
||||
if !output_type.is_text_output() && output.is_tty() {
|
||||
sess.dcx()
|
||||
.emit_err(ssa_errors::BinaryOutputToTty { shorthand: output_type.shorthand() });
|
||||
} else {
|
||||
copy_gracefully(&path, &output);
|
||||
}
|
||||
if !sess.opts.cg.save_temps && !keep_numbered {
|
||||
// The user just wants `foo.x`, not `foo.#module-name#.x`.
|
||||
ensure_removed(sess.dcx(), &path);
|
||||
}
|
||||
} else {
|
||||
let extension = crate_output
|
||||
.temp_path(output_type, None)
|
||||
.extension()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_owned();
|
||||
|
||||
if crate_output.outputs.contains_explicit_name(&output_type) {
|
||||
// 2) Multiple codegen units, with `--emit foo=some_name`. We have
|
||||
// no good solution for this case, so warn the user.
|
||||
sess.dcx().emit_warn(ssa_errors::IgnoringEmitPath { extension });
|
||||
} else if crate_output.single_output_file.is_some() {
|
||||
// 3) Multiple codegen units, with `-o some_name`. We have
|
||||
// no good solution for this case, so warn the user.
|
||||
sess.dcx().emit_warn(ssa_errors::IgnoringOutput { extension });
|
||||
} else {
|
||||
// 4) Multiple codegen units, but no explicit name. We
|
||||
// just leave the `foo.0.x` files in place.
|
||||
// (We don't have to do any work in this case.)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Flag to indicate whether the user explicitly requested bitcode.
|
||||
// Otherwise, we produced it only as a temporary output, and will need
|
||||
// to get rid of it.
|
||||
for output_type in crate_output.outputs.keys() {
|
||||
match *output_type {
|
||||
OutputType::Bitcode => {
|
||||
// Cranelift doesn't have bitcode
|
||||
// user_wants_bitcode = true;
|
||||
// // Copy to .bc, but always keep the .0.bc. There is a later
|
||||
// // check to figure out if we should delete .0.bc files, or keep
|
||||
// // them for making an rlib.
|
||||
// copy_if_one_unit(OutputType::Bitcode, true);
|
||||
}
|
||||
OutputType::LlvmAssembly => {
|
||||
// Cranelift IR text already emitted during codegen
|
||||
// copy_if_one_unit(OutputType::LlvmAssembly, false);
|
||||
}
|
||||
OutputType::Assembly => {
|
||||
// Currently no support for emitting raw assembly files
|
||||
// copy_if_one_unit(OutputType::Assembly, false);
|
||||
}
|
||||
OutputType::Object => {
|
||||
user_wants_objects = true;
|
||||
copy_if_one_unit(OutputType::Object, true);
|
||||
}
|
||||
OutputType::Mir | OutputType::Metadata | OutputType::Exe | OutputType::DepInfo => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up unwanted temporary files.
|
||||
|
||||
// We create the following files by default:
|
||||
// - #crate#.#module-name#.bc
|
||||
// - #crate#.#module-name#.o
|
||||
// - #crate#.crate.metadata.bc
|
||||
// - #crate#.crate.metadata.o
|
||||
// - #crate#.o (linked from crate.##.o)
|
||||
// - #crate#.bc (copied from crate.##.bc)
|
||||
// We may create additional files if requested by the user (through
|
||||
// `-C save-temps` or `--emit=` flags).
|
||||
|
||||
if !sess.opts.cg.save_temps {
|
||||
// Remove the temporary .#module-name#.o objects. If the user didn't
|
||||
// explicitly request bitcode (with --emit=bc), and the bitcode is not
|
||||
// needed for building an rlib, then we must remove .#module-name#.bc as
|
||||
// well.
|
||||
|
||||
// Specific rules for keeping .#module-name#.bc:
|
||||
// - If the user requested bitcode (`user_wants_bitcode`), and
|
||||
// codegen_units > 1, then keep it.
|
||||
// - If the user requested bitcode but codegen_units == 1, then we
|
||||
// can toss .#module-name#.bc because we copied it to .bc earlier.
|
||||
// - If we're not building an rlib and the user didn't request
|
||||
// bitcode, then delete .#module-name#.bc.
|
||||
// If you change how this works, also update back::link::link_rlib,
|
||||
// where .#module-name#.bc files are (maybe) deleted after making an
|
||||
// rlib.
|
||||
let needs_crate_object = crate_output.outputs.contains_key(&OutputType::Exe);
|
||||
|
||||
let keep_numbered_bitcode = user_wants_bitcode && sess.codegen_units().as_usize() > 1;
|
||||
|
||||
let keep_numbered_objects =
|
||||
needs_crate_object || (user_wants_objects && sess.codegen_units().as_usize() > 1);
|
||||
|
||||
for module in codegen_results.modules.iter() {
|
||||
if let Some(ref path) = module.object {
|
||||
if !keep_numbered_objects {
|
||||
ensure_removed(sess.dcx(), path);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref path) = module.dwarf_object {
|
||||
if !keep_numbered_objects {
|
||||
ensure_removed(sess.dcx(), path);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ref path) = module.bytecode {
|
||||
if !keep_numbered_bitcode {
|
||||
ensure_removed(sess.dcx(), path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !user_wants_bitcode {
|
||||
if let Some(ref allocator_module) = codegen_results.allocator_module {
|
||||
if let Some(ref path) = allocator_module.bytecode {
|
||||
ensure_removed(sess.dcx(), path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We leave the following files around by default:
|
||||
// - #crate#.o
|
||||
// - #crate#.crate.metadata.o
|
||||
// - #crate#.bc
|
||||
// These are used in linking steps and will be cleaned up afterward.
|
||||
}
|
||||
|
||||
fn make_module(sess: &Session, backend_config: &BackendConfig, name: String) -> ObjectModule {
|
||||
let isa = crate::build_isa(sess, backend_config);
|
||||
|
||||
@ -290,6 +461,7 @@ fn module_codegen(
|
||||
tcx.sess.opts.debuginfo != DebugInfo::None,
|
||||
cgu_name,
|
||||
);
|
||||
let mut type_dbg = TypeDebugContext::default();
|
||||
super::predefine_mono_items(tcx, &mut module, &mono_items);
|
||||
let mut codegened_functions = vec![];
|
||||
for (mono_item, _) in mono_items {
|
||||
@ -298,6 +470,7 @@ fn module_codegen(
|
||||
let codegened_function = crate::base::codegen_fn(
|
||||
tcx,
|
||||
&mut cx,
|
||||
&mut type_dbg,
|
||||
Function::new(),
|
||||
&mut module,
|
||||
inst,
|
||||
@ -305,7 +478,10 @@ fn module_codegen(
|
||||
codegened_functions.push(codegened_function);
|
||||
}
|
||||
MonoItem::Static(def_id) => {
|
||||
crate::constant::codegen_static(tcx, &mut module, def_id)
|
||||
let data_id = crate::constant::codegen_static(tcx, &mut module, def_id);
|
||||
if let Some(debug_context) = &mut cx.debug_context {
|
||||
debug_context.define_static(tcx, &mut type_dbg, def_id, data_id);
|
||||
}
|
||||
}
|
||||
MonoItem::GlobalAsm(item_id) => {
|
||||
crate::global_asm::codegen_global_asm_item(
|
||||
|
@ -12,6 +12,7 @@ use rustc_middle::mir::mono::MonoItem;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::Symbol;
|
||||
|
||||
use crate::debuginfo::TypeDebugContext;
|
||||
use crate::{prelude::*, BackendConfig};
|
||||
use crate::{CodegenCx, CodegenMode};
|
||||
|
||||
@ -229,7 +230,14 @@ pub(crate) fn codegen_and_compile_fn<'tcx>(
|
||||
crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name));
|
||||
|
||||
let cached_func = std::mem::replace(&mut cached_context.func, Function::new());
|
||||
let codegened_func = crate::base::codegen_fn(tcx, cx, cached_func, module, instance);
|
||||
let codegened_func = crate::base::codegen_fn(
|
||||
tcx,
|
||||
cx,
|
||||
&mut TypeDebugContext::default(),
|
||||
cached_func,
|
||||
module,
|
||||
instance,
|
||||
);
|
||||
|
||||
crate::base::compile_fn(cx, cached_context, module, codegened_func);
|
||||
});
|
||||
|
@ -8,6 +8,7 @@ use std::sync::Arc;
|
||||
|
||||
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
|
||||
use rustc_hir::{InlineAsmOperand, ItemId};
|
||||
use rustc_middle::mir::interpret::ErrorHandled;
|
||||
use rustc_session::config::{OutputFilenames, OutputType};
|
||||
use rustc_target::asm::InlineAsmArch;
|
||||
|
||||
@ -32,18 +33,27 @@ pub(crate) fn codegen_global_asm_item(tcx: TyCtxt<'_>, global_asm: &mut String,
|
||||
InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: op_sp } => {
|
||||
match asm.operands[operand_idx].0 {
|
||||
InlineAsmOperand::Const { ref anon_const } => {
|
||||
let const_value =
|
||||
tcx.const_eval_poly(anon_const.def_id.to_def_id()).unwrap_or_else(
|
||||
|_| span_bug!(op_sp, "asm const cannot be resolved"),
|
||||
);
|
||||
let ty = tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id);
|
||||
let string = rustc_codegen_ssa::common::asm_const_to_str(
|
||||
tcx,
|
||||
op_sp,
|
||||
const_value,
|
||||
RevealAllLayoutCx(tcx).layout_of(ty),
|
||||
);
|
||||
global_asm.push_str(&string);
|
||||
match tcx.const_eval_poly(anon_const.def_id.to_def_id()) {
|
||||
Ok(const_value) => {
|
||||
let ty = tcx
|
||||
.typeck_body(anon_const.body)
|
||||
.node_type(anon_const.hir_id);
|
||||
let string = rustc_codegen_ssa::common::asm_const_to_str(
|
||||
tcx,
|
||||
op_sp,
|
||||
const_value,
|
||||
RevealAllLayoutCx(tcx).layout_of(ty),
|
||||
);
|
||||
global_asm.push_str(&string);
|
||||
}
|
||||
Err(ErrorHandled::Reported { .. }) => {
|
||||
// An error has already been reported and compilation is
|
||||
// guaranteed to fail if execution hits this path.
|
||||
}
|
||||
Err(ErrorHandled::TooGeneric(_)) => {
|
||||
span_bug!(op_sp, "asm const cannot be resolved; too generic");
|
||||
}
|
||||
}
|
||||
}
|
||||
InlineAsmOperand::SymFn { anon_const } => {
|
||||
if cfg!(not(feature = "inline_asm_sym")) {
|
||||
|
@ -341,6 +341,8 @@ fn codegen_float_intrinsic_call<'tcx>(
|
||||
sym::roundf64 => ("round", 1, fx.tcx.types.f64, types::F64),
|
||||
sym::roundevenf32 => ("roundevenf", 1, fx.tcx.types.f32, types::F32),
|
||||
sym::roundevenf64 => ("roundeven", 1, fx.tcx.types.f64, types::F64),
|
||||
sym::nearbyintf32 => ("nearbyintf", 1, fx.tcx.types.f32, types::F32),
|
||||
sym::nearbyintf64 => ("nearbyint", 1, fx.tcx.types.f64, types::F64),
|
||||
sym::sinf32 => ("sinf", 1, fx.tcx.types.f32, types::F32),
|
||||
sym::sinf64 => ("sin", 1, fx.tcx.types.f64, types::F64),
|
||||
sym::cosf32 => ("cosf", 1, fx.tcx.types.f32, types::F32),
|
||||
@ -392,6 +394,8 @@ fn codegen_float_intrinsic_call<'tcx>(
|
||||
| sym::ceilf64
|
||||
| sym::truncf32
|
||||
| sym::truncf64
|
||||
| sym::nearbyintf32
|
||||
| sym::nearbyintf64
|
||||
| sym::sqrtf32
|
||||
| sym::sqrtf64 => {
|
||||
let val = match intrinsic {
|
||||
@ -399,6 +403,7 @@ fn codegen_float_intrinsic_call<'tcx>(
|
||||
sym::floorf32 | sym::floorf64 => fx.bcx.ins().floor(args[0]),
|
||||
sym::ceilf32 | sym::ceilf64 => fx.bcx.ins().ceil(args[0]),
|
||||
sym::truncf32 | sym::truncf64 => fx.bcx.ins().trunc(args[0]),
|
||||
sym::nearbyintf32 | sym::nearbyintf64 => fx.bcx.ins().nearest(args[0]),
|
||||
sym::sqrtf32 | sym::sqrtf64 => fx.bcx.ins().sqrt(args[0]),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
@ -148,7 +148,7 @@ impl CodegenCx {
|
||||
let unwind_context =
|
||||
UnwindContext::new(isa, matches!(backend_config.codegen_mode, CodegenMode::Aot));
|
||||
let debug_context = if debug_info && !tcx.sess.target.options.is_like_windows {
|
||||
Some(DebugContext::new(tcx, isa))
|
||||
Some(DebugContext::new(tcx, isa, cgu_name.as_str()))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@ -233,12 +233,13 @@ impl CodegenBackend for CraneliftCodegenBackend {
|
||||
&self,
|
||||
ongoing_codegen: Box<dyn Any>,
|
||||
sess: &Session,
|
||||
_outputs: &OutputFilenames,
|
||||
outputs: &OutputFilenames,
|
||||
) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
|
||||
ongoing_codegen
|
||||
.downcast::<driver::aot::OngoingCodegen>()
|
||||
.unwrap()
|
||||
.join(sess, self.config.borrow().as_ref().unwrap())
|
||||
ongoing_codegen.downcast::<driver::aot::OngoingCodegen>().unwrap().join(
|
||||
sess,
|
||||
outputs,
|
||||
self.config.borrow().as_ref().unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
fn link(
|
||||
|
@ -2,7 +2,7 @@
|
||||
//!
|
||||
//! See `rustc_codegen_ssa/src/meth.rs` for reference.
|
||||
|
||||
use crate::constant::data_id_for_alloc_id;
|
||||
use crate::constant::data_id_for_vtable;
|
||||
use crate::prelude::*;
|
||||
|
||||
pub(crate) fn vtable_memflags() -> MemFlags {
|
||||
@ -92,12 +92,10 @@ pub(crate) fn get_vtable<'tcx>(
|
||||
ty: Ty<'tcx>,
|
||||
trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
|
||||
) -> Value {
|
||||
let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
|
||||
let data_id =
|
||||
data_id_for_alloc_id(&mut fx.constants_cx, &mut *fx.module, alloc_id, Mutability::Not);
|
||||
let data_id = data_id_for_vtable(fx.tcx, &mut fx.constants_cx, fx.module, ty, trait_ref);
|
||||
let local_data_id = fx.module.declare_data_in_func(data_id, fx.bcx.func);
|
||||
if fx.clif_comments.enabled() {
|
||||
fx.add_comment(local_data_id, format!("vtable: {:?}", alloc_id));
|
||||
fx.add_comment(local_data_id, "vtable");
|
||||
}
|
||||
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
|
||||
}
|
||||
|
@ -418,6 +418,36 @@ pub fn panic(_msg: &'static str) -> ! {
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! panic_const {
|
||||
($($lang:ident = $message:expr,)+) => {
|
||||
#[cfg(not(bootstrap))]
|
||||
pub mod panic_const {
|
||||
use super::*;
|
||||
|
||||
$(
|
||||
#[track_caller]
|
||||
#[lang = stringify!($lang)]
|
||||
pub fn $lang() -> ! {
|
||||
panic($message);
|
||||
}
|
||||
)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
panic_const! {
|
||||
panic_const_add_overflow = "attempt to add with overflow",
|
||||
panic_const_sub_overflow = "attempt to subtract with overflow",
|
||||
panic_const_mul_overflow = "attempt to multiply with overflow",
|
||||
panic_const_div_overflow = "attempt to divide with overflow",
|
||||
panic_const_rem_overflow = "attempt to calculate the remainder with overflow",
|
||||
panic_const_neg_overflow = "attempt to negate with overflow",
|
||||
panic_const_shr_overflow = "attempt to shift right with overflow",
|
||||
panic_const_shl_overflow = "attempt to shift left with overflow",
|
||||
panic_const_div_by_zero = "attempt to divide by zero",
|
||||
panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero",
|
||||
}
|
||||
|
||||
#[lang = "panic_cannot_unwind"]
|
||||
fn panic_cannot_unwind() -> ! {
|
||||
unsafe {
|
||||
|
@ -6,9 +6,8 @@ use crate::llvm;
|
||||
|
||||
use itertools::Itertools as _;
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
|
||||
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::mir;
|
||||
@ -335,16 +334,9 @@ fn save_function_record(
|
||||
);
|
||||
}
|
||||
|

/// When finalizing the coverage map, `FunctionCoverage` only has the `CodeRegion`s and counters for
/// the functions that went through codegen; such as public functions and "used" functions
/// (functions referenced by other "used" or public items). Any other functions considered unused,
/// or "Unreachable", were still parsed and processed through the MIR stage, but were not
/// codegenned. (Note that `-Clink-dead-code` can force some unused code to be codegenned, but
/// that flag is known to cause other errors, when combined with `-C instrument-coverage`; and
/// `-Clink-dead-code` will not generate code for unused generic functions.)
///
/// We can find the unused functions (including generic functions) by the set difference of all MIR
/// `DefId`s (`tcx` query `mir_keys`) minus the codegenned `DefId`s (`codegenned_and_inlined_items`).
/// Each CGU will normally only emit coverage metadata for the functions that it actually generates.
/// But since we don't want unused functions to disappear from coverage reports, we also scan for
/// functions that were instrumented but are not participating in codegen.
///
/// These unused functions don't need to be codegenned, but we do need to add them to the function
/// coverage map (in a single designated CGU) so that we still emit coverage mappings for them.
@ -354,75 +346,109 @@ fn add_unused_functions(cx: &CodegenCx<'_, '_>) {
|
||||
assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu());
|
||||
|
||||
let tcx = cx.tcx;
|
||||
let usage = prepare_usage_sets(tcx);
|
||||
|
||||
let eligible_def_ids = tcx.mir_keys(()).iter().filter_map(|local_def_id| {
|
||||
let def_id = local_def_id.to_def_id();
|
||||
let kind = tcx.def_kind(def_id);
|
||||
// `mir_keys` will give us `DefId`s for all kinds of things, not
|
||||
// just "functions", like consts, statics, etc. Filter those out.
|
||||
if !matches!(kind, DefKind::Fn | DefKind::AssocFn | DefKind::Closure) {
|
||||
return None;
|
||||
}
|
||||
let is_unused_fn = |def_id: LocalDefId| -> bool {
|
||||
let def_id = def_id.to_def_id();
|
||||
|
||||
// To be eligible for "unused function" mappings, a definition must:
|
||||
// - Be function-like
|
||||
// - Not participate directly in codegen (or have lost all its coverage statements)
|
||||
// - Not have any coverage statements inlined into codegenned functions
|
||||
tcx.def_kind(def_id).is_fn_like()
|
||||
&& (!usage.all_mono_items.contains(&def_id)
|
||||
|| usage.missing_own_coverage.contains(&def_id))
|
||||
&& !usage.used_via_inlining.contains(&def_id)
|
||||
};
|
||||
|
||||
// Scan for unused functions that were instrumented for coverage.
|
||||
for def_id in tcx.mir_keys(()).iter().copied().filter(|&def_id| is_unused_fn(def_id)) {
|
||||
// Get the coverage info from MIR, skipping functions that were never instrumented.
|
||||
let body = tcx.optimized_mir(def_id);
|
||||
let Some(function_coverage_info) = body.function_coverage_info.as_deref() else { continue };
|
||||
|
||||
// FIXME(79651): Consider trying to filter out dummy instantiations of
|
||||
// unused generic functions from library crates, because they can produce
|
||||
// "unused instantiation" in coverage reports even when they are actually
|
||||
// used by some downstream crate in the same binary.
|
||||
|
||||
Some(local_def_id.to_def_id())
|
||||
});
|
||||
|
||||
let codegenned_def_ids = codegenned_and_inlined_items(tcx);
|
||||
|
||||
// For each `DefId` that should have coverage instrumentation but wasn't
|
||||
// codegenned, add it to the function coverage map as an unused function.
|
||||
for def_id in eligible_def_ids.filter(|id| !codegenned_def_ids.contains(id)) {
|
||||
// Skip any function that didn't have coverage data added to it by the
|
||||
// coverage instrumentor.
|
||||
let body = tcx.instance_mir(ty::InstanceDef::Item(def_id));
|
||||
let Some(function_coverage_info) = body.function_coverage_info.as_deref() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
debug!("generating unused fn: {def_id:?}");
|
||||
let instance = declare_unused_fn(tcx, def_id);
|
||||
add_unused_function_coverage(cx, instance, function_coverage_info);
|
||||
add_unused_function_coverage(cx, def_id, function_coverage_info);
|
||||
}
|
||||
}
|
||||
|
||||
/// All items participating in code generation together with (instrumented)
|
||||
/// items inlined into them.
|
||||
fn codegenned_and_inlined_items(tcx: TyCtxt<'_>) -> DefIdSet {
|
||||
let (items, cgus) = tcx.collect_and_partition_mono_items(());
|
||||
let mut visited = DefIdSet::default();
|
||||
let mut result = items.clone();
|
||||
struct UsageSets<'tcx> {
|
||||
all_mono_items: &'tcx DefIdSet,
|
||||
used_via_inlining: FxHashSet<DefId>,
|
||||
missing_own_coverage: FxHashSet<DefId>,
|
||||
}
|
||||
|
||||
for cgu in cgus {
|
||||
for item in cgu.items().keys() {
|
||||
if let mir::mono::MonoItem::Fn(ref instance) = item {
|
||||
let did = instance.def_id();
|
||||
if !visited.insert(did) {
|
||||
continue;
|
||||
}
|
||||
let body = tcx.instance_mir(instance.def);
|
||||
for block in body.basic_blocks.iter() {
|
||||
for statement in &block.statements {
|
||||
let mir::StatementKind::Coverage(_) = statement.kind else { continue };
|
||||
let scope = statement.source_info.scope;
|
||||
if let Some(inlined) = scope.inlined_instance(&body.source_scopes) {
|
||||
result.insert(inlined.def_id());
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Prepare sets of definitions that are relevant to deciding whether something
|
||||
/// is an "unused function" for coverage purposes.
|
||||
fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> {
|
||||
let (all_mono_items, cgus) = tcx.collect_and_partition_mono_items(());
|
||||
|
||||
// Obtain a MIR body for each function participating in codegen, via an
|
||||
// arbitrary instance.
|
||||
let mut def_ids_seen = FxHashSet::default();
|
||||
let def_and_mir_for_all_mono_fns = cgus
|
||||
.iter()
|
||||
.flat_map(|cgu| cgu.items().keys())
|
||||
.filter_map(|item| match item {
|
||||
mir::mono::MonoItem::Fn(instance) => Some(instance),
|
||||
mir::mono::MonoItem::Static(_) | mir::mono::MonoItem::GlobalAsm(_) => None,
|
||||
})
|
||||
// We only need one arbitrary instance per definition.
|
||||
.filter(move |instance| def_ids_seen.insert(instance.def_id()))
|
||||
.map(|instance| {
|
||||
// We don't care about the instance, just its underlying MIR.
|
||||
let body = tcx.instance_mir(instance.def);
|
||||
(instance.def_id(), body)
|
||||
});
|
||||
|
||||
// Functions whose coverage statements were found inlined into other functions.
|
||||
let mut used_via_inlining = FxHashSet::default();
|
||||
// Functions that were instrumented, but had all of their coverage statements
|
||||
// removed by later MIR transforms (e.g. UnreachablePropagation).
|
||||
let mut missing_own_coverage = FxHashSet::default();
|
||||
|
||||
for (def_id, body) in def_and_mir_for_all_mono_fns {
|
||||
let mut saw_own_coverage = false;
|
||||
|
||||
// Inspect every coverage statement in the function's MIR.
|
||||
for stmt in body
|
||||
.basic_blocks
|
||||
.iter()
|
||||
.flat_map(|block| &block.statements)
|
||||
.filter(|stmt| matches!(stmt.kind, mir::StatementKind::Coverage(_)))
|
||||
{
|
||||
if let Some(inlined) = stmt.source_info.scope.inlined_instance(&body.source_scopes) {
|
||||
// This coverage statement was inlined from another function.
|
||||
used_via_inlining.insert(inlined.def_id());
|
||||
} else {
|
||||
// Non-inlined coverage statements belong to the enclosing function.
|
||||
saw_own_coverage = true;
|
||||
}
|
||||
}
|
||||
|
||||
if !saw_own_coverage && body.function_coverage_info.is_some() {
|
||||
missing_own_coverage.insert(def_id);
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
UsageSets { all_mono_items, used_via_inlining, missing_own_coverage }
|
||||
}
|
||||
|
||||
fn declare_unused_fn<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::Instance<'tcx> {
|
||||
ty::Instance::new(
|
||||
fn add_unused_function_coverage<'tcx>(
|
||||
cx: &CodegenCx<'_, 'tcx>,
|
||||
def_id: LocalDefId,
|
||||
function_coverage_info: &'tcx mir::coverage::FunctionCoverageInfo,
|
||||
) {
|
||||
let tcx = cx.tcx;
|
||||
let def_id = def_id.to_def_id();
|
||||
|
||||
// Make a dummy instance that fills in all generics with placeholders.
|
||||
let instance = ty::Instance::new(
|
||||
def_id,
|
||||
ty::GenericArgs::for_item(tcx, def_id, |param, _| {
|
||||
if let ty::GenericParamDefKind::Lifetime = param.kind {
|
||||
@ -431,14 +457,8 @@ fn declare_unused_fn<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::Instance<'tc
|
||||
tcx.mk_param_from_def(param)
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
);
|
||||
|
||||
fn add_unused_function_coverage<'tcx>(
|
||||
cx: &CodegenCx<'_, 'tcx>,
|
||||
instance: ty::Instance<'tcx>,
|
||||
function_coverage_info: &'tcx mir::coverage::FunctionCoverageInfo,
|
||||
) {
|
||||
// An unused function's mappings will automatically be rewritten to map to
|
||||
// zero, because none of its counters/expressions are marked as seen.
|
||||
let function_coverage = FunctionCoverageCollector::unused(instance, function_coverage_info);
|
||||
|
@ -683,7 +683,8 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>(
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let coroutine_layout = cx.tcx.optimized_mir(coroutine_def_id).coroutine_layout().unwrap();
|
||||
let coroutine_layout =
|
||||
cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args.kind_ty()).unwrap();
|
||||
|
||||
let common_upvar_names = cx.tcx.closure_saved_names_of_captured_variables(coroutine_def_id);
|
||||
let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx);
|
||||
|
@ -135,7 +135,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
|
||||
unique_type_id: UniqueTypeId<'tcx>,
|
||||
) -> DINodeCreationResult<'ll> {
|
||||
let coroutine_type = unique_type_id.expect_ty();
|
||||
let &ty::Coroutine(coroutine_def_id, _) = coroutine_type.kind() else {
|
||||
let &ty::Coroutine(coroutine_def_id, coroutine_args) = coroutine_type.kind() else {
|
||||
bug!("build_coroutine_di_node() called with non-coroutine type: `{:?}`", coroutine_type)
|
||||
};
|
||||
|
||||
@ -158,8 +158,10 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
|
||||
DIFlags::FlagZero,
|
||||
),
|
||||
|cx, coroutine_type_di_node| {
|
||||
let coroutine_layout =
|
||||
cx.tcx.optimized_mir(coroutine_def_id).coroutine_layout().unwrap();
|
||||
let coroutine_layout = cx
|
||||
.tcx
|
||||
.coroutine_layout(coroutine_def_id, coroutine_args.as_coroutine().kind_ty())
|
||||
.unwrap();
|
||||
|
||||
let Variants::Multiple { tag_encoding: TagEncoding::Direct, ref variants, .. } =
|
||||
coroutine_type_and_layout.variants
|
||||
|
@ -682,10 +682,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
(LangItem::PanicMisalignedPointerDereference, vec![required, found, location])
|
||||
}
|
||||
_ => {
|
||||
let msg = bx.const_str(msg.description());
|
||||
// It's `pub fn panic(expr: &str)`, with the wide reference being passed
|
||||
// as two arguments, and `#[track_caller]` adds an implicit third argument.
|
||||
(LangItem::Panic, vec![msg.0, msg.1, location])
|
||||
// It's `pub fn panic_...()` and `#[track_caller]` adds an implicit argument.
|
||||
(msg.panic_function(), vec![location])
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -409,7 +409,7 @@ fn const_validate_mplace<'mir, 'tcx>(
|
||||
}
|
||||
};
|
||||
ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)
|
||||
// Instead of just reporting the `InterpError` via the usual machinery, we give a more targetted
|
||||
// Instead of just reporting the `InterpError` via the usual machinery, we give a more targeted
|
||||
// error about the validation failure.
|
||||
.map_err(|error| report_validation_error(&ecx, error, alloc_id))?;
|
||||
inner = true;
|
||||
|
@ -101,18 +101,17 @@ impl<'tcx> MirPass<'tcx> for Validator {
|
||||
}
|
||||
|
||||
// Enforce that coroutine-closure layouts are identical.
|
||||
if let Some(layout) = body.coroutine_layout()
|
||||
if let Some(layout) = body.coroutine_layout_raw()
|
||||
&& let Some(by_move_body) = body.coroutine_by_move_body()
|
||||
&& let Some(by_move_layout) = by_move_body.coroutine_layout()
|
||||
&& let Some(by_move_layout) = by_move_body.coroutine_layout_raw()
|
||||
{
|
||||
if layout != by_move_layout {
|
||||
// If this turns out not to be true, please let compiler-errors know.
|
||||
// It is possible to support, but requires some changes to the layout
|
||||
// computation code.
|
||||
// FIXME(async_closures): We could do other validation here?
|
||||
if layout.variant_fields.len() != by_move_layout.variant_fields.len() {
|
||||
cfg_checker.fail(
|
||||
Location::START,
|
||||
format!(
|
||||
"Coroutine layout differs from by-move coroutine layout:\n\
|
||||
"Coroutine layout has different number of variant fields from \
|
||||
by-move coroutine layout:\n\
|
||||
layout: {layout:#?}\n\
|
||||
by_move_layout: {by_move_layout:#?}",
|
||||
),
|
||||
@ -715,13 +714,14 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
|
||||
// args of the coroutine. Otherwise, we prefer to use this body
|
||||
// since we may be in the process of computing this MIR in the
|
||||
// first place.
|
||||
let gen_body = if def_id == self.caller_body.source.def_id() {
|
||||
self.caller_body
|
||||
let layout = if def_id == self.caller_body.source.def_id() {
|
||||
// FIXME: This is not right for async closures.
|
||||
self.caller_body.coroutine_layout_raw()
|
||||
} else {
|
||||
self.tcx.optimized_mir(def_id)
|
||||
self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty())
|
||||
};
|
||||
|
||||
let Some(layout) = gen_body.coroutine_layout() else {
|
||||
let Some(layout) = layout else {
|
||||
self.fail(
|
||||
location,
|
||||
format!("No coroutine layout for {parent_ty:?}"),
|
||||
|
@ -1252,21 +1252,18 @@ pub fn resolve_path(sess: &Session, path: impl Into<PathBuf>, span: Span) -> PRe
|
||||
// after macro expansion (that is, they are unhygienic).
|
||||
if !path.is_absolute() {
|
||||
let callsite = span.source_callsite();
|
||||
let mut result = match sess.source_map().span_to_filename(callsite) {
|
||||
FileName::Real(name) => name
|
||||
.into_local_path()
|
||||
.expect("attempting to resolve a file path in an external file"),
|
||||
FileName::DocTest(path, _) => path,
|
||||
other => {
|
||||
return Err(sess.dcx().create_err(errors::ResolveRelativePath {
|
||||
span,
|
||||
path: sess.source_map().filename_for_diagnostics(&other).to_string(),
|
||||
}));
|
||||
}
|
||||
let source_map = sess.source_map();
|
||||
let Some(mut base_path) = source_map.span_to_filename(callsite).into_local_path() else {
|
||||
return Err(sess.dcx().create_err(errors::ResolveRelativePath {
|
||||
span,
|
||||
path: source_map
|
||||
.filename_for_diagnostics(&source_map.span_to_filename(callsite))
|
||||
.to_string(),
|
||||
}));
|
||||
};
|
||||
result.pop();
|
||||
result.push(path);
|
||||
Ok(result)
|
||||
base_path.pop();
|
||||
base_path.push(path);
|
||||
Ok(base_path)
|
||||
} else {
|
||||
Ok(path)
|
||||
}
|
||||
@ -1379,6 +1376,15 @@ pub fn get_single_str_from_tts(
|
||||
tts: TokenStream,
|
||||
name: &str,
|
||||
) -> ExpandResult<Result<Symbol, ErrorGuaranteed>, ()> {
|
||||
get_single_str_spanned_from_tts(cx, span, tts, name).map(|res| res.map(|(s, _)| s))
|
||||
}
|
||||
|
||||
pub fn get_single_str_spanned_from_tts(
|
||||
cx: &mut ExtCtxt<'_>,
|
||||
span: Span,
|
||||
tts: TokenStream,
|
||||
name: &str,
|
||||
) -> ExpandResult<Result<(Symbol, Span), ErrorGuaranteed>, ()> {
|
||||
let mut p = cx.new_parser_from_tts(tts);
|
||||
if p.token == token::Eof {
|
||||
let guar = cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
|
||||
@ -1393,7 +1399,13 @@ pub fn get_single_str_from_tts(
|
||||
if p.token != token::Eof {
|
||||
cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
|
||||
}
|
||||
expr_to_string(cx, ret, "argument must be a string literal").map(|s| s.map(|(s, _)| s))
|
||||
expr_to_spanned_string(cx, ret, "argument must be a string literal").map(|res| {
|
||||
res.map_err(|err| match err {
|
||||
Ok((err, _)) => err.emit(),
|
||||
Err(guar) => guar,
|
||||
})
|
||||
.map(|(symbol, _style, span)| (symbol, span))
|
||||
})
|
||||
}
|
||||
|
||||
/// Extracts comma-separated expressions from `tts`.
|
||||
|
@ -821,6 +821,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
|
||||
rustc_allow_incoherent_impl, AttributeType::Normal, template!(Word), ErrorFollowing, EncodeCrossCrate::No,
|
||||
"#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
|
||||
),
|
||||
rustc_attr!(
|
||||
rustc_preserve_ub_checks, AttributeType::CrateLevel, template!(Word), ErrorFollowing, EncodeCrossCrate::No,
|
||||
"`#![rustc_preserve_ub_checks]` prevents the designated crate from evaluating whether UB checks are enabled when optimizing MIR",
|
||||
),
|
||||
rustc_attr!(
|
||||
rustc_deny_explicit_impl,
|
||||
AttributeType::Normal,
|
||||
|
@ -2289,21 +2289,15 @@ pub enum ImplItemKind<'hir> {
Type(&'hir Ty<'hir>),
}

/// Bind a type to an associated type (i.e., `A = Foo`).
/// An associated item binding.
///
/// Bindings like `A: Debug` are represented as a special type `A =
/// $::Debug` that is understood by the HIR ty lowering code.
/// ### Examples
///
/// FIXME(alexreg): why have a separate type for the binding case,
/// wouldn't it be better to make the `ty` field an enum like the
/// following?
///
/// ```ignore (pseudo-rust)
/// enum TypeBindingKind {
/// Equals(...),
/// Binding(...),
/// }
/// ```
/// * `Trait<A = Ty, B = Ty>`
/// * `Trait<G<Ty> = Ty>`
/// * `Trait<A: Bound>`
/// * `Trait<C = { Ct }>` (under feature `associated_const_equality`)
/// * `Trait<f(): Bound>` (under feature `return_type_notation`)
#[derive(Debug, Clone, Copy, HashStable_Generic)]
pub struct TypeBinding<'hir> {
pub hir_id: HirId,
@ -2336,7 +2330,7 @@ impl<'hir> From<AnonConst> for Term<'hir> {
pub enum TypeBindingKind<'hir> {
/// E.g., `Foo<Bar: Send>`.
Constraint { bounds: &'hir [GenericBound<'hir>] },
/// E.g., `Foo<Bar = ()>`, `Foo<Bar = ()>`
/// E.g., `Foo<Bar = ()>`.
Equality { term: Term<'hir> },
}

@ -199,6 +199,7 @@ language_item_table! {
|
||||
|
||||
Deref, sym::deref, deref_trait, Target::Trait, GenericRequirement::Exact(0);
|
||||
DerefMut, sym::deref_mut, deref_mut_trait, Target::Trait, GenericRequirement::Exact(0);
|
||||
DerefPure, sym::deref_pure, deref_pure_trait, Target::Trait, GenericRequirement::Exact(0);
|
||||
DerefTarget, sym::deref_target, deref_target, Target::AssocTy, GenericRequirement::None;
|
||||
Receiver, sym::receiver, receiver_trait, Target::Trait, GenericRequirement::None;
|
||||
|
||||
@ -247,6 +248,25 @@ language_item_table! {
|
||||
PanicImpl, sym::panic_impl, panic_impl, Target::Fn, GenericRequirement::None;
|
||||
PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, Target::Fn, GenericRequirement::Exact(0);
|
||||
PanicInCleanup, sym::panic_in_cleanup, panic_in_cleanup, Target::Fn, GenericRequirement::Exact(0);
|
||||
/// Constant panic messages, used for codegen of MIR asserts.
|
||||
PanicAddOverflow, sym::panic_const_add_overflow, panic_const_add_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicSubOverflow, sym::panic_const_sub_overflow, panic_const_sub_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicMulOverflow, sym::panic_const_mul_overflow, panic_const_mul_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicDivOverflow, sym::panic_const_div_overflow, panic_const_div_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicRemOverflow, sym::panic_const_rem_overflow, panic_const_rem_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicNegOverflow, sym::panic_const_neg_overflow, panic_const_neg_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicShrOverflow, sym::panic_const_shr_overflow, panic_const_shr_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicShlOverflow, sym::panic_const_shl_overflow, panic_const_shl_overflow, Target::Fn, GenericRequirement::None;
|
||||
PanicDivZero, sym::panic_const_div_by_zero, panic_const_div_by_zero, Target::Fn, GenericRequirement::None;
|
||||
PanicRemZero, sym::panic_const_rem_by_zero, panic_const_rem_by_zero, Target::Fn, GenericRequirement::None;
|
||||
PanicCoroutineResumed, sym::panic_const_coroutine_resumed, panic_const_coroutine_resumed, Target::Fn, GenericRequirement::None;
|
||||
PanicAsyncFnResumed, sym::panic_const_async_fn_resumed, panic_const_async_fn_resumed, Target::Fn, GenericRequirement::None;
|
||||
PanicAsyncGenFnResumed, sym::panic_const_async_gen_fn_resumed, panic_const_async_gen_fn_resumed, Target::Fn, GenericRequirement::None;
|
||||
PanicGenFnNone, sym::panic_const_gen_fn_none, panic_const_gen_fn_none, Target::Fn, GenericRequirement::None;
|
||||
PanicCoroutineResumedPanic, sym::panic_const_coroutine_resumed_panic, panic_const_coroutine_resumed_panic, Target::Fn, GenericRequirement::None;
|
||||
PanicAsyncFnResumedPanic, sym::panic_const_async_fn_resumed_panic, panic_const_async_fn_resumed_panic, Target::Fn, GenericRequirement::None;
|
||||
PanicAsyncGenFnResumedPanic, sym::panic_const_async_gen_fn_resumed_panic, panic_const_async_gen_fn_resumed_panic, Target::Fn, GenericRequirement::None;
|
||||
PanicGenFnNonePanic, sym::panic_const_gen_fn_none_panic, panic_const_gen_fn_none_panic, Target::Fn, GenericRequirement::None;
|
||||
/// libstd panic entry point. Necessary for const eval to be able to catch it
|
||||
BeginPanic, sym::begin_panic, begin_panic_fn, Target::Fn, GenericRequirement::None;
|
||||
|
||||
|
@ -9,8 +9,85 @@ use rustc_trait_selection::traits::error_reporting::suggestions::NextTypeParamNa
|
||||
use super::HirTyLowerer;
|
||||
|
||||
impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
/// Prohibit or lint against *bare* trait object types depending on the edition.
|
||||
///
|
||||
/// *Bare* trait object types are ones that aren't preceded by the keyword `dyn`.
|
||||
/// In edition 2021 and onward we emit a hard error for them.
|
||||
pub(super) fn prohibit_or_lint_bare_trait_object_ty(
|
||||
&self,
|
||||
self_ty: &hir::Ty<'_>,
|
||||
in_path: bool,
|
||||
) {
|
||||
let tcx = self.tcx();
|
||||
|
||||
let hir::TyKind::TraitObject([poly_trait_ref, ..], _, TraitObjectSyntax::None) =
|
||||
self_ty.kind
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let needs_bracket = in_path
|
||||
&& !tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_to_prev_source(self_ty.span)
|
||||
.ok()
|
||||
.is_some_and(|s| s.trim_end().ends_with('<'));
|
||||
|
||||
let is_global = poly_trait_ref.trait_ref.path.is_global();
|
||||
|
||||
let mut sugg = vec![(
|
||||
self_ty.span.shrink_to_lo(),
|
||||
format!(
|
||||
"{}dyn {}",
|
||||
if needs_bracket { "<" } else { "" },
|
||||
if is_global { "(" } else { "" },
|
||||
),
|
||||
)];
|
||||
|
||||
if is_global || needs_bracket {
|
||||
sugg.push((
|
||||
self_ty.span.shrink_to_hi(),
|
||||
format!(
|
||||
"{}{}",
|
||||
if is_global { ")" } else { "" },
|
||||
if needs_bracket { ">" } else { "" },
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
if self_ty.span.edition().at_least_rust_2021() {
|
||||
let msg = "trait objects must include the `dyn` keyword";
|
||||
let label = "add `dyn` keyword before this trait";
|
||||
let mut diag =
|
||||
rustc_errors::struct_span_code_err!(tcx.dcx(), self_ty.span, E0782, "{}", msg);
|
||||
if self_ty.span.can_be_used_for_suggestions()
|
||||
&& !self.maybe_suggest_impl_trait(self_ty, &mut diag)
|
||||
{
|
||||
// FIXME: Only emit this suggestion if the trait is object safe.
|
||||
diag.multipart_suggestion_verbose(label, sugg, Applicability::MachineApplicable);
|
||||
}
|
||||
// Check if the impl trait that we are considering is an impl of a local trait.
|
||||
self.maybe_suggest_blanket_trait_impl(self_ty, &mut diag);
|
||||
self.maybe_suggest_assoc_ty_bound(self_ty, &mut diag);
|
||||
diag.stash(self_ty.span, StashKey::TraitMissingMethod);
|
||||
} else {
|
||||
let msg = "trait objects without an explicit `dyn` are deprecated";
|
||||
tcx.node_span_lint(BARE_TRAIT_OBJECTS, self_ty.hir_id, self_ty.span, msg, |lint| {
|
||||
if self_ty.span.can_be_used_for_suggestions() {
|
||||
lint.multipart_suggestion_verbose(
|
||||
"if this is an object-safe trait, use `dyn`",
|
||||
sugg,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
self.maybe_suggest_blanket_trait_impl(self_ty, lint);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Make sure that we are in the condition to suggest the blanket implementation.
|
||||
pub(super) fn maybe_lint_blanket_trait_impl<G: EmissionGuarantee>(
|
||||
fn maybe_suggest_blanket_trait_impl<G: EmissionGuarantee>(
|
||||
&self,
|
||||
self_ty: &hir::Ty<'_>,
|
||||
diag: &mut Diag<'_, G>,
|
||||
@ -75,9 +152,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
}
|
||||
|
||||
/// Make sure that we are in the condition to suggest `impl Trait`.
|
||||
fn maybe_lint_impl_trait(&self, self_ty: &hir::Ty<'_>, diag: &mut Diag<'_>) -> bool {
|
||||
fn maybe_suggest_impl_trait(&self, self_ty: &hir::Ty<'_>, diag: &mut Diag<'_>) -> bool {
|
||||
let tcx = self.tcx();
|
||||
let parent_id = tcx.hir().get_parent_item(self_ty.hir_id).def_id;
|
||||
// FIXME: If `type_alias_impl_trait` is enabled, also look for `Trait0<Ty = Trait1>`
|
||||
// and suggest `Trait0<Ty = impl Trait1>`.
|
||||
let (sig, generics, owner) = match tcx.hir_node_by_def_id(parent_id) {
|
||||
hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, generics, _), .. }) => {
|
||||
(sig, generics, None)
|
||||
@ -186,71 +265,37 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
false
|
||||
}
|
||||
|
||||
pub(super) fn maybe_lint_bare_trait(&self, self_ty: &hir::Ty<'_>, in_path: bool) {
|
||||
let tcx = self.tcx();
|
||||
if let hir::TyKind::TraitObject([poly_trait_ref, ..], _, TraitObjectSyntax::None) =
|
||||
self_ty.kind
|
||||
fn maybe_suggest_assoc_ty_bound(&self, self_ty: &hir::Ty<'_>, diag: &mut Diag<'_>) {
|
||||
let mut parents = self.tcx().hir().parent_iter(self_ty.hir_id);
|
||||
|
||||
if let Some((_, hir::Node::TypeBinding(binding))) = parents.next()
|
||||
&& let hir::TypeBindingKind::Equality { term: hir::Term::Ty(obj_ty) } = binding.kind
|
||||
{
|
||||
let needs_bracket = in_path
|
||||
&& !tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_to_prev_source(self_ty.span)
|
||||
.ok()
|
||||
.is_some_and(|s| s.trim_end().ends_with('<'));
|
||||
|
||||
let is_global = poly_trait_ref.trait_ref.path.is_global();
|
||||
|
||||
let mut sugg = Vec::from_iter([(
|
||||
self_ty.span.shrink_to_lo(),
|
||||
format!(
|
||||
"{}dyn {}",
|
||||
if needs_bracket { "<" } else { "" },
|
||||
if is_global { "(" } else { "" },
|
||||
),
|
||||
)]);
|
||||
|
||||
if is_global || needs_bracket {
|
||||
sugg.push((
|
||||
self_ty.span.shrink_to_hi(),
|
||||
format!(
|
||||
"{}{}",
|
||||
if is_global { ")" } else { "" },
|
||||
if needs_bracket { ">" } else { "" },
|
||||
),
|
||||
));
|
||||
if let Some((_, hir::Node::TraitRef(..))) = parents.next()
|
||||
&& let Some((_, hir::Node::Ty(ty))) = parents.next()
|
||||
&& let hir::TyKind::TraitObject(..) = ty.kind
|
||||
{
|
||||
// Assoc ty bounds aren't permitted inside trait object types.
|
||||
return;
|
||||
}
|
||||
|
||||
if self_ty.span.edition().at_least_rust_2021() {
|
||||
let msg = "trait objects must include the `dyn` keyword";
|
||||
let label = "add `dyn` keyword before this trait";
|
||||
let mut diag =
|
||||
rustc_errors::struct_span_code_err!(tcx.dcx(), self_ty.span, E0782, "{}", msg);
|
||||
if self_ty.span.can_be_used_for_suggestions()
|
||||
&& !self.maybe_lint_impl_trait(self_ty, &mut diag)
|
||||
{
|
||||
diag.multipart_suggestion_verbose(
|
||||
label,
|
||||
sugg,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
// check if the impl trait that we are considering is a impl of a local trait
|
||||
self.maybe_lint_blanket_trait_impl(self_ty, &mut diag);
|
||||
diag.stash(self_ty.span, StashKey::TraitMissingMethod);
|
||||
let lo = if binding.gen_args.span_ext.is_dummy() {
|
||||
binding.ident.span
|
||||
} else {
|
||||
let msg = "trait objects without an explicit `dyn` are deprecated";
|
||||
tcx.node_span_lint(BARE_TRAIT_OBJECTS, self_ty.hir_id, self_ty.span, msg, |lint| {
|
||||
if self_ty.span.can_be_used_for_suggestions() {
|
||||
lint.multipart_suggestion_verbose(
|
||||
"if this is an object-safe trait, use `dyn`",
|
||||
sugg,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
self.maybe_lint_blanket_trait_impl(self_ty, lint);
|
||||
});
|
||||
binding.gen_args.span_ext
|
||||
};
|
||||
let hi = obj_ty.span;
|
||||
|
||||
if !lo.eq_ctxt(hi) {
|
||||
return;
|
||||
}
|
||||
|
||||
diag.span_suggestion_verbose(
|
||||
lo.between(hi),
|
||||
"you might have meant to write a bound here",
|
||||
": ",
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2212,6 +2212,17 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
try_emit("delegation with early bound generics");
}

// There is no way to instantiate `Self` param for caller if
// 1. callee is a trait method
// 2. delegation item isn't an associative item
if let DefKind::AssocFn = self.tcx().def_kind(sig_id)
&& let DefKind::Fn = self.tcx().def_kind(self.item_def_id())
&& self.tcx().associated_item(sig_id).container
== ty::AssocItemContainer::TraitContainer
{
try_emit("delegation to a trait method from a free function");
}

if self.tcx().asyncness(sig_id) == ty::Asyncness::Yes {
try_emit("delegation to async functions");
}
@ -2339,12 +2350,12 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
)
|
||||
}
|
||||
hir::TyKind::TraitObject(bounds, lifetime, repr) => {
|
||||
self.maybe_lint_bare_trait(hir_ty, in_path);
|
||||
self.prohibit_or_lint_bare_trait_object_ty(hir_ty, in_path);
|
||||
|
||||
let repr = match repr {
|
||||
TraitObjectSyntax::Dyn | TraitObjectSyntax::None => ty::Dyn,
|
||||
TraitObjectSyntax::DynStar => ty::DynStar,
|
||||
};
|
||||
|
||||
self.lower_trait_object_ty(
|
||||
hir_ty.span,
|
||||
hir_ty.hir_id,
|
||||
|
@ -101,8 +101,9 @@ use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_middle::middle;
use rustc_middle::mir::interpret::GlobalId;
use rustc_middle::query::Providers;
use rustc_middle::ty::{Ty, TyCtxt};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::util;
use rustc_session::parse::feature_err;
use rustc_span::{symbol::sym, Span};
@ -186,7 +187,12 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let def_kind = tcx.def_kind(item_def_id);
match def_kind {
DefKind::Static { .. } => tcx.ensure().eval_static_initializer(item_def_id),
DefKind::Const => tcx.ensure().const_eval_poly(item_def_id.into()),
DefKind::Const if tcx.generics_of(item_def_id).params.is_empty() => {
let instance = ty::Instance::new(item_def_id.into(), ty::GenericArgs::empty());
let cid = GlobalId { instance, promoted: None };
let param_env = ty::ParamEnv::reveal_all();
tcx.ensure().eval_to_const_value_raw(param_env.and(cid));
}
_ => (),
}
});

@ -1,14 +1,14 @@
|
||||
use rustc_data_structures::fx::FxIndexMap;
|
||||
use rustc_infer::infer::outlives::components::{push_outlives_components, Component};
|
||||
use rustc_middle::ty::{self, Region, Ty, TyCtxt};
|
||||
use rustc_middle::ty::{GenericArg, GenericArgKind};
|
||||
use rustc_span::Span;
|
||||
use smallvec::smallvec;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
/// Tracks the `T: 'a` or `'a: 'a` predicates that we have inferred
|
||||
/// must be added to the struct header.
|
||||
pub(crate) type RequiredPredicates<'tcx> =
|
||||
BTreeMap<ty::OutlivesPredicate<GenericArg<'tcx>, ty::Region<'tcx>>, Span>;
|
||||
FxIndexMap<ty::OutlivesPredicate<GenericArg<'tcx>, ty::Region<'tcx>>, Span>;
|
||||
|
||||
/// Given a requirement `T: 'a` or `'b: 'a`, deduce the
|
||||
/// outlives_component and add it to `required_predicates`
|
||||
|
@ -122,6 +122,10 @@ hir_typeck_return_stmt_outside_of_fn_body =
|
||||
.encl_body_label = the {$statement_kind} is part of this body...
|
||||
.encl_fn_label = ...not the enclosing function body
|
||||
|
||||
hir_typeck_rpit_box_return_expr = if you change the return type to expect trait objects, box the returned expressions
|
||||
|
||||
hir_typeck_rpit_change_return_type = you could change the return type to be a boxed trait object
|
||||
|
||||
hir_typeck_rustcall_incorrect_args =
|
||||
functions with the "rust-call" ABI must take a single non-self tuple argument
|
||||
|
||||
|
@ -1,17 +1,13 @@
|
||||
use crate::coercion::{AsCoercionSite, CoerceMany};
|
||||
use crate::{Diverges, Expectation, FnCtxt, Needs};
|
||||
use rustc_errors::{Applicability, Diag};
|
||||
use rustc_hir::{
|
||||
self as hir,
|
||||
def::{CtorOf, DefKind, Res},
|
||||
ExprKind, PatKind,
|
||||
};
|
||||
use rustc_hir::def::{CtorOf, DefKind, Res};
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_hir::{self as hir, ExprKind, PatKind};
|
||||
use rustc_hir_pretty::ty_to_string;
|
||||
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
|
||||
use rustc_infer::traits::Obligation;
|
||||
use rustc_middle::ty::{self, Ty};
|
||||
use rustc_span::Span;
|
||||
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
|
||||
use rustc_trait_selection::traits::{
|
||||
IfExpressionCause, MatchExpressionArmCause, ObligationCause, ObligationCauseCode,
|
||||
};
|
||||
@ -91,10 +87,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
|
||||
let arm_ty = self.check_expr_with_expectation(arm.body, expected);
|
||||
all_arms_diverge &= self.diverges.get();
|
||||
|
||||
let opt_suggest_box_span = prior_arm.and_then(|(_, prior_arm_ty, _)| {
|
||||
self.opt_suggest_box_span(prior_arm_ty, arm_ty, orig_expected)
|
||||
});
|
||||
let tail_defines_return_position_impl_trait =
|
||||
self.return_position_impl_trait_from_match_expectation(orig_expected);
|
||||
|
||||
let (arm_block_id, arm_span) = if let hir::ExprKind::Block(blk, _) = arm.body.kind {
|
||||
(Some(blk.hir_id), self.find_block_span(blk))
|
||||
@ -120,7 +114,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
scrut_span: scrut.span,
|
||||
source: match_src,
|
||||
prior_non_diverging_arms: prior_non_diverging_arms.clone(),
|
||||
opt_suggest_box_span,
|
||||
tail_defines_return_position_impl_trait,
|
||||
})),
|
||||
),
|
||||
};
|
||||
@ -243,7 +237,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
let can_coerce_to_return_ty = match self.ret_coercion.as_ref() {
|
||||
Some(ret_coercion) => {
|
||||
let ret_ty = ret_coercion.borrow().expected_ty();
|
||||
let ret_ty = self.inh.infcx.shallow_resolve(ret_ty);
|
||||
let ret_ty = self.infcx.shallow_resolve(ret_ty);
|
||||
self.can_coerce(arm_ty, ret_ty)
|
||||
&& prior_arm.map_or(true, |(_, ty, _)| self.can_coerce(ty, ret_ty))
|
||||
// The match arms need to unify for the case of `impl Trait`.
|
||||
@ -422,7 +416,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
else_expr: &'tcx hir::Expr<'tcx>,
|
||||
then_ty: Ty<'tcx>,
|
||||
else_ty: Ty<'tcx>,
|
||||
opt_suggest_box_span: Option<Span>,
|
||||
tail_defines_return_position_impl_trait: Option<LocalDefId>,
|
||||
) -> ObligationCause<'tcx> {
|
||||
let mut outer_span = if self.tcx.sess.source_map().is_multiline(span) {
|
||||
// The `if`/`else` isn't in one line in the output, include some context to make it
|
||||
@ -513,7 +507,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
then_ty,
|
||||
else_ty,
|
||||
outer_span,
|
||||
opt_suggest_box_span,
|
||||
tail_defines_return_position_impl_trait,
|
||||
})),
|
||||
)
|
||||
}
|
||||
@ -593,96 +587,37 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// When we have a `match` as a tail expression in a `fn` with a returned `impl Trait`
|
||||
/// we check if the different arms would work with boxed trait objects instead and
|
||||
/// provide a structured suggestion in that case.
|
||||
pub(crate) fn opt_suggest_box_span(
|
||||
// Does the expectation of the match define an RPIT?
|
||||
// (e.g. we're in the tail of a function body)
|
||||
//
|
||||
// Returns the `LocalDefId` of the RPIT, which is always identity-substituted.
|
||||
pub fn return_position_impl_trait_from_match_expectation(
|
||||
&self,
|
||||
first_ty: Ty<'tcx>,
|
||||
second_ty: Ty<'tcx>,
|
||||
orig_expected: Expectation<'tcx>,
|
||||
) -> Option<Span> {
|
||||
// FIXME(compiler-errors): This really shouldn't need to be done during the
|
||||
// "good" path of typeck, but here we are.
|
||||
match orig_expected {
|
||||
Expectation::ExpectHasType(expected) => {
|
||||
let TypeVariableOrigin {
|
||||
span,
|
||||
kind: TypeVariableOriginKind::OpaqueTypeInference(rpit_def_id),
|
||||
..
|
||||
} = self.type_var_origin(expected)?
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let Some(rpit_local_def_id) = rpit_def_id.as_local() else {
|
||||
return None;
|
||||
};
|
||||
if !matches!(
|
||||
self.tcx.hir().expect_item(rpit_local_def_id).expect_opaque_ty().origin,
|
||||
hir::OpaqueTyOrigin::FnReturn(..)
|
||||
) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let sig = self.body_fn_sig()?;
|
||||
|
||||
let args = sig.output().walk().find_map(|arg| {
|
||||
if let ty::GenericArgKind::Type(ty) = arg.unpack()
|
||||
&& let ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) = *ty.kind()
|
||||
&& def_id == rpit_def_id
|
||||
{
|
||||
Some(args)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
|
||||
if !self.can_coerce(first_ty, expected) || !self.can_coerce(second_ty, expected) {
|
||||
return None;
|
||||
}
|
||||
|
||||
for ty in [first_ty, second_ty] {
|
||||
for (clause, _) in self
|
||||
.tcx
|
||||
.explicit_item_super_predicates(rpit_def_id)
|
||||
.iter_instantiated_copied(self.tcx, args)
|
||||
{
|
||||
let pred = clause.kind().rebind(match clause.kind().skip_binder() {
|
||||
ty::ClauseKind::Trait(trait_pred) => {
|
||||
assert!(matches!(
|
||||
*trait_pred.trait_ref.self_ty().kind(),
|
||||
ty::Alias(ty::Opaque, ty::AliasTy { def_id, args: alias_args, .. })
|
||||
if def_id == rpit_def_id && args == alias_args
|
||||
));
|
||||
ty::ClauseKind::Trait(trait_pred.with_self_ty(self.tcx, ty))
|
||||
}
|
||||
ty::ClauseKind::Projection(mut proj_pred) => {
|
||||
assert!(matches!(
|
||||
*proj_pred.projection_ty.self_ty().kind(),
|
||||
ty::Alias(ty::Opaque, ty::AliasTy { def_id, args: alias_args, .. })
|
||||
if def_id == rpit_def_id && args == alias_args
|
||||
));
|
||||
proj_pred = proj_pred.with_self_ty(self.tcx, ty);
|
||||
ty::ClauseKind::Projection(proj_pred)
|
||||
}
|
||||
_ => continue,
|
||||
});
|
||||
if !self.predicate_must_hold_modulo_regions(&Obligation::new(
|
||||
self.tcx,
|
||||
ObligationCause::misc(span, self.body_id),
|
||||
self.param_env,
|
||||
pred,
|
||||
)) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some(span)
|
||||
}
|
||||
_ => None,
|
||||
expectation: Expectation<'tcx>,
|
||||
) -> Option<LocalDefId> {
|
||||
let expected_ty = expectation.to_option(self)?;
|
||||
let (def_id, args) = match *expected_ty.kind() {
|
||||
// FIXME: Could also check that the RPIT is not defined
|
||||
ty::Alias(ty::Opaque, alias_ty) => (alias_ty.def_id.as_local()?, alias_ty.args),
|
||||
// FIXME(-Znext-solver): Remove this branch once `replace_opaque_types_with_infer` is gone.
|
||||
ty::Infer(ty::TyVar(_)) => self
|
||||
.inner
|
||||
.borrow()
|
||||
.iter_opaque_types()
|
||||
.find(|(_, v)| v.ty == expected_ty)
|
||||
.map(|(k, _)| (k.def_id, k.args))?,
|
||||
_ => return None,
|
||||
};
|
||||
let hir::OpaqueTyOrigin::FnReturn(parent_def_id) = self.tcx.opaque_type_origin(def_id)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
if &args[0..self.tcx.generics_of(parent_def_id).count()]
|
||||
!= ty::GenericArgs::identity_for_item(self.tcx, parent_def_id).as_slice()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(def_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -848,7 +848,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
bound_vars,
);

let c_result = self.inh.infcx.canonicalize_response(result);
let c_result = self.infcx.canonicalize_response(result);
self.typeck_results.borrow_mut().user_provided_sigs.insert(expr_def_id, c_result);

// Normalize only after registering in `user_provided_sigs`.

@ -35,17 +35,18 @@
|
||||
//! // and are then unable to coerce `&7i32` to `&mut i32`.
|
||||
//! ```
|
||||
|
||||
use crate::errors::SuggestBoxingForReturnImplTrait;
|
||||
use crate::FnCtxt;
|
||||
use rustc_errors::{codes::*, struct_span_code_err, Applicability, Diag, MultiSpan};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_hir::intravisit::{self, Visitor};
|
||||
use rustc_hir::Expr;
|
||||
use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer;
|
||||
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
|
||||
use rustc_infer::infer::{Coercion, DefineOpaqueTypes, InferOk, InferResult};
|
||||
use rustc_infer::traits::TraitEngine;
|
||||
use rustc_infer::traits::TraitEngineExt as _;
|
||||
use rustc_infer::traits::{IfExpressionCause, MatchExpressionArmCause, TraitEngine};
|
||||
use rustc_infer::traits::{Obligation, PredicateObligation};
|
||||
use rustc_middle::lint::in_external_macro;
|
||||
use rustc_middle::traits::BuiltinImplSource;
|
||||
@ -59,6 +60,7 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt};
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::DesugaringKind;
|
||||
use rustc_span::{BytePos, Span};
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use rustc_trait_selection::infer::InferCtxtExt as _;
|
||||
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
|
||||
@ -1638,6 +1640,77 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
|
||||
unsized_return = self.is_return_ty_definitely_unsized(fcx);
|
||||
}
|
||||
}
|
||||
ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
|
||||
arm_span,
|
||||
arm_ty,
|
||||
prior_arm_ty,
|
||||
ref prior_non_diverging_arms,
|
||||
tail_defines_return_position_impl_trait: Some(rpit_def_id),
|
||||
..
|
||||
}) => {
|
||||
err = fcx.err_ctxt().report_mismatched_types(
|
||||
cause,
|
||||
expected,
|
||||
found,
|
||||
coercion_error,
|
||||
);
|
||||
// Check that we're actually in the second or later arm
|
||||
if prior_non_diverging_arms.len() > 0 {
|
||||
self.suggest_boxing_tail_for_return_position_impl_trait(
|
||||
fcx,
|
||||
&mut err,
|
||||
rpit_def_id,
|
||||
arm_ty,
|
||||
prior_arm_ty,
|
||||
prior_non_diverging_arms
|
||||
.iter()
|
||||
.chain(std::iter::once(&arm_span))
|
||||
.copied(),
|
||||
);
|
||||
}
|
||||
}
|
||||
ObligationCauseCode::IfExpression(box IfExpressionCause {
|
||||
then_id,
|
||||
else_id,
|
||||
then_ty,
|
||||
else_ty,
|
||||
tail_defines_return_position_impl_trait: Some(rpit_def_id),
|
||||
..
|
||||
}) => {
|
||||
err = fcx.err_ctxt().report_mismatched_types(
|
||||
cause,
|
||||
expected,
|
||||
found,
|
||||
coercion_error,
|
||||
);
|
||||
let then_span = fcx.find_block_span_from_hir_id(then_id);
|
||||
let else_span = fcx.find_block_span_from_hir_id(else_id);
|
||||
// don't suggest wrapping either blocks in `if .. {} else {}`
|
||||
let is_empty_arm = |id| {
|
||||
let hir::Node::Block(blk) = fcx.tcx.hir_node(id) else {
|
||||
return false;
|
||||
};
|
||||
if blk.expr.is_some() || !blk.stmts.is_empty() {
|
||||
return false;
|
||||
}
|
||||
let Some((_, hir::Node::Expr(expr))) =
|
||||
fcx.tcx.hir().parent_iter(id).nth(1)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
matches!(expr.kind, hir::ExprKind::If(..))
|
||||
};
|
||||
if !is_empty_arm(then_id) && !is_empty_arm(else_id) {
|
||||
self.suggest_boxing_tail_for_return_position_impl_trait(
|
||||
fcx,
|
||||
&mut err,
|
||||
rpit_def_id,
|
||||
then_ty,
|
||||
else_ty,
|
||||
[then_span, else_span].into_iter(),
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
err = fcx.err_ctxt().report_mismatched_types(
|
||||
cause,
|
||||
@ -1677,6 +1750,70 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
|
||||
}
|
||||
}
|
||||
|
||||
fn suggest_boxing_tail_for_return_position_impl_trait(
|
||||
&self,
|
||||
fcx: &FnCtxt<'_, 'tcx>,
|
||||
err: &mut Diag<'_>,
|
||||
rpit_def_id: LocalDefId,
|
||||
a_ty: Ty<'tcx>,
|
||||
b_ty: Ty<'tcx>,
|
||||
arm_spans: impl Iterator<Item = Span>,
|
||||
) {
|
||||
let compatible = |ty: Ty<'tcx>| {
|
||||
fcx.probe(|_| {
|
||||
let ocx = ObligationCtxt::new(fcx);
|
||||
ocx.register_obligations(
|
||||
fcx.tcx
|
||||
.item_super_predicates(rpit_def_id)
|
||||
.instantiate_identity_iter()
|
||||
.filter_map(|clause| {
|
||||
let predicate = clause
|
||||
.kind()
|
||||
.map_bound(|clause| match clause {
|
||||
ty::ClauseKind::Trait(trait_pred) => Some(
|
||||
ty::ClauseKind::Trait(trait_pred.with_self_ty(fcx.tcx, ty)),
|
||||
),
|
||||
ty::ClauseKind::Projection(proj_pred) => {
|
||||
Some(ty::ClauseKind::Projection(
|
||||
proj_pred.with_self_ty(fcx.tcx, ty),
|
||||
))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.transpose()?;
|
||||
Some(Obligation::new(
|
||||
fcx.tcx,
|
||||
ObligationCause::dummy(),
|
||||
fcx.param_env,
|
||||
predicate,
|
||||
))
|
||||
}),
|
||||
);
|
||||
ocx.select_where_possible().is_empty()
|
||||
})
|
||||
};
|
||||
|
||||
if !compatible(a_ty) || !compatible(b_ty) {
|
||||
return;
|
||||
}
|
||||
|
||||
let rpid_def_span = fcx.tcx.def_span(rpit_def_id);
|
||||
err.subdiagnostic(
|
||||
fcx.tcx.dcx(),
|
||||
SuggestBoxingForReturnImplTrait::ChangeReturnType {
|
||||
start_sp: rpid_def_span.with_hi(rpid_def_span.lo() + BytePos(4)),
|
||||
end_sp: rpid_def_span.shrink_to_hi(),
|
||||
},
|
||||
);
|
||||
|
||||
let (starts, ends) =
|
||||
arm_spans.map(|span| (span.shrink_to_lo(), span.shrink_to_hi())).unzip();
|
||||
err.subdiagnostic(
|
||||
fcx.tcx.dcx(),
|
||||
SuggestBoxingForReturnImplTrait::BoxReturnExpr { starts, ends },
|
||||
);
|
||||
}
|
||||
|
||||
fn note_unreachable_loop_return(
|
||||
&self,
|
||||
err: &mut Diag<'_>,
|
||||
|
@ -621,3 +621,21 @@ pub struct NoteCallerChoosesTyForTyParam<'tcx> {
pub ty_param_name: Symbol,
pub found_ty: Ty<'tcx>,
}

#[derive(Subdiagnostic)]
pub enum SuggestBoxingForReturnImplTrait {
#[multipart_suggestion(hir_typeck_rpit_change_return_type, applicability = "maybe-incorrect")]
ChangeReturnType {
#[suggestion_part(code = "Box<dyn")]
start_sp: Span,
#[suggestion_part(code = ">")]
end_sp: Span,
},
#[multipart_suggestion(hir_typeck_rpit_box_return_expr, applicability = "maybe-incorrect")]
BoxReturnExpr {
#[suggestion_part(code = "Box::new(")]
starts: Vec<Span>,
#[suggestion_part(code = ")")]
ends: Vec<Span>,
},
}

@ -1088,7 +1088,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let else_ty = self.check_expr_with_expectation(else_expr, expected);
let else_diverges = self.diverges.get();

let opt_suggest_box_span = self.opt_suggest_box_span(then_ty, else_ty, orig_expected);
let tail_defines_return_position_impl_trait =
self.return_position_impl_trait_from_match_expectation(orig_expected);
let if_cause = self.if_cause(
sp,
cond_expr.span,
@ -1096,7 +1097,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
else_expr,
then_ty,
else_ty,
opt_suggest_box_span,
tail_defines_return_position_impl_trait,
);

coerce.coerce(self, &if_cause, else_expr, else_ty);

@ -347,7 +347,6 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
.any(|n| roots_reachable_from_non_diverging.visited(n));

let infer_var_infos: UnordBag<_> = self
.inh
.infer_var_info
.borrow()
.items()

@ -526,7 +526,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn resolve_rvalue_scopes(&self, def_id: DefId) {
let scope_tree = self.tcx.region_scope_tree(def_id);
let rvalue_scopes = { rvalue_scopes::resolve_rvalue_scopes(self, scope_tree, def_id) };
let mut typeck_results = self.inh.typeck_results.borrow_mut();
let mut typeck_results = self.typeck_results.borrow_mut();
typeck_results.rvalue_scopes = rvalue_scopes;
}

@ -7,7 +7,7 @@ mod suggestions;
|
||||
use crate::coercion::DynamicCoerceMany;
|
||||
use crate::fallback::DivergingFallbackBehavior;
|
||||
use crate::fn_ctxt::checks::DivergingBlockBehavior;
|
||||
use crate::{CoroutineTypes, Diverges, EnclosingBreakables, Inherited};
|
||||
use crate::{CoroutineTypes, Diverges, EnclosingBreakables, TypeckRootCtxt};
|
||||
use hir::def_id::CRATE_DEF_ID;
|
||||
use rustc_errors::{DiagCtxt, ErrorGuaranteed};
|
||||
use rustc_hir as hir;
|
||||
@ -108,7 +108,7 @@ pub struct FnCtxt<'a, 'tcx> {
|
||||
|
||||
pub(super) enclosing_breakables: RefCell<EnclosingBreakables<'tcx>>,
|
||||
|
||||
pub(super) inh: &'a Inherited<'tcx>,
|
||||
pub(super) root_ctxt: &'a TypeckRootCtxt<'tcx>,
|
||||
|
||||
pub(super) fallback_has_occurred: Cell<bool>,
|
||||
|
||||
@ -118,12 +118,12 @@ pub struct FnCtxt<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
pub fn new(
|
||||
inh: &'a Inherited<'tcx>,
|
||||
root_ctxt: &'a TypeckRootCtxt<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
body_id: LocalDefId,
|
||||
) -> FnCtxt<'a, 'tcx> {
|
||||
let (diverging_fallback_behavior, diverging_block_behavior) =
|
||||
parse_never_type_options_attr(inh.tcx);
|
||||
parse_never_type_options_attr(root_ctxt.tcx);
|
||||
FnCtxt {
|
||||
body_id,
|
||||
param_env,
|
||||
@ -137,7 +137,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
stack: Vec::new(),
|
||||
by_id: Default::default(),
|
||||
}),
|
||||
inh,
|
||||
root_ctxt,
|
||||
fallback_has_occurred: Cell::new(false),
|
||||
diverging_fallback_behavior,
|
||||
diverging_block_behavior,
|
||||
@ -206,9 +206,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Deref for FnCtxt<'a, 'tcx> {
|
||||
type Target = Inherited<'tcx>;
|
||||
type Target = TypeckRootCtxt<'tcx>;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.inh
|
||||
self.root_ctxt
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -95,8 +95,7 @@ impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
|
||||
Some(ref ty) => {
|
||||
let o_ty = self.fcx.lower_ty(ty);
|
||||
|
||||
let c_ty =
|
||||
self.fcx.inh.infcx.canonicalize_user_type_annotation(UserType::Ty(o_ty.raw));
|
||||
let c_ty = self.fcx.infcx.canonicalize_user_type_annotation(UserType::Ty(o_ty.raw));
|
||||
debug!("visit_local: ty.hir_id={:?} o_ty={:?} c_ty={:?}", ty.hir_id, o_ty, c_ty);
|
||||
self.fcx
|
||||
.typeck_results
|
||||
|
@ -31,7 +31,6 @@ pub mod expr_use_visitor;
mod fallback;
mod fn_ctxt;
mod gather_locals;
mod inherited;
mod intrinsicck;
mod mem_categorization;
mod method;
@ -39,11 +38,12 @@ mod op;
mod pat;
mod place_op;
mod rvalue_scopes;
mod typeck_root_ctxt;
mod upvar;
mod writeback;

pub use fn_ctxt::FnCtxt;
pub use inherited::Inherited;
pub use typeck_root_ctxt::TypeckRootCtxt;

use crate::check::check_fn;
use crate::coercion::DynamicCoerceMany;
@ -83,20 +83,6 @@ macro_rules! type_error_struct {
|
||||
})
|
||||
}
|
||||
|
||||
/// If this `DefId` is a "primary tables entry", returns
|
||||
/// `Some((body_id, body_ty, fn_sig))`. Otherwise, returns `None`.
|
||||
///
|
||||
/// If this function returns `Some`, then `typeck_results(def_id)` will
|
||||
/// succeed; if it returns `None`, then `typeck_results(def_id)` may or
|
||||
/// may not succeed. In some cases where this function returns `None`
|
||||
/// (notably closures), `typeck_results(def_id)` would wind up
|
||||
/// redirecting to the owning function.
|
||||
fn primary_body_of(
|
||||
node: Node<'_>,
|
||||
) -> Option<(hir::BodyId, Option<&hir::Ty<'_>>, Option<&hir::FnSig<'_>>)> {
|
||||
Some((node.body_id()?, node.ty(), node.fn_sig()))
|
||||
}
|
||||
|
||||
fn has_typeck_results(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
|
||||
// Closures' typeck results come from their outermost function,
|
||||
// as they are part of the same "inference environment".
|
||||
@ -106,7 +92,7 @@ fn has_typeck_results(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
|
||||
}
|
||||
|
||||
if let Some(def_id) = def_id.as_local() {
|
||||
primary_body_of(tcx.hir_node_by_def_id(def_id)).is_some()
|
||||
tcx.hir_node_by_def_id(def_id).body_id().is_some()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@ -163,20 +149,20 @@ fn typeck_with_fallback<'tcx>(
|
||||
let span = tcx.hir().span(id);
|
||||
|
||||
// Figure out what primary body this item has.
|
||||
let (body_id, body_ty, fn_sig) = primary_body_of(node).unwrap_or_else(|| {
|
||||
let body_id = node.body_id().unwrap_or_else(|| {
|
||||
span_bug!(span, "can't type-check body of {:?}", def_id);
|
||||
});
|
||||
let body = tcx.hir().body(body_id);
|
||||
|
||||
let param_env = tcx.param_env(def_id);
|
||||
|
||||
let inh = Inherited::new(tcx, def_id);
|
||||
let root_ctxt = TypeckRootCtxt::new(tcx, def_id);
|
||||
if let Some(inspector) = inspector {
|
||||
inh.infcx.attach_obligation_inspector(inspector);
|
||||
root_ctxt.infcx.attach_obligation_inspector(inspector);
|
||||
}
|
||||
let mut fcx = FnCtxt::new(&inh, param_env, def_id);
|
||||
let mut fcx = FnCtxt::new(&root_ctxt, param_env, def_id);
|
||||
|
||||
if let Some(hir::FnSig { header, decl, .. }) = fn_sig {
|
||||
if let Some(hir::FnSig { header, decl, .. }) = node.fn_sig() {
|
||||
let fn_sig = if decl.output.get_infer_ret_ty().is_some() {
|
||||
fcx.lowerer().lower_fn_ty(id, header.unsafety, header.abi, decl, None, None)
|
||||
} else {
|
||||
@ -191,42 +177,7 @@ fn typeck_with_fallback<'tcx>(
|
||||
|
||||
check_fn(&mut fcx, fn_sig, None, decl, def_id, body, tcx.features().unsized_fn_params);
|
||||
} else {
|
||||
let expected_type = if let Some(&hir::Ty { kind: hir::TyKind::Infer, span, .. }) = body_ty {
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::TypeInference,
|
||||
span,
|
||||
}))
|
||||
} else if let Node::AnonConst(_) = node {
|
||||
match tcx.parent_hir_node(id) {
|
||||
Node::Ty(&hir::Ty { kind: hir::TyKind::Typeof(ref anon_const), .. })
|
||||
if anon_const.hir_id == id =>
|
||||
{
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::TypeInference,
|
||||
span,
|
||||
}))
|
||||
}
|
||||
Node::Expr(&hir::Expr { kind: hir::ExprKind::InlineAsm(asm), .. })
|
||||
| Node::Item(&hir::Item { kind: hir::ItemKind::GlobalAsm(asm), .. }) => {
|
||||
asm.operands.iter().find_map(|(op, _op_sp)| match op {
|
||||
hir::InlineAsmOperand::Const { anon_const } if anon_const.hir_id == id => {
|
||||
// Inline assembly constants must be integers.
|
||||
Some(fcx.next_int_var())
|
||||
}
|
||||
hir::InlineAsmOperand::SymFn { anon_const } if anon_const.hir_id == id => {
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::MiscVariable,
|
||||
span,
|
||||
}))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let expected_type = infer_type_if_missing(&fcx, node);
|
||||
let expected_type = expected_type.unwrap_or_else(fallback);
|
||||
|
||||
let expected_type = fcx.normalize(body.value.span, expected_type);
|
||||
@ -296,6 +247,59 @@ fn typeck_with_fallback<'tcx>(
|
||||
typeck_results
|
||||
}
|
||||
|
||||
fn infer_type_if_missing<'tcx>(fcx: &FnCtxt<'_, 'tcx>, node: Node<'tcx>) -> Option<Ty<'tcx>> {
|
||||
let tcx = fcx.tcx;
|
||||
let def_id = fcx.body_id;
|
||||
let expected_type = if let Some(&hir::Ty { kind: hir::TyKind::Infer, span, .. }) = node.ty() {
|
||||
if let Some(item) = tcx.opt_associated_item(def_id.into())
|
||||
&& let ty::AssocKind::Const = item.kind
|
||||
&& let ty::ImplContainer = item.container
|
||||
&& let Some(trait_item) = item.trait_item_def_id
|
||||
{
|
||||
let args =
|
||||
tcx.impl_trait_ref(item.container_id(tcx)).unwrap().instantiate_identity().args;
|
||||
Some(tcx.type_of(trait_item).instantiate(tcx, args))
|
||||
} else {
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::TypeInference,
|
||||
span,
|
||||
}))
|
||||
}
|
||||
} else if let Node::AnonConst(_) = node {
|
||||
let id = tcx.local_def_id_to_hir_id(def_id);
|
||||
match tcx.parent_hir_node(id) {
|
||||
Node::Ty(&hir::Ty { kind: hir::TyKind::Typeof(ref anon_const), span, .. })
|
||||
if anon_const.hir_id == id =>
|
||||
{
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::TypeInference,
|
||||
span,
|
||||
}))
|
||||
}
|
||||
Node::Expr(&hir::Expr { kind: hir::ExprKind::InlineAsm(asm), span, .. })
|
||||
| Node::Item(&hir::Item { kind: hir::ItemKind::GlobalAsm(asm), span, .. }) => {
|
||||
asm.operands.iter().find_map(|(op, _op_sp)| match op {
|
||||
hir::InlineAsmOperand::Const { anon_const } if anon_const.hir_id == id => {
|
||||
// Inline assembly constants must be integers.
|
||||
Some(fcx.next_int_var())
|
||||
}
|
||||
hir::InlineAsmOperand::SymFn { anon_const } if anon_const.hir_id == id => {
|
||||
Some(fcx.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::MiscVariable,
|
||||
span,
|
||||
}))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
expected_type
|
||||
}
|
||||
|
||||
/// When `check_fn` is invoked on a coroutine (i.e., a body that
|
||||
/// includes yield), it returns back some information about the yield
|
||||
/// points.
|
||||
|
@ -81,7 +81,7 @@ pub struct NoMatchData<'tcx> {

// A pared down enum describing just the places from which a method
// candidate can arise. Used for error reporting only.
#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum CandidateSource {
Impl(DefId),
Trait(DefId /* trait id */),

@ -49,7 +49,6 @@ use std::borrow::Cow;
|
||||
use super::probe::{AutorefOrPtrAdjustment, IsSuggestion, Mode, ProbeScope};
|
||||
use super::{CandidateSource, MethodError, NoMatchData};
|
||||
use rustc_hir::intravisit::Visitor;
|
||||
use std::cmp::{self, Ordering};
|
||||
use std::iter;
|
||||
|
||||
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
@ -1186,7 +1185,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
if !inherent_impls_candidate.is_empty() {
|
||||
inherent_impls_candidate.sort();
|
||||
inherent_impls_candidate.sort_by_key(|id| self.tcx.def_path_str(id));
|
||||
inherent_impls_candidate.dedup();
|
||||
|
||||
// number of types to show at most
|
||||
@ -1567,7 +1566,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
sources: &mut Vec<CandidateSource>,
|
||||
sugg_span: Option<Span>,
|
||||
) {
|
||||
sources.sort();
|
||||
sources.sort_by_key(|source| match source {
|
||||
CandidateSource::Trait(id) => (0, self.tcx.def_path_str(id)),
|
||||
CandidateSource::Impl(id) => (1, self.tcx.def_path_str(id)),
|
||||
});
|
||||
sources.dedup();
|
||||
// Dynamic limit to avoid hiding just one candidate, which is silly.
|
||||
let limit = if sources.len() == 5 { 5 } else { 4 };
|
||||
@ -2549,7 +2551,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
preds.sort_by_key(|pred| (pred.def_id(), pred.self_ty()));
|
||||
preds.sort_by_key(|pred| pred.trait_ref.to_string());
|
||||
let def_ids = preds
|
||||
.iter()
|
||||
.filter_map(|pred| match pred.self_ty().kind() {
|
||||
@ -2663,7 +2665,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
traits.push(trait_pred.def_id());
|
||||
}
|
||||
}
|
||||
traits.sort();
|
||||
traits.sort_by_key(|id| self.tcx.def_path_str(id));
|
||||
traits.dedup();
|
||||
|
||||
let len = traits.len();
|
||||
@ -2886,7 +2888,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
) -> bool {
|
||||
if !valid_out_of_scope_traits.is_empty() {
|
||||
let mut candidates = valid_out_of_scope_traits;
|
||||
candidates.sort();
|
||||
candidates.sort_by_key(|id| self.tcx.def_path_str(id));
|
||||
candidates.dedup();
|
||||
|
||||
// `TryFrom` and `FromIterator` have no methods
|
||||
@ -3212,8 +3214,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
|
||||
if !candidates.is_empty() {
|
||||
// Sort from most relevant to least relevant.
|
||||
candidates.sort_by_key(|&info| cmp::Reverse(info));
|
||||
// Sort local crate results before others
|
||||
candidates
|
||||
.sort_by_key(|&info| (!info.def_id.is_local(), self.tcx.def_path_str(info.def_id)));
|
||||
candidates.dedup();
|
||||
|
||||
let param_type = match rcvr_ty.kind() {
|
||||
@ -3561,33 +3564,11 @@ pub enum SelfSource<'a> {
|
||||
MethodCall(&'a hir::Expr<'a> /* rcvr */),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub struct TraitInfo {
|
||||
pub def_id: DefId,
|
||||
}
|
||||
|
||||
impl PartialEq for TraitInfo {
|
||||
fn eq(&self, other: &TraitInfo) -> bool {
|
||||
self.cmp(other) == Ordering::Equal
|
||||
}
|
||||
}
|
||||
impl Eq for TraitInfo {}
|
||||
impl PartialOrd for TraitInfo {
|
||||
fn partial_cmp(&self, other: &TraitInfo) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
impl Ord for TraitInfo {
|
||||
fn cmp(&self, other: &TraitInfo) -> Ordering {
|
||||
// Local crates are more important than remote ones (local:
|
||||
// `cnum == 0`), and otherwise we throw in the defid for totality.
|
||||
|
||||
let lhs = (other.def_id.krate, other.def_id);
|
||||
let rhs = (self.def_id.krate, self.def_id);
|
||||
lhs.cmp(&rhs)
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves all traits in this crate and any dependent crates,
|
||||
/// and wraps them into `TraitInfo` for custom sorting.
|
||||
pub fn all_traits(tcx: TyCtxt<'_>) -> Vec<TraitInfo> {
|
||||
|
@ -388,8 +388,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
|
||||
if !pat_adjustments.is_empty() {
|
||||
debug!("default binding mode is now {:?}", def_bm);
|
||||
self.inh
|
||||
.typeck_results
|
||||
self.typeck_results
|
||||
.borrow_mut()
|
||||
.pat_adjustments_mut()
|
||||
.insert(pat.hir_id, pat_adjustments);
|
||||
@ -614,7 +613,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
_ => BindingMode::convert(ba),
|
||||
};
|
||||
// ...and store it in a side table:
|
||||
self.inh.typeck_results.borrow_mut().pat_binding_modes_mut().insert(pat.hir_id, bm);
|
||||
self.typeck_results.borrow_mut().pat_binding_modes_mut().insert(pat.hir_id, bm);
|
||||
|
||||
debug!("check_pat_ident: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
|
||||
|
||||
@ -2002,8 +2001,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
pat_info: PatInfo<'tcx, '_>,
|
||||
) -> Ty<'tcx> {
|
||||
let tcx = self.tcx;
|
||||
// FIXME(deref_patterns): use `DerefPure` for soundness
|
||||
// FIXME(deref_patterns): use `DerefMut` when required
|
||||
// Register a `DerefPure` bound, which is required by all `deref!()` pats.
|
||||
self.register_bound(
|
||||
expected,
|
||||
tcx.require_lang_item(hir::LangItem::DerefPure, Some(span)),
|
||||
self.misc(span),
|
||||
);
|
||||
// <expected as Deref>::Target
|
||||
let ty = Ty::new_projection(
|
||||
tcx,
|
||||
@ -2013,6 +2016,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
let ty = self.normalize(span, ty);
|
||||
let ty = self.try_structurally_resolve_type(span, ty);
|
||||
self.check_pat(inner, ty, pat_info);
|
||||
|
||||
// Check if the pattern has any `ref mut` bindings, which would require
|
||||
// `DerefMut` to be emitted in MIR building instead of just `Deref`.
|
||||
// We do this *after* checking the inner pattern, since we want to make
|
||||
// sure to apply any match-ergonomics adjustments.
|
||||
if self.typeck_results.borrow().pat_has_ref_mut_binding(inner) {
|
||||
self.register_bound(
|
||||
expected,
|
||||
tcx.require_lang_item(hir::LangItem::DerefMut, Some(span)),
|
||||
self.misc(span),
|
||||
);
|
||||
}
|
||||
|
||||
expected
|
||||
}
|
||||
|
||||
|
@ -16,7 +16,8 @@ use rustc_trait_selection::traits::{self, PredicateObligation, TraitEngine, Trai
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Deref;
|
||||
|
||||
/// Closures defined within the function. For example:
|
||||
// Data shared between a "typeck root" and its nested bodies,
|
||||
/// e.g. closures defined within the function. For example:
|
||||
/// ```ignore (illustrative)
|
||||
/// fn foo() {
|
||||
/// bar(move|| { ... })
|
||||
@ -24,8 +25,9 @@ use std::ops::Deref;
|
||||
/// ```
|
||||
/// Here, the function `foo()` and the closure passed to
|
||||
/// `bar()` will each have their own `FnCtxt`, but they will
|
||||
/// share the inherited fields.
|
||||
pub struct Inherited<'tcx> {
|
||||
/// share the inference context, will process obligations together,
|
||||
/// can access each other's local types (scoping permitted), etc.
|
||||
pub struct TypeckRootCtxt<'tcx> {
|
||||
pub(super) infcx: InferCtxt<'tcx>,
|
||||
|
||||
pub(super) typeck_results: RefCell<ty::TypeckResults<'tcx>>,
|
||||
@ -65,14 +67,14 @@ pub struct Inherited<'tcx> {
|
||||
pub(super) infer_var_info: RefCell<UnordMap<ty::TyVid, ty::InferVarInfo>>,
|
||||
}
|
||||
|
||||
impl<'tcx> Deref for Inherited<'tcx> {
|
||||
impl<'tcx> Deref for TypeckRootCtxt<'tcx> {
|
||||
type Target = InferCtxt<'tcx>;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.infcx
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> Inherited<'tcx> {
|
||||
impl<'tcx> TypeckRootCtxt<'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> Self {
|
||||
let hir_owner = tcx.local_def_id_to_hir_id(def_id).owner;
|
||||
|
||||
@ -83,7 +85,7 @@ impl<'tcx> Inherited<'tcx> {
|
||||
.build();
|
||||
let typeck_results = RefCell::new(ty::TypeckResults::new(hir_owner));
|
||||
|
||||
Inherited {
|
||||
TypeckRootCtxt {
|
||||
typeck_results,
|
||||
fulfillment_cx: RefCell::new(<dyn TraitEngine<'_>>::new(&infcx)),
|
||||
infcx,
|
@ -270,9 +270,6 @@ infer_ril_introduced_by = requirement introduced by this return type
|
||||
infer_ril_introduced_here = `'static` requirement introduced here
|
||||
infer_ril_static_introduced_by = "`'static` lifetime requirement introduced by the return type
|
||||
|
||||
infer_sbfrit_box_return_expr = if you change the return type to expect trait objects, box the returned expressions
|
||||
|
||||
infer_sbfrit_change_return_type = you could change the return type to be a boxed trait object
|
||||
infer_source_kind_closure_return =
|
||||
try giving this closure an explicit return type
|
||||
|
||||
|
@ -1262,24 +1262,6 @@ pub enum SuggestAccessingField<'a> {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
pub enum SuggestBoxingForReturnImplTrait {
|
||||
#[multipart_suggestion(infer_sbfrit_change_return_type, applicability = "maybe-incorrect")]
|
||||
ChangeReturnType {
|
||||
#[suggestion_part(code = "Box<dyn")]
|
||||
start_sp: Span,
|
||||
#[suggestion_part(code = ">")]
|
||||
end_sp: Span,
|
||||
},
|
||||
#[multipart_suggestion(infer_sbfrit_box_return_expr, applicability = "maybe-incorrect")]
|
||||
BoxReturnExpr {
|
||||
#[suggestion_part(code = "Box::new(")]
|
||||
starts: Vec<Span>,
|
||||
#[suggestion_part(code = ")")]
|
||||
ends: Vec<Span>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
#[multipart_suggestion(infer_stp_wrap_one, applicability = "maybe-incorrect")]
|
||||
pub struct SuggestTuplePatternOne {
|
||||
|
@ -784,7 +784,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
prior_arm_ty,
|
||||
source,
|
||||
ref prior_non_diverging_arms,
|
||||
opt_suggest_box_span,
|
||||
scrut_span,
|
||||
..
|
||||
}) => match source {
|
||||
@ -853,17 +852,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
) {
|
||||
err.subdiagnostic(self.dcx(), subdiag);
|
||||
}
|
||||
if let Some(ret_sp) = opt_suggest_box_span {
|
||||
// Get return type span and point to it.
|
||||
self.suggest_boxing_for_return_impl_trait(
|
||||
err,
|
||||
ret_sp,
|
||||
prior_non_diverging_arms
|
||||
.iter()
|
||||
.chain(std::iter::once(&arm_span))
|
||||
.copied(),
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
ObligationCauseCode::IfExpression(box IfExpressionCause {
|
||||
@ -872,7 +860,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
then_ty,
|
||||
else_ty,
|
||||
outer_span,
|
||||
opt_suggest_box_span,
|
||||
..
|
||||
}) => {
|
||||
let then_span = self.find_block_span_from_hir_id(then_id);
|
||||
let else_span = self.find_block_span_from_hir_id(else_id);
|
||||
@ -890,30 +878,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
) {
|
||||
err.subdiagnostic(self.dcx(), subdiag);
|
||||
}
|
||||
// don't suggest wrapping either blocks in `if .. {} else {}`
|
||||
let is_empty_arm = |id| {
|
||||
let hir::Node::Block(blk) = self.tcx.hir_node(id) else {
|
||||
return false;
|
||||
};
|
||||
if blk.expr.is_some() || !blk.stmts.is_empty() {
|
||||
return false;
|
||||
}
|
||||
let Some((_, hir::Node::Expr(expr))) = self.tcx.hir().parent_iter(id).nth(1)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
matches!(expr.kind, hir::ExprKind::If(..))
|
||||
};
|
||||
if let Some(ret_sp) = opt_suggest_box_span
|
||||
&& !is_empty_arm(then_id)
|
||||
&& !is_empty_arm(else_id)
|
||||
{
|
||||
self.suggest_boxing_for_return_impl_trait(
|
||||
err,
|
||||
ret_sp,
|
||||
[then_span, else_span].into_iter(),
|
||||
);
|
||||
}
|
||||
}
|
||||
ObligationCauseCode::LetElse => {
|
||||
err.help("try adding a diverging expression, such as `return` or `panic!(..)`");
|
||||
@ -1074,7 +1038,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
let (sig, reg) = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS)
|
||||
.name_all_regions(sig)
|
||||
.unwrap();
|
||||
let lts: Vec<String> = reg.into_values().map(|kind| kind.to_string()).collect();
|
||||
let lts: Vec<String> =
|
||||
reg.into_items().map(|(_, kind)| kind.to_string()).into_sorted_stable_ord();
|
||||
(if lts.is_empty() { String::new() } else { format!("for<{}> ", lts.join(", ")) }, sig)
|
||||
};
|
||||
|
||||
|
@ -19,9 +19,8 @@ use rustc_span::{sym, BytePos, Span};
|
||||
|
||||
use crate::errors::{
|
||||
ConsiderAddingAwait, FnConsiderCasting, FnItemsAreDistinct, FnUniqTypes,
|
||||
FunctionPointerSuggestion, SuggestAccessingField, SuggestBoxingForReturnImplTrait,
|
||||
SuggestRemoveSemiOrReturnBinding, SuggestTuplePatternMany, SuggestTuplePatternOne,
|
||||
TypeErrorAdditionalDiags,
|
||||
FunctionPointerSuggestion, SuggestAccessingField, SuggestRemoveSemiOrReturnBinding,
|
||||
SuggestTuplePatternMany, SuggestTuplePatternOne, TypeErrorAdditionalDiags,
|
||||
};
|
||||
|
||||
use super::TypeErrCtxt;
|
||||
@ -80,28 +79,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn suggest_boxing_for_return_impl_trait(
|
||||
&self,
|
||||
err: &mut Diag<'_>,
|
||||
return_sp: Span,
|
||||
arm_spans: impl Iterator<Item = Span>,
|
||||
) {
|
||||
let sugg = SuggestBoxingForReturnImplTrait::ChangeReturnType {
|
||||
start_sp: return_sp.with_hi(return_sp.lo() + BytePos(4)),
|
||||
end_sp: return_sp.shrink_to_hi(),
|
||||
};
|
||||
err.subdiagnostic(self.dcx(), sugg);
|
||||
|
||||
let mut starts = Vec::new();
|
||||
let mut ends = Vec::new();
|
||||
for span in arm_spans {
|
||||
starts.push(span.shrink_to_lo());
|
||||
ends.push(span.shrink_to_hi());
|
||||
}
|
||||
let sugg = SuggestBoxingForReturnImplTrait::BoxReturnExpr { starts, ends };
|
||||
err.subdiagnostic(self.dcx(), sugg);
|
||||
}
|
||||
|
||||
pub(super) fn suggest_tuple_pattern(
|
||||
&self,
|
||||
cause: &ObligationCause<'tcx>,
|
||||
|
@ -229,6 +229,15 @@ impl<'tcx> InferCtxtInner<'tcx> {
|
||||
.expect("region constraints already solved")
|
||||
.with_log(&mut self.undo_log)
|
||||
}
|
||||
|
||||
// Iterates through the opaque type definitions without taking them; this holds the
|
||||
// `InferCtxtInner` lock, so make sure to not do anything with `InferCtxt` side-effects
|
||||
// while looping through this.
|
||||
pub fn iter_opaque_types(
|
||||
&self,
|
||||
) -> impl Iterator<Item = (ty::OpaqueTypeKey<'tcx>, ty::OpaqueHiddenType<'tcx>)> + '_ {
|
||||
self.opaque_type_storage.opaque_types.iter().map(|(&k, v)| (k, v.hidden_type))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct InferCtxt<'tcx> {
|
||||
|
@ -73,7 +73,7 @@ impl<'tcx> InferCtxt<'tcx> {
|
||||
// for opaque types, and then use that kind to fix the spans for type errors
|
||||
// that we see later on.
|
||||
let ty_var = self.next_ty_var(TypeVariableOrigin {
|
||||
kind: TypeVariableOriginKind::OpaqueTypeInference(def_id),
|
||||
kind: TypeVariableOriginKind::MiscVariable,
|
||||
span,
|
||||
});
|
||||
obligations.extend(
|
||||
|
@ -47,7 +47,6 @@ pub enum TypeVariableOriginKind {
|
||||
MiscVariable,
|
||||
NormalizeProjectionType,
|
||||
TypeInference,
|
||||
OpaqueTypeInference(DefId),
|
||||
TypeParameterDefinition(Symbol, DefId),
|
||||
|
||||
/// One of the upvars or closure kind parameters in a `ClosureArgs`
|
||||
|
@ -10,7 +10,9 @@ use rustc_data_structures::sync::Lrc;
use rustc_errors::registry::Registry;
use rustc_errors::{DiagCtxt, ErrorGuaranteed};
use rustc_lint::LintStore;

use rustc_middle::ty;
use rustc_middle::ty::CurrentGcx;
use rustc_middle::util::Providers;
use rustc_parse::maybe_new_parser_from_source_str;
use rustc_query_impl::QueryCtxt;
@ -39,6 +41,7 @@ pub struct Compiler {
pub sess: Session,
pub codegen_backend: Box<dyn CodegenBackend>,
pub(crate) override_queries: Option<fn(&Session, &mut Providers)>,
pub(crate) current_gcx: CurrentGcx,
}

/// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`.
|
||||
@ -336,7 +339,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
|
||||
util::run_in_thread_pool_with_globals(
|
||||
config.opts.edition,
|
||||
config.opts.unstable_opts.threads,
|
||||
|| {
|
||||
|current_gcx| {
|
||||
crate::callbacks::setup_callbacks();
|
||||
|
||||
let early_dcx = EarlyDiagCtxt::new(config.opts.error_format);
|
||||
@ -430,8 +433,12 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
|
||||
}
|
||||
sess.lint_store = Some(Lrc::new(lint_store));
|
||||
|
||||
let compiler =
|
||||
Compiler { sess, codegen_backend, override_queries: config.override_queries };
|
||||
let compiler = Compiler {
|
||||
sess,
|
||||
codegen_backend,
|
||||
override_queries: config.override_queries,
|
||||
current_gcx,
|
||||
};
|
||||
|
||||
rustc_span::set_source_map(compiler.sess.psess.clone_source_map(), move || {
|
||||
// There are two paths out of `f`.
|
||||
|
@ -680,6 +680,7 @@ pub fn create_global_ctxt<'tcx>(
|
||||
incremental,
|
||||
),
|
||||
providers.hooks,
|
||||
compiler.current_gcx.clone(),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
@ -315,30 +315,39 @@ fn test_search_paths_tracking_hash_different_order() {
json_rendered: HumanReadableErrorType::Default(ColorConfig::Never),
};

let push = |opts: &mut Options, search_path| {
opts.search_paths.push(SearchPath::from_cli_opt(
"not-a-sysroot".as_ref(),
&opts.target_triple,
&early_dcx,
search_path,
));
};

// Reference
v1.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "native=abc"));
v1.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "crate=def"));
v1.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "dependency=ghi"));
v1.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "framework=jkl"));
v1.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "all=mno"));
push(&mut v1, "native=abc");
push(&mut v1, "crate=def");
push(&mut v1, "dependency=ghi");
push(&mut v1, "framework=jkl");
push(&mut v1, "all=mno");

v2.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "native=abc"));
v2.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "dependency=ghi"));
v2.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "crate=def"));
v2.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "framework=jkl"));
v2.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "all=mno"));
push(&mut v2, "native=abc");
push(&mut v2, "dependency=ghi");
push(&mut v2, "crate=def");
push(&mut v2, "framework=jkl");
push(&mut v2, "all=mno");

v3.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "crate=def"));
v3.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "framework=jkl"));
v3.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "native=abc"));
v3.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "dependency=ghi"));
v3.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "all=mno"));
push(&mut v3, "crate=def");
push(&mut v3, "framework=jkl");
push(&mut v3, "native=abc");
push(&mut v3, "dependency=ghi");
push(&mut v3, "all=mno");

v4.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "all=mno"));
v4.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "native=abc"));
v4.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "crate=def"));
v4.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "dependency=ghi"));
v4.search_paths.push(SearchPath::from_cli_opt(&early_dcx, "framework=jkl"));
push(&mut v4, "all=mno");
push(&mut v4, "native=abc");
push(&mut v4, "crate=def");
push(&mut v4, "dependency=ghi");
push(&mut v4, "framework=jkl");

assert_same_hash(&v1, &v2);
assert_same_hash(&v1, &v3);

@ -5,6 +5,7 @@ use rustc_codegen_ssa::traits::CodegenBackend;
#[cfg(parallel_compiler)]
use rustc_data_structures::sync;
use rustc_metadata::{load_symbol_from_dylib, DylibError};
use rustc_middle::ty::CurrentGcx;
use rustc_parse::validate_attr;
use rustc_session as session;
use rustc_session::config::{Cfg, OutFileName, OutputFilenames, OutputTypes};
@ -64,7 +65,7 @@ fn init_stack_size() -> usize {
})
}

pub(crate) fn run_in_thread_with_globals<F: FnOnce() -> R + Send, R: Send>(
pub(crate) fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
edition: Edition,
f: F,
) -> R {
@ -82,7 +83,9 @@ pub(crate) fn run_in_thread_with_globals<F: FnOnce() -> R + Send, R: Send>(
// `unwrap` is ok here because `spawn_scoped` only panics if the thread
// name contains null bytes.
let r = builder
.spawn_scoped(s, move || rustc_span::create_session_globals_then(edition, f))
.spawn_scoped(s, move || {
rustc_span::create_session_globals_then(edition, || f(CurrentGcx::new()))
})
.unwrap()
.join();

@ -94,7 +97,7 @@ pub(crate) fn run_in_thread_with_globals<F: FnOnce() -> R + Send, R: Send>(
}

#[cfg(not(parallel_compiler))]
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
edition: Edition,
_threads: usize,
f: F,
@ -103,7 +106,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
}

#[cfg(parallel_compiler)]
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
edition: Edition,
threads: usize,
f: F,
@ -117,24 +120,34 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap());

if !sync::is_dyn_thread_safe() {
return run_in_thread_with_globals(edition, || {
return run_in_thread_with_globals(edition, |current_gcx| {
// Register the thread for use with the `WorkerLocal` type.
registry.register();

f()
f(current_gcx)
});
}

let current_gcx = FromDyn::from(CurrentGcx::new());
let current_gcx2 = current_gcx.clone();

let builder = rayon::ThreadPoolBuilder::new()
.thread_name(|_| "rustc".to_string())
.acquire_thread_handler(jobserver::acquire_thread)
.release_thread_handler(jobserver::release_thread)
.num_threads(threads)
.deadlock_handler(|| {
.deadlock_handler(move || {
// On deadlock, creates a new thread and forwards information in thread
// locals to it. The new thread runs the deadlock handler.
let query_map =
FromDyn::from(tls::with(|tcx| QueryCtxt::new(tcx).collect_active_jobs()));

// Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a
// `TyCtxt` TLS reference here.
let query_map = current_gcx2.access(|gcx| {
tls::enter_context(&tls::ImplicitCtxt::new(gcx), || {
tls::with(|tcx| QueryCtxt::new(tcx).collect_active_jobs())
})
});
let query_map = FromDyn::from(query_map);
let registry = rayon_core::Registry::current();
thread::Builder::new()
.name("rustc query cycle handler".to_string())
@ -171,7 +184,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
})
},
// Run `f` on the first thread in the thread pool.
move |pool: &rayon::ThreadPool| pool.install(f),
move |pool: &rayon::ThreadPool| pool.install(|| f(current_gcx.into_inner())),
)
.unwrap()
})

@ -865,7 +865,9 @@ trait UnusedDelimLint {
(iter, UnusedDelimsCtx::ForIterExpr, true, None, Some(body.span.lo()), true)
}

Match(ref head, ..) if Self::LINT_EXPR_IN_PATTERN_MATCHING_CTX => {
Match(ref head, _, ast::MatchKind::Prefix)
if Self::LINT_EXPR_IN_PATTERN_MATCHING_CTX =>
{
let left = e.span.lo() + rustc_span::BytePos(5);
(head, UnusedDelimsCtx::MatchScrutineeExpr, true, Some(left), None, true)
}

@ -1992,7 +1992,11 @@ extern "C" void LLVMRustContextConfigureDiagnosticHandler(
}
}
if (DiagnosticHandlerCallback) {
#if LLVM_VERSION_GE(19, 0)
DiagnosticHandlerCallback(&DI, DiagnosticHandlerContext);
#else
DiagnosticHandlerCallback(DI, DiagnosticHandlerContext);
#endif
return true;
}
return false;

@ -652,8 +652,9 @@ impl<'tcx> Body<'tcx> {
self.coroutine.as_ref().and_then(|coroutine| coroutine.resume_ty)
}

/// Prefer going through [`TyCtxt::coroutine_layout`] rather than using this directly.
#[inline]
pub fn coroutine_layout(&self) -> Option<&CoroutineLayout<'tcx>> {
pub fn coroutine_layout_raw(&self) -> Option<&CoroutineLayout<'tcx>> {
self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_layout.as_ref())
}

@ -126,7 +126,7 @@ fn dump_matched_mir_node<'tcx, F>(
Some(promoted) => write!(file, "::{promoted:?}`")?,
}
writeln!(file, " {disambiguator} {pass_name}")?;
if let Some(ref layout) = body.coroutine_layout() {
if let Some(ref layout) = body.coroutine_layout_raw() {
writeln!(file, "/* coroutine_layout = {layout:#?} */")?;
}
writeln!(file)?;

@ -1361,8 +1361,8 @@ pub enum NullOp<'tcx> {
AlignOf,
/// Returns the offset of a field
OffsetOf(&'tcx List<(VariantIdx, FieldIdx)>),
/// Returns whether we want to check for UB.
/// This returns the value of `cfg!(debug_assertions)` at monomorphization time.
/// Returns whether we should perform some UB-checking at runtime.
/// See the `ub_checks` intrinsic docs for details.
UbChecks,
}

@ -149,44 +149,45 @@ impl<O> AssertKind<O> {
matches!(self, OverflowNeg(..) | Overflow(Add | Sub | Mul | Shl | Shr, ..))
}

/// Get the message that is printed at runtime when this assertion fails.
/// Get the lang item that is invoked to print a static message when this assert fires.
///
/// The caller is expected to handle `BoundsCheck` and `MisalignedPointerDereference` by
/// invoking the appropriate lang item (panic_bounds_check/panic_misaligned_pointer_dereference)
/// instead of printing a static message.
pub fn description(&self) -> &'static str {
/// instead of printing a static message. Those have dynamic arguments that aren't present for
/// the rest of the messages here.
pub fn panic_function(&self) -> LangItem {
use AssertKind::*;
match self {
Overflow(BinOp::Add, _, _) => "attempt to add with overflow",
Overflow(BinOp::Sub, _, _) => "attempt to subtract with overflow",
Overflow(BinOp::Mul, _, _) => "attempt to multiply with overflow",
Overflow(BinOp::Div, _, _) => "attempt to divide with overflow",
Overflow(BinOp::Rem, _, _) => "attempt to calculate the remainder with overflow",
OverflowNeg(_) => "attempt to negate with overflow",
Overflow(BinOp::Shr, _, _) => "attempt to shift right with overflow",
Overflow(BinOp::Shl, _, _) => "attempt to shift left with overflow",
Overflow(BinOp::Add, _, _) => LangItem::PanicAddOverflow,
Overflow(BinOp::Sub, _, _) => LangItem::PanicSubOverflow,
Overflow(BinOp::Mul, _, _) => LangItem::PanicMulOverflow,
Overflow(BinOp::Div, _, _) => LangItem::PanicDivOverflow,
Overflow(BinOp::Rem, _, _) => LangItem::PanicRemOverflow,
OverflowNeg(_) => LangItem::PanicNegOverflow,
Overflow(BinOp::Shr, _, _) => LangItem::PanicShrOverflow,
Overflow(BinOp::Shl, _, _) => LangItem::PanicShlOverflow,
Overflow(op, _, _) => bug!("{:?} cannot overflow", op),
DivisionByZero(_) => "attempt to divide by zero",
RemainderByZero(_) => "attempt to calculate the remainder with a divisor of zero",
ResumedAfterReturn(CoroutineKind::Coroutine(_)) => "coroutine resumed after completion",
DivisionByZero(_) => LangItem::PanicDivZero,
RemainderByZero(_) => LangItem::PanicRemZero,
ResumedAfterReturn(CoroutineKind::Coroutine(_)) => LangItem::PanicCoroutineResumed,
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => {
"`async fn` resumed after completion"
LangItem::PanicAsyncFnResumed
}
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => {
"`async gen fn` resumed after completion"
LangItem::PanicAsyncGenFnResumed
}
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => {
"`gen fn` should just keep returning `None` after completion"
LangItem::PanicGenFnNone
}
ResumedAfterPanic(CoroutineKind::Coroutine(_)) => "coroutine resumed after panicking",
ResumedAfterPanic(CoroutineKind::Coroutine(_)) => LangItem::PanicCoroutineResumedPanic,
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => {
"`async fn` resumed after panicking"
LangItem::PanicAsyncFnResumedPanic
}
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => {
"`async gen fn` resumed after panicking"
LangItem::PanicAsyncGenFnResumedPanic
}
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => {
"`gen fn` should just keep returning `None` after panicking"
LangItem::PanicGenFnNonePanic
}

BoundsCheck { .. } | MisalignedPointerDereference { .. } => {
@ -198,7 +199,7 @@ impl<O> AssertKind<O> {
/// Format the message arguments for the `assert(cond, msg..)` terminator in MIR printing.
///
/// Needs to be kept in sync with the run-time behavior (which is defined by
/// `AssertKind::description` and the lang items mentioned in its docs).
/// `AssertKind::panic_function` and the lang items mentioned in its docs).
/// Note that we deliberately show more details here than we do at runtime, such as the actual
/// numbers that overflowed -- it is much easier to do so here than at runtime.
pub fn fmt_assert_args<W: fmt::Write>(&self, f: &mut W) -> fmt::Result
@ -246,20 +247,44 @@ impl<O> AssertKind<O> {
Overflow(BinOp::Shl, _, r) => {
write!(f, "\"attempt to shift left by `{{}}`, which would overflow\", {r:?}")
}
Overflow(op, _, _) => bug!("{:?} cannot overflow", op),
MisalignedPointerDereference { required, found } => {
write!(
f,
"\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\", {required:?}, {found:?}"
)
}
_ => write!(f, "\"{}\"", self.description()),
ResumedAfterReturn(CoroutineKind::Coroutine(_)) => {
write!(f, "\"coroutine resumed after completion\"")
}
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => {
write!(f, "\"`async fn` resumed after completion\"")
}
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => {
write!(f, "\"`async gen fn` resumed after completion\"")
}
ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => {
write!(f, "\"`gen fn` should just keep returning `None` after completion\"")
}
ResumedAfterPanic(CoroutineKind::Coroutine(_)) => {
write!(f, "\"coroutine resumed after panicking\"")
}
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => {
write!(f, "\"`async fn` resumed after panicking\"")
}
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => {
write!(f, "\"`async gen fn` resumed after panicking\"")
}
ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => {
write!(f, "\"`gen fn` should just keep returning `None` after panicking\"")
}
}
}

/// Format the diagnostic message for use in a lint (e.g. when the assertion fails during const-eval).
///
/// Needs to be kept in sync with the run-time behavior (which is defined by
/// `AssertKind::description` and the lang items mentioned in its docs).
/// `AssertKind::panic_function` and the lang items mentioned in its docs).
/// Note that we deliberately show more details here than we do at runtime, such as the actual
/// numbers that overflowed -- it is much easier to do so here than at runtime.
pub fn diagnostic_message(&self) -> DiagMessage {

@ -9,8 +9,7 @@ use crate::ty::{self, Ty, TyCtxt};
use crate::ty::{GenericArg, GenericArgsRef};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, ModDefId, LOCAL_CRATE};
use rustc_hir::hir_id::{HirId, OwnerId};
use rustc_query_system::query::DefIdCacheSelector;
use rustc_query_system::query::{DefaultCacheSelector, SingleCacheSelector, VecCacheSelector};
use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi;
@ -22,7 +21,7 @@ pub struct LocalCrate;
/// The `Key` trait controls what types can legally be used as the key
/// for a query.
pub trait Key: Sized {
// N.B. Most of the keys down below have `type CacheSelector = DefaultCacheSelector<Self>;`,
// N.B. Most of the keys down below have `type Cache<V> = DefaultCache<Self, V>;`,
// it would be reasonable to use associated type defaults, to remove the duplication...
//
// ...But r-a doesn't support them yet and using a default here causes r-a to not infer
@ -30,7 +29,7 @@ pub trait Key: Sized {
// type defaults, please restrain from using them here <3
//
// r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
type CacheSelector;
type Cache<V>;

/// In the event that a cycle occurs, if no explicit span has been
/// given for a query with key `self`, what span should we use?
@ -56,7 +55,7 @@ pub trait AsLocalKey: Key {
}

impl Key for () {
type CacheSelector = SingleCacheSelector;
type Cache<V> = SingleCache<V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -64,7 +63,7 @@ impl Key for () {
}

impl<'tcx> Key for ty::InstanceDef<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
@ -81,7 +80,7 @@ impl<'tcx> AsLocalKey for ty::InstanceDef<'tcx> {
}

impl<'tcx> Key for ty::Instance<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
@ -89,7 +88,7 @@ impl<'tcx> Key for ty::Instance<'tcx> {
}

impl<'tcx> Key for mir::interpret::GlobalId<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.instance.default_span(tcx)
@ -97,7 +96,7 @@ impl<'tcx> Key for mir::interpret::GlobalId<'tcx> {
}

impl<'tcx> Key for (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -105,7 +104,7 @@ impl<'tcx> Key for (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
}

impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -113,7 +112,7 @@ impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
}

impl Key for CrateNum {
type CacheSelector = VecCacheSelector<Self>;
type Cache<V> = VecCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -130,7 +129,7 @@ impl AsLocalKey for CrateNum {
}

impl Key for OwnerId {
type CacheSelector = VecCacheSelector<Self>;
type Cache<V> = VecCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.to_def_id().default_span(tcx)
@ -142,7 +141,7 @@ impl Key for OwnerId {
}

impl Key for LocalDefId {
type CacheSelector = VecCacheSelector<Self>;
type Cache<V> = VecCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.to_def_id().default_span(tcx)
@ -154,7 +153,7 @@ impl Key for LocalDefId {
}

impl Key for DefId {
type CacheSelector = DefIdCacheSelector;
type Cache<V> = DefIdCache<V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
@ -176,7 +175,7 @@ impl AsLocalKey for DefId {
}

impl Key for LocalModDefId {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
@ -189,7 +188,7 @@ impl Key for LocalModDefId {
}

impl Key for ModDefId {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
@ -211,7 +210,7 @@ impl AsLocalKey for ModDefId {
}

impl Key for SimplifiedType {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -219,7 +218,7 @@ impl Key for SimplifiedType {
}

impl Key for (DefId, DefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
@ -227,7 +226,7 @@ impl Key for (DefId, DefId) {
}

impl<'tcx> Key for (ty::Instance<'tcx>, LocalDefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -235,7 +234,7 @@ impl<'tcx> Key for (ty::Instance<'tcx>, LocalDefId) {
}

impl Key for (DefId, LocalDefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
@ -243,7 +242,7 @@ impl Key for (DefId, LocalDefId) {
}

impl Key for (LocalDefId, DefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -251,7 +250,7 @@ impl Key for (LocalDefId, DefId) {
}

impl Key for (LocalDefId, LocalDefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -259,7 +258,7 @@ impl Key for (LocalDefId, LocalDefId) {
}

impl Key for (DefId, Ident) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.0)
@ -272,7 +271,7 @@ impl Key for (DefId, Ident) {
}

impl Key for (LocalDefId, LocalDefId, Ident) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
@ -280,7 +279,7 @@ impl Key for (LocalDefId, LocalDefId, Ident) {
}

impl Key for (CrateNum, DefId) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.1.default_span(tcx)
@ -297,7 +296,7 @@ impl AsLocalKey for (CrateNum, DefId) {
}

impl Key for (CrateNum, SimplifiedType) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -314,7 +313,7 @@ impl AsLocalKey for (CrateNum, SimplifiedType) {
}

impl Key for (DefId, SimplifiedType) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -322,7 +321,7 @@ impl Key for (DefId, SimplifiedType) {
}

impl<'tcx> Key for GenericArgsRef<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -330,7 +329,7 @@ impl<'tcx> Key for GenericArgsRef<'tcx> {
}

impl<'tcx> Key for (DefId, GenericArgsRef<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -338,7 +337,7 @@ impl<'tcx> Key for (DefId, GenericArgsRef<'tcx>) {
}

impl<'tcx> Key for (ty::UnevaluatedConst<'tcx>, ty::UnevaluatedConst<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
(self.0).def.default_span(tcx)
@ -346,7 +345,7 @@ impl<'tcx> Key for (ty::UnevaluatedConst<'tcx>, ty::UnevaluatedConst<'tcx>) {
}

impl<'tcx> Key for (LocalDefId, DefId, GenericArgsRef<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -354,7 +353,7 @@ impl<'tcx> Key for (LocalDefId, DefId, GenericArgsRef<'tcx>) {
}

impl<'tcx> Key for (ty::ParamEnv<'tcx>, ty::TraitRef<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.1.def_id)
@ -362,7 +361,7 @@ impl<'tcx> Key for (ty::ParamEnv<'tcx>, ty::TraitRef<'tcx>) {
}

impl<'tcx> Key for ty::PolyTraitRef<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
@ -370,7 +369,7 @@ impl<'tcx> Key for ty::PolyTraitRef<'tcx> {
}

impl<'tcx> Key for ty::PolyExistentialTraitRef<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.def_id())
@ -378,7 +377,7 @@ impl<'tcx> Key for ty::PolyExistentialTraitRef<'tcx> {
}

impl<'tcx> Key for (ty::PolyTraitRef<'tcx>, ty::PolyTraitRef<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(self.0.def_id())
@ -386,7 +385,7 @@ impl<'tcx> Key for (ty::PolyTraitRef<'tcx>, ty::PolyTraitRef<'tcx>) {
}

impl<'tcx> Key for GenericArg<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -394,7 +393,7 @@ impl<'tcx> Key for GenericArg<'tcx> {
}

impl<'tcx> Key for ty::Const<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -402,7 +401,7 @@ impl<'tcx> Key for ty::Const<'tcx> {
}

impl<'tcx> Key for Ty<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -418,7 +417,7 @@ impl<'tcx> Key for Ty<'tcx> {
}

impl<'tcx> Key for TyAndLayout<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -426,7 +425,7 @@ impl<'tcx> Key for TyAndLayout<'tcx> {
}

impl<'tcx> Key for (Ty<'tcx>, Ty<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -434,7 +433,7 @@ impl<'tcx> Key for (Ty<'tcx>, Ty<'tcx>) {
}

impl<'tcx> Key for &'tcx ty::List<ty::Clause<'tcx>> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -442,7 +441,7 @@ impl<'tcx> Key for &'tcx ty::List<ty::Clause<'tcx>> {
}

impl<'tcx> Key for ty::ParamEnv<'tcx> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -450,7 +449,7 @@ impl<'tcx> Key for ty::ParamEnv<'tcx> {
}

impl<'tcx, T: Key> Key for ty::ParamEnvAnd<'tcx, T> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.value.default_span(tcx)
@ -462,7 +461,7 @@ impl<'tcx, T: Key> Key for ty::ParamEnvAnd<'tcx, T> {
}

impl Key for Symbol {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -470,7 +469,7 @@ impl Key for Symbol {
}

impl Key for Option<Symbol> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -480,7 +479,7 @@ impl Key for Option<Symbol> {
/// Canonical query goals correspond to abstract trait operations that
/// are not tied to any crate in particular.
impl<'tcx, T: Clone> Key for Canonical<'tcx, T> {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -488,7 +487,7 @@ impl<'tcx, T: Clone> Key for Canonical<'tcx, T> {
}

impl Key for (Symbol, u32, u32) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -496,7 +495,7 @@ impl Key for (Symbol, u32, u32) {
}

impl<'tcx> Key for (DefId, Ty<'tcx>, GenericArgsRef<'tcx>, ty::ParamEnv<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -504,7 +503,7 @@ impl<'tcx> Key for (DefId, Ty<'tcx>, GenericArgsRef<'tcx>, ty::ParamEnv<'tcx>) {
}

impl<'tcx> Key for (Ty<'tcx>, abi::VariantIdx) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -512,7 +511,7 @@ impl<'tcx> Key for (Ty<'tcx>, abi::VariantIdx) {
}

impl<'tcx> Key for (ty::Predicate<'tcx>, traits::WellFormedLoc) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -520,7 +519,7 @@ impl<'tcx> Key for (ty::Predicate<'tcx>, traits::WellFormedLoc) {
}

impl<'tcx> Key for (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -528,7 +527,7 @@ impl<'tcx> Key for (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
}

impl<'tcx> Key for (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
@ -536,7 +535,7 @@ impl<'tcx> Key for (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>) {
}

impl<'tcx> Key for (Ty<'tcx>, ty::ValTree<'tcx>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, _: TyCtxt<'_>) -> Span {
DUMMY_SP
@ -544,7 +543,7 @@ impl<'tcx> Key for (Ty<'tcx>, ty::ValTree<'tcx>) {
}

impl Key for HirId {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.hir().span(*self)
@ -557,7 +556,7 @@ impl Key for HirId {
}

impl<'tcx> Key for (ValidityRequirement, ty::ParamEnvAnd<'tcx, Ty<'tcx>>) {
type CacheSelector = DefaultCacheSelector<Self>;
type Cache<V> = DefaultCache<Self, V>;

// Just forward to `Ty<'tcx>`

@ -73,7 +73,7 @@ use rustc_hir::lang_items::{LangItem, LanguageItems};
use rustc_hir::{Crate, ItemLocalId, ItemLocalMap, TraitCandidate};
use rustc_index::IndexVec;
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{try_get_cached, CacheSelector, QueryCache, QueryMode, QueryState};
use rustc_query_system::query::{try_get_cached, QueryCache, QueryMode, QueryState};
use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};
use rustc_session::cstore::{CrateDepKind, CrateSource};
use rustc_session::cstore::{ExternCrate, ForeignModule, LinkagePreference, NativeLib};

@ -336,9 +336,7 @@ macro_rules! define_callbacks {
))
}

pub type Storage<'tcx> = <
<$($K)* as keys::Key>::CacheSelector as CacheSelector<'tcx, Erase<$V>>
>::Cache;
pub type Storage<'tcx> = <$($K)* as keys::Key>::Cache<Erase<$V>>;

// Ensure that keys grow no larger than 64 bytes
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]

@ -1018,7 +1018,7 @@ impl<'tcx> PatRangeBoundary<'tcx> {
(Finite(mir::Const::Ty(a)), Finite(mir::Const::Ty(b)))
if matches!(ty.kind(), ty::Uint(_) | ty::Char) =>
{
return Some(a.kind().cmp(&b.kind()));
return Some(a.to_valtree().cmp(&b.to_valtree()));
}
(
Finite(mir::Const::Val(mir::ConstValue::Scalar(Scalar::Int(a)), _)),

@ -571,7 +571,8 @@ pub struct MatchExpressionArmCause<'tcx> {
pub scrut_span: Span,
pub source: hir::MatchSource,
pub prior_non_diverging_arms: Vec<Span>,
pub opt_suggest_box_span: Option<Span>,
// Is the expectation of this match expression an RPIT?
pub tail_defines_return_position_impl_trait: Option<LocalDefId>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@ -582,7 +583,8 @@ pub struct IfExpressionCause<'tcx> {
pub then_ty: Ty<'tcx>,
pub else_ty: Ty<'tcx>,
pub outer_span: Option<Span>,
pub opt_suggest_box_span: Option<Span>,
// Is the expectation of this match expression an RPIT?
pub tail_defines_return_position_impl_trait: Option<LocalDefId>,
}

#[derive(Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)]

@ -18,7 +18,6 @@ use rustc_span::symbol::sym;
use rustc_target::abi::{ReprOptions, VariantIdx, FIRST_VARIANT};

use std::cell::RefCell;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::ops::Range;
use std::str;
@ -102,20 +101,6 @@ pub struct AdtDefData {
repr: ReprOptions,
}

impl PartialOrd for AdtDefData {
fn partial_cmp(&self, other: &AdtDefData) -> Option<Ordering> {
Some(self.cmp(other))
}
}

/// There should be only one AdtDef for each `did`, therefore
/// it is fine to implement `Ord` only based on `did`.
impl Ord for AdtDefData {
fn cmp(&self, other: &AdtDefData) -> Ordering {
self.did.cmp(&other.did)
}
}

impl PartialEq for AdtDefData {
#[inline]
fn eq(&self, other: &Self) -> bool {
@ -180,7 +165,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for AdtDefData {
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, HashStable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct AdtDef<'tcx>(pub Interned<'tcx, AdtDefData>);

@ -23,7 +23,7 @@ pub use valtree::*;
pub type ConstKind<'tcx> = IrConstKind<TyCtxt<'tcx>>;

/// Use this rather than `ConstData`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct Const<'tcx>(pub(super) Interned<'tcx, WithCachedTypeInfo<ConstData<'tcx>>>);

@ -52,7 +52,7 @@ impl<'tcx> ConstTy<TyCtxt<'tcx>> for Const<'tcx> {
}

/// Typed constant value.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(HashStable, TyEncodable, TyDecodable)]
pub struct ConstData<'tcx> {
pub ty: Ty<'tcx>,

@ -7,7 +7,7 @@ use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;

/// An unevaluated (potentially generic) constant used in the type-system.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct UnevaluatedConst<'tcx> {
pub def: DefId,
@ -62,7 +62,7 @@ impl<'tcx> UnevaluatedConst<'tcx> {
}
}

#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
#[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)]
pub enum Expr<'tcx> {
Binop(mir::BinOp, Const<'tcx>, Const<'tcx>),

@ -32,6 +32,7 @@ use crate::ty::{
};
use crate::ty::{GenericArg, GenericArgs, GenericArgsRef};
use rustc_ast::{self as ast, attr};
use rustc_data_structures::defer;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::Interned;
@ -39,7 +40,7 @@ use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{self, FreezeReadGuard, Lock, Lrc, WorkerLocal};
use rustc_data_structures::sync::{self, FreezeReadGuard, Lock, Lrc, RwLock, WorkerLocal};
#[cfg(parallel_compiler)]
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_data_structures::unord::UnordSet;
@ -723,6 +724,8 @@ pub struct GlobalCtxt<'tcx> {

/// Stores memory for globals (statics/consts).
pub(crate) alloc_map: Lock<interpret::AllocMap<'tcx>>,

current_gcx: CurrentGcx,
}

impl<'tcx> GlobalCtxt<'tcx> {
@ -733,6 +736,19 @@ impl<'tcx> GlobalCtxt<'tcx> {
F: FnOnce(TyCtxt<'tcx>) -> R,
{
let icx = tls::ImplicitCtxt::new(self);

// Reset `current_gcx` to `None` when we exit.
let _on_drop = defer(move || {
*self.current_gcx.value.write() = None;
});

// Set this `GlobalCtxt` as the current one.
{
let mut guard = self.current_gcx.value.write();
assert!(guard.is_none(), "no `GlobalCtxt` is currently set");
*guard = Some(self as *const _ as *const ());
}

tls::enter_context(&icx, || f(icx.tcx))
}

@ -741,6 +757,39 @@ impl<'tcx> GlobalCtxt<'tcx> {
}
}

/// This is used to get a reference to a `GlobalCtxt` if one is available.
///
/// This is needed to allow the deadlock handler access to `GlobalCtxt` to look for query cycles.
/// It cannot use the `TLV` global because that's only guaranteed to be defined on the thread
/// creating the `GlobalCtxt`. Other threads have access to the `TLV` only inside Rayon jobs, but
/// the deadlock handler is not called inside such a job.
#[derive(Clone)]
pub struct CurrentGcx {
/// This stores a pointer to a `GlobalCtxt`. This is set to `Some` inside `GlobalCtxt::enter`
/// and reset to `None` when that function returns or unwinds.
value: Lrc<RwLock<Option<*const ()>>>,
}

#[cfg(parallel_compiler)]
unsafe impl DynSend for CurrentGcx {}
#[cfg(parallel_compiler)]
unsafe impl DynSync for CurrentGcx {}

impl CurrentGcx {
pub fn new() -> Self {
Self { value: Lrc::new(RwLock::new(None)) }
}

pub fn access<R>(&self, f: impl for<'tcx> FnOnce(&'tcx GlobalCtxt<'tcx>) -> R) -> R {
let read_guard = self.value.read();
let gcx: *const GlobalCtxt<'_> = read_guard.unwrap() as *const _;
// SAFETY: We hold the read lock for the `GlobalCtxt` pointer. That prevents
// `GlobalCtxt::enter` from returning as it would first acquire the write lock.
// This ensures the `GlobalCtxt` is live during `f`.
f(unsafe { &*gcx })
}
}

impl<'tcx> TyCtxt<'tcx> {
/// Expects a body and returns its codegen attributes.
///
@ -859,6 +908,7 @@ impl<'tcx> TyCtxt<'tcx> {
query_kinds: &'tcx [DepKindStruct<'tcx>],
query_system: QuerySystem<'tcx>,
hooks: crate::hooks::Providers,
current_gcx: CurrentGcx,
) -> GlobalCtxt<'tcx> {
let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| {
s.dcx().emit_fatal(err);
@ -893,6 +943,7 @@ impl<'tcx> TyCtxt<'tcx> {
canonical_param_env_cache: Default::default(),
data_layout,
alloc_map: Lock::new(interpret::AllocMap::new()),
current_gcx,
}
}

@ -2,8 +2,6 @@ use crate::ty::{self, Binder, BoundTy, Ty, TyCtxt, TypeVisitableExt};
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir::def_id::DefId;

use std::collections::BTreeMap;

pub use rustc_type_ir::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable};

///////////////////////////////////////////////////////////////////////////
@ -254,12 +252,12 @@ impl<'tcx> TyCtxt<'tcx> {
self,
value: Binder<'tcx, T>,
mut fld_r: F,
) -> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
) -> (T, FxIndexMap<ty::BoundRegion, ty::Region<'tcx>>)
where
F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
T: TypeFoldable<TyCtxt<'tcx>>,
{
let mut region_map = BTreeMap::new();
let mut region_map = FxIndexMap::default();
let real_fld_r = |br: ty::BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br));
let value = self.instantiate_bound_regions_uncached(value, real_fld_r);
(value, region_map)

@ -17,7 +17,6 @@ use rustc_type_ir::WithCachedTypeInfo;
use smallvec::SmallVec;

use core::intrinsics;
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::mem;
use std::num::NonZero;
@ -68,7 +67,7 @@ const TYPE_TAG: usize = 0b00;
const REGION_TAG: usize = 0b01;
const CONST_TAG: usize = 0b10;

#[derive(Debug, TyEncodable, TyDecodable, PartialEq, Eq, PartialOrd, Ord, HashStable)]
#[derive(Debug, TyEncodable, TyDecodable, PartialEq, Eq, HashStable)]
pub enum GenericArgKind<'tcx> {
Lifetime(ty::Region<'tcx>),
Type(Ty<'tcx>),
@ -100,18 +99,6 @@ impl<'tcx> GenericArgKind<'tcx> {
}
}

impl<'tcx> Ord for GenericArg<'tcx> {
fn cmp(&self, other: &GenericArg<'tcx>) -> Ordering {
self.unpack().cmp(&other.unpack())
}
}

impl<'tcx> PartialOrd for GenericArg<'tcx> {
fn partial_cmp(&self, other: &GenericArg<'tcx>) -> Option<Ordering> {
Some(self.cmp(other))
}
}

impl<'tcx> From<ty::Region<'tcx>> for GenericArg<'tcx> {
#[inline]
fn from(r: ty::Region<'tcx>) -> GenericArg<'tcx> {

@ -60,6 +60,7 @@ pub use rustc_target::abi::{ReprFlags, ReprOptions};
pub use rustc_type_ir::{DebugWithInfcx, InferCtxtLike, WithInfcx};
pub use vtable::*;

use std::assert_matches::assert_matches;
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
@ -86,8 +87,8 @@ pub use self::consts::{
Const, ConstData, ConstInt, ConstKind, Expr, ScalarInt, UnevaluatedConst, ValTree,
};
pub use self::context::{
tls, CtxtInterners, DeducedParamAttrs, Feed, FreeRegionInfo, GlobalCtxt, Lift, TyCtxt,
TyCtxtFeed,
tls, CtxtInterners, CurrentGcx, DeducedParamAttrs, Feed, FreeRegionInfo, GlobalCtxt, Lift,
TyCtxt, TyCtxtFeed,
};
pub use self::instance::{Instance, InstanceDef, ShortInstance, UnusedGenericParams};
pub use self::list::List;
@ -214,7 +215,6 @@ pub struct ResolverAstLowering {
pub next_node_id: ast::NodeId,

pub node_id_to_def_id: NodeMap<LocalDefId>,
pub def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>,

pub trait_map: NodeMap<Vec<hir::TraitCandidate>>,
/// List functions and methods for which lifetime elision was successful.
@ -516,7 +516,7 @@ pub struct CReaderCacheKey {
}

/// Use this rather than `TyKind`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_diagnostic_item = "Ty"]
#[rustc_pass_by_value]
pub struct Ty<'tcx>(Interned<'tcx, WithCachedTypeInfo<TyKind<'tcx>>>);
@ -701,7 +701,7 @@ const TAG_MASK: usize = 0b11;
const TYPE_TAG: usize = 0b00;
const CONST_TAG: usize = 0b01;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable)]
pub enum TermKind<'tcx> {
Ty(Ty<'tcx>),
@ -1011,7 +1011,7 @@ impl PlaceholderLike for PlaceholderType {
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
#[derive(TyEncodable, TyDecodable, PartialOrd, Ord)]
#[derive(TyEncodable, TyDecodable)]
pub struct BoundConst<'tcx> {
pub var: BoundVar,
pub ty: Ty<'tcx>,
@ -1858,8 +1858,40 @@ impl<'tcx> TyCtxt<'tcx> {

/// Returns layout of a coroutine. Layout might be unavailable if the
/// coroutine is tainted by errors.
pub fn coroutine_layout(self, def_id: DefId) -> Option<&'tcx CoroutineLayout<'tcx>> {
self.optimized_mir(def_id).coroutine_layout()
///
/// Takes `coroutine_kind` which can be acquired from the `CoroutineArgs::kind_ty`,
/// e.g. `args.as_coroutine().kind_ty()`.
pub fn coroutine_layout(
self,
def_id: DefId,
coroutine_kind_ty: Ty<'tcx>,
) -> Option<&'tcx CoroutineLayout<'tcx>> {
let mir = self.optimized_mir(def_id);
// Regular coroutine
if coroutine_kind_ty.is_unit() {
mir.coroutine_layout_raw()
} else {
// If we have a `Coroutine` that comes from an coroutine-closure,
// then it may be a by-move or by-ref body.
let ty::Coroutine(_, identity_args) =
*self.type_of(def_id).instantiate_identity().kind()
else {
unreachable!();
};
let identity_kind_ty = identity_args.as_coroutine().kind_ty();
// If the types differ, then we must be getting the by-move body of
// a by-ref coroutine.
if identity_kind_ty == coroutine_kind_ty {
mir.coroutine_layout_raw()
} else {
assert_matches!(coroutine_kind_ty.to_opt_closure_kind(), Some(ClosureKind::FnOnce));
assert_matches!(
identity_kind_ty.to_opt_closure_kind(),
Some(ClosureKind::Fn | ClosureKind::FnMut)
);
mir.coroutine_by_move_body().unwrap().coroutine_layout_raw()
}
}
}

/// Given the `DefId` of an impl, returns the `DefId` of the trait it implements.

@ -192,7 +192,7 @@ impl<'tcx> Clause<'tcx> {
}
}

#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub enum ExistentialPredicate<'tcx> {
/// E.g., `Iterator`.
@ -336,7 +336,7 @@ impl<'tcx> ty::List<ty::PolyExistentialPredicate<'tcx>> {
///
/// Trait references also appear in object types like `Foo<U>`, but in
/// that case the `Self` parameter is absent from the generic parameters.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct TraitRef<'tcx> {
pub def_id: DefId,
@ -420,7 +420,7 @@ impl<'tcx> IntoDiagArg for TraitRef<'tcx> {
/// ```
/// The generic parameters don't include the erased `Self`, only trait
/// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct ExistentialTraitRef<'tcx> {
pub def_id: DefId,
@ -476,7 +476,7 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> {
}

/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct ExistentialProjection<'tcx> {
pub def_id: DefId,

@ -10,7 +10,7 @@ use crate::ty::{
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::unord::UnordMap;
use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
use rustc_hir::def_id::{DefIdMap, DefIdSet, ModDefId, CRATE_DEF_ID, LOCAL_CRATE};
@ -24,7 +25,6 @@ use rustc_target::spec::abi::Abi;
use smallvec::SmallVec;

use std::cell::Cell;
use std::collections::BTreeMap;
use std::fmt::{self, Write as _};
use std::iter;
use std::ops::{Deref, DerefMut};
@ -2537,7 +2537,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
struct RegionFolder<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
current_index: ty::DebruijnIndex,
region_map: BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
region_map: UnordMap<ty::BoundRegion, ty::Region<'tcx>>,
name: &'a mut (
dyn FnMut(
Option<ty::DebruijnIndex>, // Debruijn index of the folded late-bound region
@ -2614,7 +2614,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
pub fn name_all_regions<T>(
&mut self,
value: &ty::Binder<'tcx, T>,
) -> Result<(T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>), fmt::Error>
) -> Result<(T, UnordMap<ty::BoundRegion, ty::Region<'tcx>>), fmt::Error>
where
T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>,
{
@ -2691,7 +2691,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
write!(self, "{var:?}")?;
}
start_or_continue(self, "", "> ");
(value.clone().skip_binder(), BTreeMap::default())
(value.clone().skip_binder(), UnordMap::default())
} else {
let tcx = self.tcx;

@ -2763,7 +2763,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
tcx,
current_index: ty::INNERMOST,
name: &mut name,
region_map: BTreeMap::new(),
region_map: UnordMap::default(),
};
let new_value = value.clone().skip_binder().fold_with(&mut folder);
let region_map = folder.region_map;

@ -14,7 +14,7 @@ use crate::ty::{self, BoundVar, TyCtxt, TypeFlags};
pub type RegionKind<'tcx> = IrRegionKind<TyCtxt<'tcx>>;

/// Use this rather than `RegionKind`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct Region<'tcx>(pub Interned<'tcx, RegionKind<'tcx>>);

@ -327,7 +327,7 @@ impl<'tcx> Deref for Region<'tcx> {
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, PartialOrd, Ord)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct EarlyParamRegion {
pub def_id: DefId,
@ -358,7 +358,7 @@ impl Atom for RegionVid {
}
}

#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(HashStable)]
/// The parameter representation of late-bound function parameters, "some region
/// at least as big as the scope `fr.scope`".
@ -367,7 +367,7 @@ pub struct LateParamRegion {
pub bound_region: BoundRegionKind,
}

#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)]
#[derive(HashStable)]
pub enum BoundRegionKind {
/// An anonymous region parameter for a given fn (&T)
@ -384,7 +384,7 @@ pub enum BoundRegionKind {
BrEnv,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, PartialOrd, Ord)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct BoundRegion {
pub var: BoundVar,

@ -694,7 +694,8 @@ impl<'tcx> CoroutineArgs<'tcx> {
#[inline]
pub fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range<VariantIdx> {
// FIXME requires optimized MIR
FIRST_VARIANT..tcx.coroutine_layout(def_id).unwrap().variant_fields.next_index()
FIRST_VARIANT
..tcx.coroutine_layout(def_id, tcx.types.unit).unwrap().variant_fields.next_index()
}

/// The discriminant for the given variant. Panics if the `variant_index` is
@ -754,7 +755,7 @@ impl<'tcx> CoroutineArgs<'tcx> {
def_id: DefId,
tcx: TyCtxt<'tcx>,
) -> impl Iterator<Item: Iterator<Item = Ty<'tcx>> + Captures<'tcx>> {
let layout = tcx.coroutine_layout(def_id).unwrap();
let layout = tcx.coroutine_layout(def_id, self.kind_ty()).unwrap();
layout.variant_fields.iter().map(move |variant| {
variant.iter().map(move |field| {
ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args)
@ -867,7 +868,7 @@ impl<'tcx> InlineConstArgs<'tcx> {
}
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub enum BoundVariableKind {
Ty(BoundTyKind),
@ -907,7 +908,7 @@ impl BoundVariableKind {
/// e.g., `liberate_late_bound_regions`).
///
/// `Decodable` and `Encodable` are implemented for `Binder<T>` using the `impl_binder_encode_decode!` macro.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable, Lift)]
pub struct Binder<'tcx, T> {
value: T,
@ -1108,7 +1109,7 @@ where
/// * For a projection, this would be `<Ty as Trait<...>>::N<...>`.
/// * For an inherent projection, this would be `Ty::N<...>`.
/// * For an opaque type, there is no explicit syntax.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct AliasTy<'tcx> {
/// The parameters of the associated or opaque item.
@ -1277,7 +1278,7 @@ pub struct GenSig<'tcx> {
/// - `inputs`: is the list of arguments and their modes.
/// - `output`: is the return type.
/// - `c_variadic`: indicates whether this is a C-variadic function.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
pub struct FnSig<'tcx> {
pub inputs_and_output: &'tcx List<Ty<'tcx>>,
@ -1402,14 +1403,14 @@ impl ParamConst {
}
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct BoundTy {
pub var: BoundVar,
pub kind: BoundTyKind,
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub enum BoundTyKind {
Anon,
@ -2660,7 +2661,7 @@ impl<'tcx> Ty<'tcx> {
/// a miscompilation or unsoundness.
///
/// When in doubt, use `VarianceDiagInfo::default()`
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum VarianceDiagInfo<'tcx> {
/// No additional information - this is the default.
/// We will not add any additional information to error messages.

@ -430,6 +430,31 @@ impl<'tcx> TypeckResults<'tcx> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments }
}

/// Does the pattern recursively contain a `ref mut` binding in it?
///
/// This is used to determine whether a `deref` pattern should emit a `Deref`
/// or `DerefMut` call for its pattern scrutinee.
///
/// This is computed from the typeck results since we want to make
/// sure to apply any match-ergonomics adjustments, which we cannot
/// determine from the HIR alone.
pub fn pat_has_ref_mut_binding(&self, pat: &'tcx hir::Pat<'tcx>) -> bool {
let mut has_ref_mut = false;
pat.walk(|pat| {
if let hir::PatKind::Binding(_, id, _, _) = pat.kind
&& let Some(ty::BindByReference(ty::Mutability::Mut)) =
self.pat_binding_modes().get(id)
{
has_ref_mut = true;
// No need to continue recursing
false
} else {
true
}
});
has_ref_mut
}

/// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured
/// by the closure.
pub fn closure_min_captures_flattened(

@ -1006,6 +1006,10 @@ struct Candidate<'pat, 'tcx> {
/// If the candidate matches, bindings and ascriptions must be established.
extra_data: PatternExtraData<'tcx>,

/// If we filled `self.subcandidates`, we store here the span of the or-pattern they came from.
// Invariant: it is `None` iff `subcandidates.is_empty()`.
or_span: Option<Span>,

/// The block before the `bindings` have been established.
pre_binding_block: Option<BasicBlock>,
/// The pre-binding block of the next candidate.
@ -1028,6 +1032,7 @@ impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
extra_data: flat_pat.extra_data,
has_guard,
subcandidates: Vec::new(),
or_span: None,
otherwise_block: None,
pre_binding_block: None,
next_candidate_pre_binding_block: None,
@ -1106,7 +1111,10 @@ impl<'pat, 'tcx> TestCase<'pat, 'tcx> {
#[derive(Debug, Clone)]
pub(crate) struct MatchPair<'pat, 'tcx> {
/// This place...
place: PlaceBuilder<'tcx>,
// This can be `None` if it referred to a non-captured place in a closure.
// Invariant: place.is_none() => test_case is Irrefutable
// In other words this must be `Some(_)` after simplification.
place: Option<Place<'tcx>>,

/// ... must pass this test...
// Invariant: after creation and simplification in `Candidate::new()`, this must not be
@ -1277,7 +1285,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
//
// only generates a single switch.
candidate.subcandidates = self.create_or_subcandidates(pats, candidate.has_guard);
candidate.match_pairs.pop();
let first_match_pair = candidate.match_pairs.pop().unwrap();
candidate.or_span = Some(first_match_pair.pattern.span);
split_or_candidate = true;
}
}
@ -1287,8 +1296,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// At least one of the candidates has been split into subcandidates.
// We need to change the candidate list to include those.
let mut new_candidates = Vec::new();

for candidate in candidates {
for candidate in candidates.iter_mut() {
candidate.visit_leaves(|leaf_candidate| new_candidates.push(leaf_candidate));
}
self.match_simplified_candidates(
@ -1298,6 +1306,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
otherwise_block,
&mut *new_candidates,
);

for candidate in candidates {
self.merge_trivial_subcandidates(candidate);
}
} else {
self.match_simplified_candidates(
span,
@ -1531,16 +1543,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut or_candidate_refs,
);
candidate.subcandidates = or_candidates;
self.merge_trivial_subcandidates(candidate, self.source_info(or_span));
candidate.or_span = Some(or_span);
self.merge_trivial_subcandidates(candidate);
}

/// Try to merge all of the subcandidates of the given candidate into one.
/// This avoids exponentially large CFGs in cases like `(1 | 2, 3 | 4, ...)`.
fn merge_trivial_subcandidates(
&mut self,
candidate: &mut Candidate<'_, 'tcx>,
source_info: SourceInfo,
) {
fn merge_trivial_subcandidates(&mut self, candidate: &mut Candidate<'_, 'tcx>) {
if candidate.subcandidates.is_empty() || candidate.has_guard {
// FIXME(or_patterns; matthewjasper) Don't give up if we have a guard.
return;
@ -1550,7 +1559,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

// Not `Iterator::all` because we don't want to short-circuit.
for subcandidate in &mut candidate.subcandidates {
self.merge_trivial_subcandidates(subcandidate, source_info);
self.merge_trivial_subcandidates(subcandidate);

// FIXME(or_patterns; matthewjasper) Try to be more aggressive here.
can_merge &=
@ -1559,6 +1568,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

if can_merge {
let any_matches = self.cfg.start_new_block();
let or_span = candidate.or_span.take().unwrap();
let source_info = self.source_info(or_span);
for subcandidate in mem::take(&mut candidate.subcandidates) {
let or_block = subcandidate.pre_binding_block.unwrap();
self.cfg.goto(or_block, source_info, any_matches);
@ -1587,11 +1598,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn pick_test(
&mut self,
candidates: &mut [&mut Candidate<'_, 'tcx>],
) -> (PlaceBuilder<'tcx>, Test<'tcx>) {
) -> (Place<'tcx>, Test<'tcx>) {
// Extract the match-pair from the highest priority candidate
let match_pair = &candidates.first().unwrap().match_pairs[0];
let test = self.test(match_pair);
let match_place = match_pair.place.clone();
// Unwrap is ok after simplification.
let match_place = match_pair.place.unwrap();
debug!(?test, ?match_pair);

(match_place, test)
@ -1632,7 +1644,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// - candidate 1 becomes `[y @ false]` since we know that `x` was `false`.
fn sort_candidates<'b, 'c, 'pat>(
&mut self,
match_place: &PlaceBuilder<'tcx>,
match_place: Place<'tcx>,
test: &Test<'tcx>,
mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
) -> (
@ -1650,7 +1662,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// sorting.
while let Some(candidate) = candidates.first_mut() {
let Some(branch) =
self.sort_candidate(&match_place, test, candidate, &target_candidates)
self.sort_candidate(match_place, test, candidate, &target_candidates)
else {
break;
};
@ -1778,7 +1790,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// For each of the N possible test outcomes, build the vector of candidates that applies if
// the test has that particular outcome.
let (remaining_candidates, target_candidates) =
self.sort_candidates(&match_place, &test, candidates);
self.sort_candidates(match_place, &test, candidates);

// The block that we should branch to if none of the
// `target_candidates` match.
@ -1818,7 +1830,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scrutinee_span,
start_block,
remainder_start,
&match_place,
match_place,
&test,
target_blocks,
);

@ -82,7 +82,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&*candidate.match_pairs
{
candidate.subcandidates = self.create_or_subcandidates(pats, has_guard);
candidate.match_pairs.pop();
let first_match_pair = candidate.match_pairs.pop().unwrap();
candidate.or_span = Some(first_match_pair.pattern.span);
}
candidate
})

@ -5,7 +5,6 @@
// identify what tests are needed, perform the tests, and then filter
// the candidates based on the result.

use crate::build::expr::as_place::PlaceBuilder;
use crate::build::matches::{Candidate, MatchPair, Test, TestBranch, TestCase, TestKind};
use crate::build::Builder;
use rustc_data_structures::fx::FxIndexMap;
@ -55,18 +54,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Test { span: match_pair.pattern.span, kind }
}

#[instrument(skip(self, target_blocks, place_builder), level = "debug")]
#[instrument(skip(self, target_blocks, place), level = "debug")]
pub(super) fn perform_test(
&mut self,
match_start_span: Span,
scrutinee_span: Span,
block: BasicBlock,
otherwise_block: BasicBlock,
place_builder: &PlaceBuilder<'tcx>,
place: Place<'tcx>,
test: &Test<'tcx>,
target_blocks: FxIndexMap<TestBranch<'tcx>, BasicBlock>,
) {
let place = place_builder.to_place(self);
let place_ty = place.ty(&self.local_decls, self.tcx);
debug!(?place, ?place_ty);
let target_block = |branch| target_blocks.get(&branch).copied().unwrap_or(otherwise_block);
@ -475,7 +473,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// tighter match code if we do something a bit different.
pub(super) fn sort_candidate(
&mut self,
test_place: &PlaceBuilder<'tcx>,
test_place: Place<'tcx>,
test: &Test<'tcx>,
candidate: &mut Candidate<'_, 'tcx>,
sorted_candidates: &FxIndexMap<TestBranch<'tcx>, Vec<&mut Candidate<'_, 'tcx>>>,
@ -486,8 +484,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// than one, but it'd be very unusual to have two sides that
// both require tests; you'd expect one side to be simplified
// away.)
let (match_pair_index, match_pair) =
candidate.match_pairs.iter().enumerate().find(|&(_, mp)| mp.place == *test_place)?;
let (match_pair_index, match_pair) = candidate
.match_pairs
.iter()
.enumerate()
.find(|&(_, mp)| mp.place == Some(test_place))?;

let fully_matched;
let ret = match (&test.kind, &match_pair.test_case) {
@ -521,7 +522,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
candidate
.match_pairs
.iter()
.any(|mp| mp.place == *test_place && is_covering_range(&mp.test_case))
.any(|mp| mp.place == Some(test_place) && is_covering_range(&mp.test_case))
};
if sorted_candidates
.get(&TestBranch::Failure)

Some files were not shown because too many files have changed in this diff.