Mirror of https://github.com/rust-lang/rust.git (synced 2025-04-15 21:47:04 +00:00)
Auto merge of #138693 - matthiaskrgr:rollup-ejq8mwp, r=matthiaskrgr
Rollup of 10 pull requests

Successful merges:

- #136177 (clarify BufRead::{fill_buf, consume} docs)
- #138654 (Remove the regex dependency from coretests)
- #138655 (rustc-dev-guide sync)
- #138656 (Remove double nesting in post-merge workflow)
- #138658 (CI: mirror alpine and centos images to ghcr)
- #138659 (coverage: Don't store a body span in `FunctionCoverageInfo`)
- #138661 (Revert: Add *_value methods to proc_macro lib)
- #138670 (Remove existing AFIDT implementation)
- #138674 (Various codegen_llvm cleanups)
- #138684 (use `then` in docs for `fuse` to enhance readability)

r? `@ghost`
`@rustbot` modify labels: rollup
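Two of the rolled-up changes are documentation-only (#136177 for `BufRead::{fill_buf, consume}` and #138684 for `Iterator::fuse`). As a quick, hedged illustration of the standard-library behaviour those docs describe (this sketch is not part of the diff below):

```rust
use std::io::{self, BufRead};

// Count the bytes in a reader using `fill_buf`/`consume` directly:
// `fill_buf` returns the reader's internal buffer (refilling it if empty),
// and `consume(n)` marks `n` of those bytes as used.
fn count_bytes<R: BufRead>(mut reader: R) -> io::Result<usize> {
    let mut total = 0;
    loop {
        let buf = reader.fill_buf()?;
        if buf.is_empty() {
            break; // An empty buffer from `fill_buf` means EOF.
        }
        let len = buf.len();
        total += len;
        reader.consume(len);
    }
    Ok(total)
}

fn main() -> io::Result<()> {
    assert_eq!(count_bytes(&b"hello world"[..])?, 11);

    // `fuse` guarantees that once the underlying iterator has returned `None`,
    // every later `next` call also returns `None`.
    let mut iter = [1, 2].iter().fuse();
    assert_eq!(iter.next(), Some(&1));
    assert_eq!(iter.next(), Some(&2));
    assert_eq!(iter.next(), None);
    assert_eq!(iter.next(), None);
    Ok(())
}
```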
Commit a7fc463dd8

.github/workflows/ghcr.yml (vendored, 7 changes)
@@ -5,6 +5,9 @@
# Docker Hub has a rate limit, while ghcr.io doesn't.
# Those images are pushed to ghcr.io by this job.
#
# While Docker Hub rate limit *shouldn't* be an issue on GitHub Actions,
# it certainly is for AWS codebuild.
#
# Note that authenticating to DockerHub or other registries isn't possible
# for PR jobs, because forks can't access secrets.
# That's why we use ghcr.io: it has no rate limit and it doesn't require authentication.

@@ -54,6 +57,10 @@ jobs:
"ubuntu:22.04"
# Mirrored because used by all linux CI jobs, including mingw-check-tidy
"moby/buildkit:buildx-stable-1"
# Mirrored because used when CI is running inside a Docker container
"alpine:3.4"
# Mirrored because used by dist-x86_64-linux
"centos:7"
)

# Mirror each image from DockerHub to ghcr.io
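The hunk above only adds entries to the list of images to mirror; the step that performs the mirroring is not shown here. For orientation, a minimal sketch of how an image can be copied from Docker Hub to ghcr.io with the plain Docker CLI (the actual workflow may use different tooling; `OWNER` is a placeholder, and pushing requires a prior `docker login ghcr.io` with a `write:packages` token):

```bash
# Pull the upstream image, retag it under ghcr.io, and push the copy.
docker pull ubuntu:22.04
docker tag ubuntu:22.04 ghcr.io/OWNER/ubuntu:22.04
docker push ghcr.io/OWNER/ubuntu:22.04
```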
.github/workflows/post-merge.yml (vendored, 7 changes)
@@ -35,13 +35,12 @@ jobs:
cd src/ci/citool

printf "*This is an experimental post-merge analysis report. You can ignore it.*\n\n" > output.log
printf "<details>\n<summary>Post-merge report</summary>\n\n" >> output.log
printf "<details>\n<summary>What is this?</summary>\n" >> output.log
printf "This is an experimental post-merge analysis report that shows differences in test outcomes between the merged PR and its parent PR.\n" >> output.log
printf "</details>\n\n" >> output.log

cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log

printf "</details>\n" >> output.log

cat output.log

gh pr comment ${HEAD_PR} -F output.log
Cargo.lock (17 changes)
@@ -2082,13 +2082,6 @@ version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104"

[[package]]
name = "literal-escaper"
version = "0.0.0"
dependencies = [
 "rustc-std-workspace-std 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "lld-wrapper"
version = "0.1.0"

@@ -3155,12 +3148,6 @@ version = "1.0.1"
name = "rustc-std-workspace-std"
version = "1.0.1"

[[package]]
name = "rustc-std-workspace-std"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aba676a20abe46e5b0f1b0deae474aaaf31407e6c71147159890574599da04ef"

[[package]]
name = "rustc_abi"
version = "0.0.0"

@@ -3199,7 +3186,6 @@ name = "rustc_ast"
version = "0.0.0"
dependencies = [
 "bitflags",
 "literal-escaper",
 "memchr",
 "rustc_ast_ir",
 "rustc_data_structures",

@@ -3909,7 +3895,6 @@ name = "rustc_lexer"
version = "0.0.0"
dependencies = [
 "expect-test",
 "literal-escaper",
 "memchr",
 "unicode-properties",
 "unicode-xid",

@@ -4172,7 +4157,6 @@ name = "rustc_parse"
version = "0.0.0"
dependencies = [
 "bitflags",
 "literal-escaper",
 "rustc_ast",
 "rustc_ast_pretty",
 "rustc_data_structures",

@@ -4195,7 +4179,6 @@ dependencies = [
name = "rustc_parse_format"
version = "0.0.0"
dependencies = [
 "literal-escaper",
 "rustc_index",
 "rustc_lexer",
]
@@ -6,7 +6,6 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags = "2.4.1"
literal-escaper = { path = "../../library/literal-escaper" }
memchr = "2.7.4"
rustc_ast_ir = { path = "../rustc_ast_ir" }
rustc_data_structures = { path = "../rustc_data_structures" }

@@ -2,7 +2,7 @@
use std::{ascii, fmt, str};

use literal_escaper::{
use rustc_lexer::unescape::{
MixedUnit, Mode, byte_from_char, unescape_byte, unescape_char, unescape_mixed, unescape_unicode,
};
use rustc_span::{Span, Symbol, kw, sym};

@@ -105,9 +105,14 @@ fn fill_region_tables<'tcx>(
ids_info: &'tcx CoverageIdsInfo,
covfun: &mut CovfunRecord<'tcx>,
) {
// Currently a function's mappings must all be in the same file as its body span.
// Currently a function's mappings must all be in the same file, so use the
// first mapping's span to determine the file.
let source_map = tcx.sess.source_map();
let source_file = source_map.lookup_source_file(fn_cov_info.body_span.lo());
let Some(first_span) = (try { fn_cov_info.mappings.first()?.span }) else {
debug_assert!(false, "function has no mappings: {:?}", covfun.mangled_function_name);
return;
};
let source_file = source_map.lookup_source_file(first_span.lo());

// Look up the global file ID for that file.
let global_file_id = global_file_table.global_file_id_for_file(&source_file);

@@ -118,9 +123,8 @@ fn fill_region_tables<'tcx>(
let ffi::Regions { code_regions, branch_regions, mcdc_branch_regions, mcdc_decision_regions } =
&mut covfun.regions;

let make_cov_span = |span: Span| {
spans::make_coverage_span(local_file_id, source_map, fn_cov_info, &source_file, span)
};
let make_cov_span =
|span: Span| spans::make_coverage_span(local_file_id, source_map, &source_file, span);
let discard_all = tcx.sess.coverage_discard_all_spans_in_codegen();

// For each counter/region pair in this function+file, convert it to a
@@ -1,4 +1,3 @@
use rustc_middle::mir::coverage::FunctionCoverageInfo;
use rustc_span::source_map::SourceMap;
use rustc_span::{BytePos, Pos, SourceFile, Span};
use tracing::debug;

@@ -19,11 +18,10 @@ use crate::coverageinfo::mapgen::LocalFileId;
pub(crate) fn make_coverage_span(
file_id: LocalFileId,
source_map: &SourceMap,
fn_cov_info: &FunctionCoverageInfo,
file: &SourceFile,
span: Span,
) -> Option<ffi::CoverageSpan> {
let span = ensure_non_empty_span(source_map, fn_cov_info, span)?;
let span = ensure_non_empty_span(source_map, span)?;

let lo = span.lo();
let hi = span.hi();

@@ -55,36 +53,22 @@ pub(crate) fn make_coverage_span(
})
}

fn ensure_non_empty_span(
source_map: &SourceMap,
fn_cov_info: &FunctionCoverageInfo,
span: Span,
) -> Option<Span> {
fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
if !span.is_empty() {
return Some(span);
}

let lo = span.lo();
let hi = span.hi();

// The span is empty, so try to expand it to cover an adjacent '{' or '}',
// but only within the bounds of the body span.
let try_next = hi < fn_cov_info.body_span.hi();
let try_prev = fn_cov_info.body_span.lo() < lo;
if !(try_next || try_prev) {
return None;
}

// The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
source_map
.span_to_source(span, |src, start, end| try {
// Adjusting span endpoints by `BytePos(1)` is normally a bug,
// but in this case we have specifically checked that the character
// we're skipping over is one of two specific ASCII characters, so
// adjusting by exactly 1 byte is correct.
if try_next && src.as_bytes()[end] == b'{' {
Some(span.with_hi(hi + BytePos(1)))
} else if try_prev && src.as_bytes()[start - 1] == b'}' {
Some(span.with_lo(lo - BytePos(1)))
if src.as_bytes().get(end).copied() == Some(b'{') {
Some(span.with_hi(span.hi() + BytePos(1)))
} else if start > 0 && src.as_bytes()[start - 1] == b'}' {
Some(span.with_lo(span.lo() - BytePos(1)))
} else {
None
}
@@ -2,6 +2,7 @@ use std::borrow::Cow;
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::{iter, ptr};

use libc::{c_char, c_longlong, c_uint};

@@ -38,8 +39,8 @@ use crate::debuginfo::metadata::type_map::build_type_with_children;
use crate::debuginfo::utils::{WidePtrKind, wide_pointer_kind};
use crate::llvm;
use crate::llvm::debuginfo::{
DIDescriptor, DIFile, DIFlags, DILexicalBlock, DIScope, DIType, DebugEmissionKind,
DebugNameTableKind,
DIBasicType, DIBuilder, DICompositeType, DIDescriptor, DIFile, DIFlags, DILexicalBlock,
DIScope, DIType, DebugEmissionKind, DebugNameTableKind,
};
use crate::value::Value;

@@ -68,7 +69,8 @@ pub(super) const UNKNOWN_COLUMN_NUMBER: c_uint = 0;

const NO_SCOPE_METADATA: Option<&DIScope> = None;
/// A function that returns an empty list of generic parameter debuginfo nodes.
const NO_GENERICS: for<'ll> fn(&CodegenCx<'ll, '_>) -> SmallVec<&'ll DIType> = |_| SmallVec::new();
const NO_GENERICS: for<'ll> fn(&CodegenCx<'ll, '_>) -> SmallVec<Option<&'ll DIType>> =
|_| SmallVec::new();

// SmallVec is used quite a bit in this module, so create a shorthand.
// The actual number of elements is not so important.

@@ -243,7 +245,7 @@ fn build_pointer_or_reference_di_node<'ll, 'tcx>(
cx,
owner,
addr_field_name,
(addr_field.size, addr_field.align.abi),
addr_field,
layout.fields.offset(WIDE_PTR_ADDR),
DIFlags::FlagZero,
data_ptr_type_di_node,

@@ -253,7 +255,7 @@ fn build_pointer_or_reference_di_node<'ll, 'tcx>(
cx,
owner,
extra_field_name,
(extra_field.size, extra_field.align.abi),
extra_field,
layout.fields.offset(WIDE_PTR_EXTRA),
DIFlags::FlagZero,
type_di_node(cx, extra_field.ty),

@@ -311,12 +313,7 @@ fn build_subroutine_type_di_node<'ll, 'tcx>(

debug_context(cx).type_map.unique_id_to_di_node.borrow_mut().remove(&unique_type_id);

let fn_di_node = unsafe {
llvm::LLVMRustDIBuilderCreateSubroutineType(
DIB(cx),
create_DIArray(DIB(cx), &signature_di_nodes[..]),
)
};
let fn_di_node = create_subroutine_type(cx, create_DIArray(DIB(cx), &signature_di_nodes[..]));

// This is actually a function pointer, so wrap it in pointer DI.
let name = compute_debuginfo_type_name(cx.tcx, fn_ty, false);

@@ -340,6 +337,13 @@ fn build_subroutine_type_di_node<'ll, 'tcx>(
DINodeCreationResult::new(di_node, false)
}

pub(super) fn create_subroutine_type<'ll>(
cx: &CodegenCx<'ll, '_>,
signature: &'ll DICompositeType,
) -> &'ll DICompositeType {
unsafe { llvm::LLVMRustDIBuilderCreateSubroutineType(DIB(cx), signature) }
}

/// Create debuginfo for `dyn SomeTrait` types. Currently these are empty structs
/// we with the correct type name (e.g. "dyn SomeTrait<Foo, Item=u32> + Sync").
fn build_dyn_type_di_node<'ll, 'tcx>(

@@ -487,26 +491,22 @@ pub(crate) fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) ->
// FIXME(mw): Cache this via a regular UniqueTypeId instead of an extra field in the debug context.
fn recursion_marker_type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) -> &'ll DIType {
*debug_context(cx).recursion_marker_type.get_or_init(move || {
unsafe {
// The choice of type here is pretty arbitrary -
// anything reading the debuginfo for a recursive
// type is going to see *something* weird - the only
// question is what exactly it will see.
//
// FIXME: the name `<recur_type>` does not fit the naming scheme
// of other types.
//
// FIXME: it might make sense to use an actual pointer type here
// so that debuggers can show the address.
let name = "<recur_type>";
llvm::LLVMRustDIBuilderCreateBasicType(
DIB(cx),
name.as_c_char_ptr(),
name.len(),
cx.tcx.data_layout.pointer_size.bits(),
dwarf_const::DW_ATE_unsigned,
)
}
// The choice of type here is pretty arbitrary -
// anything reading the debuginfo for a recursive
// type is going to see *something* weird - the only
// question is what exactly it will see.
//
// FIXME: the name `<recur_type>` does not fit the naming scheme
// of other types.
//
// FIXME: it might make sense to use an actual pointer type here
// so that debuggers can show the address.
create_basic_type(
cx,
"<recur_type>",
cx.tcx.data_layout.pointer_size,
dwarf_const::DW_ATE_unsigned,
)
})
}

@@ -620,42 +620,38 @@ pub(crate) fn file_metadata<'ll>(cx: &CodegenCx<'ll, '_>, source_file: &SourceFi
let source =
cx.sess().opts.unstable_opts.embed_source.then_some(()).and(source_file.src.as_ref());

unsafe {
llvm::LLVMRustDIBuilderCreateFile(
DIB(cx),
file_name.as_c_char_ptr(),
file_name.len(),
directory.as_c_char_ptr(),
directory.len(),
hash_kind,
hash_value.as_c_char_ptr(),
hash_value.len(),
source.map_or(ptr::null(), |x| x.as_c_char_ptr()),
source.map_or(0, |x| x.len()),
)
}
create_file(DIB(cx), &file_name, &directory, &hash_value, hash_kind, source)
}
}

fn unknown_file_metadata<'ll>(cx: &CodegenCx<'ll, '_>) -> &'ll DIFile {
debug_context(cx).created_files.borrow_mut().entry(None).or_insert_with(|| unsafe {
let file_name = "<unknown>";
let directory = "";
let hash_value = "";
debug_context(cx).created_files.borrow_mut().entry(None).or_insert_with(|| {
create_file(DIB(cx), "<unknown>", "", "", llvm::ChecksumKind::None, None)
})
}

fn create_file<'ll>(
builder: &DIBuilder<'ll>,
file_name: &str,
directory: &str,
hash_value: &str,
hash_kind: llvm::ChecksumKind,
source: Option<&Arc<String>>,
) -> &'ll DIFile {
unsafe {
llvm::LLVMRustDIBuilderCreateFile(
DIB(cx),
builder,
file_name.as_c_char_ptr(),
file_name.len(),
directory.as_c_char_ptr(),
directory.len(),
llvm::ChecksumKind::None,
hash_kind,
hash_value.as_c_char_ptr(),
hash_value.len(),
ptr::null(),
0,
source.map_or(ptr::null(), |x| x.as_c_char_ptr()),
source.map_or(0, |x| x.len()),
)
})
}
}
trait MsvcBasicName {

@@ -742,7 +738,7 @@ fn build_cpp_f16_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) -> DINodeCreation
cx,
float_di_node,
"bits",
cx.size_and_align_of(bits_ty),
cx.layout_of(bits_ty),
Size::ZERO,
DIFlags::FlagZero,
type_di_node(cx, bits_ty),

@@ -788,15 +784,7 @@ fn build_basic_type_di_node<'ll, 'tcx>(
_ => bug!("debuginfo::build_basic_type_di_node - `t` is invalid type"),
};

let ty_di_node = unsafe {
llvm::LLVMRustDIBuilderCreateBasicType(
DIB(cx),
name.as_c_char_ptr(),
name.len(),
cx.size_of(t).bits(),
encoding,
)
};
let ty_di_node = create_basic_type(cx, name, cx.size_of(t), encoding);

if !cpp_like_debuginfo {
return DINodeCreationResult::new(ty_di_node, false);

@@ -824,6 +812,23 @@ fn build_basic_type_di_node<'ll, 'tcx>(
DINodeCreationResult::new(typedef_di_node, false)
}

fn create_basic_type<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
name: &str,
size: Size,
encoding: u32,
) -> &'ll DIBasicType {
unsafe {
llvm::LLVMRustDIBuilderCreateBasicType(
DIB(cx),
name.as_c_char_ptr(),
name.len(),
size.bits(),
encoding,
)
}
}

fn build_foreign_type_di_node<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
t: Ty<'tcx>,

@@ -929,17 +934,13 @@ pub(crate) fn build_compile_unit_di_node<'ll, 'tcx>(
};

unsafe {
let compile_unit_file = llvm::LLVMRustDIBuilderCreateFile(
let compile_unit_file = create_file(
debug_context.builder.as_ref(),
name_in_debuginfo.as_c_char_ptr(),
name_in_debuginfo.len(),
work_dir.as_c_char_ptr(),
work_dir.len(),
&name_in_debuginfo,
&work_dir,
"",
llvm::ChecksumKind::None,
ptr::null(),
0,
ptr::null(),
0,
None,
);

let unit_metadata = llvm::LLVMRustDIBuilderCreateCompileUnit(

@@ -971,7 +972,7 @@ fn build_field_di_node<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
owner: &'ll DIScope,
name: &str,
size_and_align: (Size, Align),
layout: TyAndLayout<'tcx>,
offset: Size,
flags: DIFlags,
type_di_node: &'ll DIType,

@@ -983,6 +984,30 @@ fn build_field_di_node<'ll, 'tcx>(
} else {
(unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER)
};
create_member_type(
cx,
owner,
name,
file_metadata,
line_number,
layout,
offset,
flags,
type_di_node,
)
}

fn create_member_type<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
owner: &'ll DIScope,
name: &str,
file_metadata: &'ll DIType,
line_number: u32,
layout: TyAndLayout<'tcx>,
offset: Size,
flags: DIFlags,
type_di_node: &'ll DIType,
) -> &'ll DIType {
unsafe {
llvm::LLVMRustDIBuilderCreateMemberType(
DIB(cx),

@@ -991,8 +1016,8 @@ fn build_field_di_node<'ll, 'tcx>(
name.len(),
file_metadata,
line_number,
size_and_align.0.bits(),
size_and_align.1.bits() as u32,
layout.size.bits(),
layout.align.abi.bits() as u32,
offset.bits(),
flags,
type_di_node,

@@ -1076,7 +1101,7 @@ fn build_struct_type_di_node<'ll, 'tcx>(
cx,
owner,
&field_name[..],
(field_layout.size, field_layout.align.abi),
field_layout,
struct_type_and_layout.fields.offset(i),
visibility_di_flags(cx, f.did, adt_def.did()),
type_di_node(cx, field_layout.ty),

@@ -1126,7 +1151,7 @@ fn build_upvar_field_di_nodes<'ll, 'tcx>(
cx,
closure_or_coroutine_di_node,
capture_name.as_str(),
cx.size_and_align_of(up_var_ty),
cx.layout_of(up_var_ty),
layout.fields.offset(index),
DIFlags::FlagZero,
type_di_node(cx, up_var_ty),

@@ -1171,7 +1196,7 @@ fn build_tuple_type_di_node<'ll, 'tcx>(
cx,
tuple_di_node,
&tuple_field_name(index),
cx.size_and_align_of(component_type),
cx.layout_of(component_type),
tuple_type_and_layout.fields.offset(index),
DIFlags::FlagZero,
type_di_node(cx, component_type),

@@ -1269,7 +1294,7 @@ fn build_union_type_di_node<'ll, 'tcx>(
cx,
owner,
f.name.as_str(),
size_and_align_of(field_layout),
field_layout,
Size::ZERO,
DIFlags::FlagZero,
type_di_node(cx, field_layout.ty),

@@ -1287,32 +1312,33 @@ fn build_union_type_di_node<'ll, 'tcx>(
fn build_generic_type_param_di_nodes<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
ty: Ty<'tcx>,
) -> SmallVec<&'ll DIType> {
) -> SmallVec<Option<&'ll DIType>> {
if let ty::Adt(def, args) = *ty.kind() {
if args.types().next().is_some() {
let generics = cx.tcx.generics_of(def.did());
let names = get_parameter_names(cx, generics);
let template_params: SmallVec<_> = iter::zip(args, names)
.filter_map(|(kind, name)| {
kind.as_type().map(|ty| {
let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty);
let actual_type_di_node = type_di_node(cx, actual_type);
let name = name.as_str();
unsafe {
llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),
None,
name.as_c_char_ptr(),
name.len(),
actual_type_di_node,
)
}
})
})
.collect();
let generics = cx.tcx.generics_of(def.did());
return get_template_parameters(cx, generics, args);
}

return template_params;
}
return smallvec![];
}

pub(super) fn get_template_parameters<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
generics: &ty::Generics,
args: ty::GenericArgsRef<'tcx>,
) -> SmallVec<Option<&'ll DIType>> {
if args.types().next().is_some() {
let names = get_parameter_names(cx, generics);
let template_params: SmallVec<_> = iter::zip(args, names)
.filter_map(|(kind, name)| {
kind.as_type().map(|ty| {
let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty);
let actual_type_di_node = type_di_node(cx, actual_type);
Some(cx.create_template_type_parameter(name.as_str(), actual_type_di_node))
})
})
.collect();

return template_params;
}

return smallvec![];

@@ -1416,7 +1442,9 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
let void_pointer_ty = Ty::new_imm_ptr(tcx, tcx.types.unit);
let void_pointer_type_di_node = type_di_node(cx, void_pointer_ty);
let usize_di_node = type_di_node(cx, tcx.types.usize);
let (pointer_size, pointer_align) = cx.size_and_align_of(void_pointer_ty);
let pointer_layout = cx.layout_of(void_pointer_ty);
let pointer_size = pointer_layout.size;
let pointer_align = pointer_layout.align.abi;
// If `usize` is not pointer-sized and -aligned then the size and alignment computations
// for the vtable as a whole would be wrong. Let's make sure this holds even on weird
// platforms.

@@ -1472,7 +1500,7 @@ fn build_vtable_type_di_node<'ll, 'tcx>(
cx,
vtable_type_di_node,
&field_name,
(pointer_size, pointer_align),
pointer_layout,
field_offset,
DIFlags::FlagZero,
field_type_di_node,
@@ -17,8 +17,8 @@ use crate::debuginfo::metadata::enums::DiscrResult;
use crate::debuginfo::metadata::type_map::{self, Stub, UniqueTypeId};
use crate::debuginfo::metadata::{
DINodeCreationResult, NO_GENERICS, NO_SCOPE_METADATA, SmallVec, UNKNOWN_LINE_NUMBER,
build_field_di_node, file_metadata, file_metadata_from_def_id, size_and_align_of, type_di_node,
unknown_file_metadata, visibility_di_flags,
build_field_di_node, create_member_type, file_metadata, file_metadata_from_def_id,
size_and_align_of, type_di_node, unknown_file_metadata, visibility_di_flags,
};
use crate::debuginfo::utils::DIB;
use crate::llvm::debuginfo::{DIFile, DIFlags, DIType};

@@ -370,9 +370,9 @@ fn build_single_variant_union_fields<'ll, 'tcx>(
cx,
enum_type_di_node,
&variant_union_field_name(variant_index),
// NOTE: We use the size and align of the entire type, not from variant_layout
// NOTE: We use the layout of the entire type, not from variant_layout
// since the later is sometimes smaller (if it has fewer fields).
size_and_align_of(enum_type_and_layout),
enum_type_and_layout,
Size::ZERO,
visibility_flags,
variant_struct_type_wrapper_di_node,

@@ -560,7 +560,7 @@ fn build_variant_struct_wrapper_type_di_node<'ll, 'tcx>(
cx,
wrapper_struct_type_di_node,
"value",
size_and_align_of(enum_or_coroutine_type_and_layout),
enum_or_coroutine_type_and_layout,
Size::ZERO,
DIFlags::FlagZero,
variant_struct_type_di_node,

@@ -820,7 +820,6 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(
.unwrap_or_else(|| (unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER));

let field_name = variant_union_field_name(variant_member_info.variant_index);
let (size, align) = size_and_align_of(enum_type_and_layout);

let variant_struct_type_wrapper = build_variant_struct_wrapper_type_di_node(
cx,

@@ -840,27 +839,23 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(
},
);

// We use LLVMRustDIBuilderCreateMemberType() member type directly because
// We use create_member_type() member type directly because
// the build_field_di_node() function does not support specifying a source location,
// which is something that we don't do anywhere else.
unsafe {
llvm::LLVMRustDIBuilderCreateMemberType(
DIB(cx),
enum_type_di_node,
field_name.as_c_char_ptr(),
field_name.len(),
file_di_node,
line_number,
// NOTE: We use the size and align of the entire type, not from variant_layout
// since the later is sometimes smaller (if it has fewer fields).
size.bits(),
align.bits() as u32,
// Union fields are always at offset zero
Size::ZERO.bits(),
di_flags,
variant_struct_type_wrapper,
)
}
create_member_type(
cx,
enum_type_di_node,
&field_name,
file_di_node,
line_number,
// NOTE: We use the layout of the entire type, not from variant_layout
// since the later is sometimes smaller (if it has fewer fields).
enum_type_and_layout,
// Union fields are always at offset zero
Size::ZERO,
di_flags,
variant_struct_type_wrapper,
)
}));

assert_eq!(

@@ -874,7 +869,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(

if is_128_bits {
let type_di_node = type_di_node(cx, cx.tcx.types.u64);
let size_and_align = cx.size_and_align_of(cx.tcx.types.u64);
let u64_layout = cx.layout_of(cx.tcx.types.u64);

let (lo_offset, hi_offset) = match cx.tcx.data_layout.endian {
Endian::Little => (0, 8),

@@ -889,7 +884,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(
cx,
enum_type_di_node,
TAG_FIELD_NAME_128_LO,
size_and_align,
u64_layout,
lo_offset,
di_flags,
type_di_node,

@@ -900,7 +895,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(
cx,
enum_type_di_node,
TAG_FIELD_NAME_128_HI,
size_and_align,
u64_layout,
hi_offset,
DIFlags::FlagZero,
type_di_node,

@@ -911,7 +906,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(
cx,
enum_type_di_node,
TAG_FIELD_NAME,
cx.size_and_align_of(enum_type_and_layout.field(cx, tag_field).ty),
enum_type_and_layout.field(cx, tag_field),
enum_type_and_layout.fields.offset(tag_field),
di_flags,
tag_base_type_di_node,
@@ -249,7 +249,7 @@ fn build_enum_variant_struct_type_di_node<'ll, 'tcx>(
cx,
struct_type_di_node,
&field_name,
(field_layout.size, field_layout.align.abi),
field_layout,
variant_layout.fields.offset(field_index),
di_flags,
type_di_node(cx, field_layout.ty),

@@ -332,7 +332,7 @@ fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>(
cx,
variant_struct_type_di_node,
&field_name,
cx.size_and_align_of(field_type),
cx.layout_of(field_type),
variant_layout.fields.offset(field_index),
DIFlags::FlagZero,
type_di_node(cx, field_type),

@@ -352,7 +352,7 @@ fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>(
cx,
variant_struct_type_di_node,
upvar_name.as_str(),
cx.size_and_align_of(upvar_ty),
cx.layout_of(upvar_ty),
coroutine_type_and_layout.fields.offset(index),
DIFlags::FlagZero,
type_di_node(cx, upvar_ty),

@@ -363,6 +363,7 @@ fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>(

state_specific_fields.into_iter().chain(common_fields).collect()
},
// FIXME: this is a no-op. `build_generic_type_param_di_nodes` only works for Adts.
|cx| build_generic_type_param_di_nodes(cx, coroutine_type_and_layout.ty),
)
.di_node

@@ -13,9 +13,9 @@ use smallvec::smallvec;
use crate::common::{AsCCharPtr, CodegenCx};
use crate::debuginfo::metadata::type_map::{self, Stub, StubInfo, UniqueTypeId};
use crate::debuginfo::metadata::{
DINodeCreationResult, NO_GENERICS, SmallVec, UNKNOWN_LINE_NUMBER, file_metadata,
file_metadata_from_def_id, size_and_align_of, type_di_node, unknown_file_metadata,
visibility_di_flags,
DINodeCreationResult, NO_GENERICS, SmallVec, UNKNOWN_LINE_NUMBER, create_member_type,
file_metadata, file_metadata_from_def_id, size_and_align_of, type_di_node,
unknown_file_metadata, visibility_di_flags,
};
use crate::debuginfo::utils::{DIB, create_DIArray, get_namespace_for_item};
use crate::llvm::debuginfo::{DIFile, DIFlags, DIType};

@@ -363,23 +363,22 @@ fn build_discr_member_di_node<'ll, 'tcx>(

&Variants::Multiple { tag_field, .. } => {
let tag_base_type = tag_base_type(cx.tcx, enum_or_coroutine_type_and_layout);
let (size, align) = cx.size_and_align_of(tag_base_type);
let ty = type_di_node(cx, tag_base_type);
let file = unknown_file_metadata(cx);

unsafe {
Some(llvm::LLVMRustDIBuilderCreateMemberType(
DIB(cx),
containing_scope,
tag_name.as_c_char_ptr(),
tag_name.len(),
unknown_file_metadata(cx),
UNKNOWN_LINE_NUMBER,
size.bits(),
align.bits() as u32,
enum_or_coroutine_type_and_layout.fields.offset(tag_field).bits(),
DIFlags::FlagArtificial,
type_di_node(cx, tag_base_type),
))
}
let layout = cx.layout_of(tag_base_type);

Some(create_member_type(
cx,
containing_scope,
&tag_name,
file,
UNKNOWN_LINE_NUMBER,
layout,
enum_or_coroutine_type_and_layout.fields.offset(tag_field),
DIFlags::FlagArtificial,
ty,
))
}
}
}
@@ -257,7 +257,7 @@ pub(super) fn build_type_with_children<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
stub_info: StubInfo<'ll, 'tcx>,
members: impl FnOnce(&CodegenCx<'ll, 'tcx>, &'ll DIType) -> SmallVec<&'ll DIType>,
generics: impl FnOnce(&CodegenCx<'ll, 'tcx>) -> SmallVec<&'ll DIType>,
generics: impl FnOnce(&CodegenCx<'ll, 'tcx>) -> SmallVec<Option<&'ll DIType>>,
) -> DINodeCreationResult<'ll> {
assert_eq!(debug_context(cx).type_map.di_node_for_unique_id(stub_info.unique_type_id), None);

@@ -265,8 +265,7 @@ pub(super) fn build_type_with_children<'ll, 'tcx>(

let members: SmallVec<_> =
members(cx, stub_info.metadata).into_iter().map(|node| Some(node)).collect();
let generics: SmallVec<Option<&'ll DIType>> =
generics(cx).into_iter().map(|node| Some(node)).collect();
let generics = generics(cx);

if !(members.is_empty() && generics.is_empty()) {
unsafe {
@@ -2,10 +2,11 @@

use std::cell::{OnceCell, RefCell};
use std::ops::Range;
use std::ptr;
use std::sync::Arc;
use std::{iter, ptr};

use libc::c_uint;
use metadata::create_subroutine_type;
use rustc_abi::Size;
use rustc_codegen_ssa::debuginfo::type_names;
use rustc_codegen_ssa::mir::debuginfo::VariableKind::*;

@@ -34,8 +35,8 @@ use crate::builder::Builder;
use crate::common::{AsCCharPtr, CodegenCx};
use crate::llvm;
use crate::llvm::debuginfo::{
DIArray, DIBuilderBox, DIFile, DIFlags, DILexicalBlock, DILocation, DISPFlags, DIScope, DIType,
DIVariable,
DIArray, DIBuilderBox, DIFile, DIFlags, DILexicalBlock, DILocation, DISPFlags, DIScope,
DITemplateTypeParameter, DIType, DIVariable,
};
use crate::value::Value;

@@ -251,7 +252,7 @@ struct DebugLoc {
col: u32,
}

impl CodegenCx<'_, '_> {
impl<'ll> CodegenCx<'ll, '_> {
/// Looks up debug source information about a `BytePos`.
// FIXME(eddyb) rename this to better indicate it's a duplicate of
// `lookup_char_pos` rather than `dbg_loc`, perhaps by making

@@ -279,6 +280,22 @@ impl CodegenCx<'_, '_> {
DebugLoc { file, line, col }
}
}

fn create_template_type_parameter(
&self,
name: &str,
actual_type_metadata: &'ll DIType,
) -> &'ll DITemplateTypeParameter {
unsafe {
llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(self),
None,
name.as_c_char_ptr(),
name.len(),
actual_type_metadata,
)
}
}
}

impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {

@@ -325,10 +342,8 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
let loc = self.lookup_debug_loc(span.lo());
let file_metadata = file_metadata(self, &loc.file);

let function_type_metadata = unsafe {
let fn_signature = get_function_signature(self, fn_abi);
llvm::LLVMRustDIBuilderCreateSubroutineType(DIB(self), fn_signature)
};
let function_type_metadata =
create_subroutine_type(self, get_function_signature(self, fn_abi));

let mut name = String::with_capacity(64);
type_names::push_item_name(tcx, def_id, false, &mut name);

@@ -471,46 +486,10 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
generics: &ty::Generics,
args: GenericArgsRef<'tcx>,
) -> &'ll DIArray {
if args.types().next().is_none() {
return create_DIArray(DIB(cx), &[]);
}

// Again, only create type information if full debuginfo is enabled
let template_params: Vec<_> = if cx.sess().opts.debuginfo == DebugInfo::Full {
let names = get_parameter_names(cx, generics);
iter::zip(args, names)
.filter_map(|(kind, name)| {
kind.as_type().map(|ty| {
let actual_type = cx.tcx.normalize_erasing_regions(cx.typing_env(), ty);
let actual_type_metadata = type_di_node(cx, actual_type);
let name = name.as_str();
unsafe {
Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),
None,
name.as_c_char_ptr(),
name.len(),
actual_type_metadata,
))
}
})
})
.collect()
} else {
vec![]
};

let template_params = metadata::get_template_parameters(cx, generics, args);
create_DIArray(DIB(cx), &template_params)
}

fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {
let mut names = generics.parent.map_or_else(Vec::new, |def_id| {
get_parameter_names(cx, cx.tcx.generics_of(def_id))
});
names.extend(generics.own_params.iter().map(|param| param.name));
names
}

/// Returns a scope, plus `true` if that's a type scope for "class" methods,
/// otherwise `false` for plain namespace scopes.
fn get_containing_scope<'ll, 'tcx>(
@@ -176,7 +176,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
.in_definition_order()
// We only care about associated types.
.filter(|item| item.kind == ty::AssocKind::Type)
// No RPITITs -- even with `async_fn_in_dyn_trait`, they are implicit.
// No RPITITs -- they're not dyn-compatible for now.
.filter(|item| !item.is_impl_trait_in_trait())
// If the associated type has a `where Self: Sized` bound,
// we do not need to constrain the associated type.

@@ -16,7 +16,6 @@ Rust lexer used by rustc. No stability guarantees are provided.
[dependencies]
memchr = "2.7.4"
unicode-xid = "0.2.0"
literal-escaper = { path = "../../library/literal-escaper" }

[dependencies.unicode-properties]
version = "0.1.0"

@@ -26,13 +26,11 @@
// tidy-alphabetical-end

mod cursor;
pub mod unescape;

#[cfg(test)]
mod tests;

// FIXME: This is needed for rust-analyzer. Remove this dependency once rust-analyzer uses
// `literal-escaper`.
pub use literal_escaper as unescape;
use unicode_properties::UnicodeEmoji;
pub use unicode_xid::UNICODE_VERSION as UNICODE_XID_VERSION;
@@ -194,7 +194,6 @@ pub struct Mapping {
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct FunctionCoverageInfo {
pub function_source_hash: u64,
pub body_span: Span,

/// Used in conjunction with `priority_list` to create physical counters
/// and counter expressions, after MIR optimizations.

@@ -620,9 +620,8 @@ fn write_function_coverage_info(
function_coverage_info: &coverage::FunctionCoverageInfo,
w: &mut dyn io::Write,
) -> io::Result<()> {
let coverage::FunctionCoverageInfo { body_span, mappings, .. } = function_coverage_info;
let coverage::FunctionCoverageInfo { mappings, .. } = function_coverage_info;

writeln!(w, "{INDENT}coverage body span: {body_span:?}")?;
for coverage::Mapping { kind, span } in mappings {
writeln!(w, "{INDENT}coverage {kind:?} => {span:?};")?;
}

@@ -712,10 +712,7 @@ impl<'tcx> Instance<'tcx> {
..
})
);
// We also need to generate a shim if this is an AFIT.
let needs_rpitit_shim =
tcx.return_position_impl_trait_in_trait_shim_data(def).is_some();
if needs_track_caller_shim || needs_rpitit_shim {
if needs_track_caller_shim {
if tcx.is_closure_like(def) {
debug!(
" => vtable fn pointer created for closure with #[track_caller]: {:?} for method {:?} {:?}",
@@ -149,7 +149,6 @@ mod opaque_types;
mod parameterized;
mod predicate;
mod region;
mod return_position_impl_trait_in_trait;
mod rvalue_scopes;
mod structural_impls;
#[allow(hidden_glob_reexports)]

@@ -1,95 +0,0 @@
use rustc_hir::def_id::DefId;

use crate::ty::{self, ExistentialPredicateStableCmpExt, TyCtxt};

impl<'tcx> TyCtxt<'tcx> {
/// Given a `def_id` of a trait or impl method, compute whether that method needs to
/// have an RPITIT shim applied to it for it to be dyn compatible. If so, return the
/// `def_id` of the RPITIT, and also the args of trait method that returns the RPITIT.
///
/// NOTE that these args are not, in general, the same as than the RPITIT's args. They
/// are a subset of those args, since they do not include the late-bound lifetimes of
/// the RPITIT. Depending on the context, these will need to be dealt with in different
/// ways -- in codegen, it's okay to fill them with ReErased.
pub fn return_position_impl_trait_in_trait_shim_data(
self,
def_id: DefId,
) -> Option<(DefId, ty::EarlyBinder<'tcx, ty::GenericArgsRef<'tcx>>)> {
let assoc_item = self.opt_associated_item(def_id)?;

let (trait_item_def_id, opt_impl_def_id) = match assoc_item.container {
ty::AssocItemContainer::Impl => {
(assoc_item.trait_item_def_id?, Some(self.parent(def_id)))
}
ty::AssocItemContainer::Trait => (def_id, None),
};

let sig = self.fn_sig(trait_item_def_id);

// Check if the trait returns an RPITIT.
let ty::Alias(ty::Projection, ty::AliasTy { def_id, .. }) =
*sig.skip_binder().skip_binder().output().kind()
else {
return None;
};
if !self.is_impl_trait_in_trait(def_id) {
return None;
}

let args = if let Some(impl_def_id) = opt_impl_def_id {
// Rebase the args from the RPITIT onto the impl trait ref, so we can later
// substitute them with the method args of the *impl* method, since that's
// the instance we're building a vtable shim for.
ty::GenericArgs::identity_for_item(self, trait_item_def_id).rebase_onto(
self,
self.parent(trait_item_def_id),
self.impl_trait_ref(impl_def_id)
.expect("expected impl trait ref from parent of impl item")
.instantiate_identity()
.args,
)
} else {
// This is when we have a default trait implementation.
ty::GenericArgs::identity_for_item(self, trait_item_def_id)
};

Some((def_id, ty::EarlyBinder::bind(args)))
}

/// Given a `DefId` of an RPITIT and its args, return the existential predicates
/// that corresponds to the RPITIT's bounds with the self type erased.
pub fn item_bounds_to_existential_predicates(
self,
def_id: DefId,
args: ty::GenericArgsRef<'tcx>,
) -> &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>> {
let mut bounds: Vec<_> = self
.item_self_bounds(def_id)
.iter_instantiated(self, args)
.filter_map(|clause| {
clause
.kind()
.map_bound(|clause| match clause {
ty::ClauseKind::Trait(trait_pred) => Some(ty::ExistentialPredicate::Trait(
ty::ExistentialTraitRef::erase_self_ty(self, trait_pred.trait_ref),
)),
ty::ClauseKind::Projection(projection_pred) => {
Some(ty::ExistentialPredicate::Projection(
ty::ExistentialProjection::erase_self_ty(self, projection_pred),
))
}
ty::ClauseKind::TypeOutlives(_) => {
// Type outlives bounds don't really turn into anything,
// since we must use an intersection region for the `dyn*`'s
// region anyways.
None
}
_ => unreachable!("unexpected clause in item bounds: {clause:?}"),
})
.transpose()
})
.collect();
bounds.sort_by(|a, b| a.skip_binder().stable_cmp(self, &b.skip_binder()));
self.mk_poly_existential_predicates(&bounds)
}
}
@@ -107,7 +107,6 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:

mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo {
function_source_hash: hir_info.function_source_hash,
body_span: hir_info.body_span,

node_flow_data,
priority_list,

@@ -8,7 +8,6 @@ use rustc_hir::lang_items::LangItem;
use rustc_index::{Idx, IndexVec};
use rustc_middle::mir::*;
use rustc_middle::query::Providers;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{
self, CoroutineArgs, CoroutineArgsExt, EarlyBinder, GenericArgs, Ty, TyCtxt,
};

@@ -718,12 +717,6 @@ fn build_call_shim<'tcx>(

let def_id = instance.def_id();

let rpitit_shim = if let ty::InstanceKind::ReifyShim(..) = instance {
tcx.return_position_impl_trait_in_trait_shim_data(def_id)
} else {
None
};

let sig = tcx.fn_sig(def_id);
let sig = sig.map_bound(|sig| tcx.instantiate_bound_regions_with_erased(sig));

@@ -779,30 +772,7 @@ fn build_call_shim<'tcx>(
let mut local_decls = local_decls_for_sig(&sig, span);
let source_info = SourceInfo::outermost(span);

let mut destination = Place::return_place();
if let Some((rpitit_def_id, fn_args)) = rpitit_shim {
let rpitit_args =
fn_args.instantiate_identity().extend_to(tcx, rpitit_def_id, |param, _| {
match param.kind {
ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
ty::GenericParamDefKind::Type { .. }
| ty::GenericParamDefKind::Const { .. } => {
unreachable!("rpitit should have no addition ty/ct")
}
}
});
let dyn_star_ty = Ty::new_dynamic(
tcx,
tcx.item_bounds_to_existential_predicates(rpitit_def_id, rpitit_args),
tcx.lifetimes.re_erased,
ty::DynStar,
);
destination = local_decls.push(local_decls[RETURN_PLACE].clone()).into();
local_decls[RETURN_PLACE].ty = dyn_star_ty;
let mut inputs_and_output = sig.inputs_and_output.to_vec();
*inputs_and_output.last_mut().unwrap() = dyn_star_ty;
sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
}
let destination = Place::return_place();

let rcvr_place = || {
assert!(rcvr_adjustment.is_some());

@@ -921,23 +891,7 @@ fn build_call_shim<'tcx>(
);
}
// BB #1/#2 - return
// NOTE: If this is an RPITIT in dyn, we also want to coerce
// the return type of the function into a `dyn*`.
let stmts = if rpitit_shim.is_some() {
vec![Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::return_place(),
Rvalue::Cast(
CastKind::PointerCoercion(PointerCoercion::DynStar, CoercionSource::Implicit),
Operand::Move(destination),
sig.output(),
),
))),
}]
} else {
vec![]
};
let stmts = vec![];
block(&mut blocks, stmts, TerminatorKind::Return, false);
if let Some(Adjustment::RefMut) = rcvr_adjustment {
// BB #3 - drop if closure panics
@@ -6,7 +6,6 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
bitflags = "2.4.1"
literal-escaper = { path = "../../library/literal-escaper" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_data_structures = { path = "../rustc_data_structures" }

@@ -1,12 +1,12 @@
use std::ops::Range;

use literal_escaper::{self, EscapeError, Mode};
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::codes::*;
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
use rustc_lexer::unescape::{self, EscapeError, Mode};
use rustc_lexer::{Base, Cursor, DocStyle, LiteralKind, RawStrError};
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{

@@ -970,7 +970,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
postfix_len: u32,
) -> (token::LitKind, Symbol) {
self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
literal_escaper::unescape_unicode(src, mode, &mut |span, result| {
unescape::unescape_unicode(src, mode, &mut |span, result| {
callback(span, result.map(drop))
})
})

@@ -986,7 +986,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
postfix_len: u32,
) -> (token::LitKind, Symbol) {
self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
literal_escaper::unescape_mixed(src, mode, &mut |span, result| {
unescape::unescape_mixed(src, mode, &mut |span, result| {
callback(span, result.map(drop))
})
})
@@ -3,8 +3,8 @@
use std::iter::once;
use std::ops::Range;

use literal_escaper::{EscapeError, Mode};
use rustc_errors::{Applicability, DiagCtxtHandle, ErrorGuaranteed};
use rustc_lexer::unescape::{EscapeError, Mode};
use rustc_span::{BytePos, Span};
use tracing::debug;

@@ -6,7 +6,6 @@ use core::ops::{Bound, ControlFlow};
use ast::mut_visit::{self, MutVisitor};
use ast::token::{IdentIsRaw, MetaVarKind};
use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
use literal_escaper::unescape_char;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::TokenTree;

@@ -22,6 +21,7 @@ use rustc_ast::{
use rustc_ast_pretty::pprust;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic};
use rustc_lexer::unescape::unescape_char;
use rustc_macros::Subdiagnostic;
use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error};
use rustc_session::lint::BuiltinLintDiag;

@@ -5,7 +5,6 @@ edition = "2024"

[dependencies]
# tidy-alphabetical-start
literal-escaper = { path = "../../library/literal-escaper" }
rustc_index = { path = "../rustc_index", default-features = false }
rustc_lexer = { path = "../rustc_lexer" }
# tidy-alphabetical-end
@@ -18,6 +18,7 @@
pub use Alignment::*;
pub use Count::*;
pub use Position::*;
use rustc_lexer::unescape;

// Note: copied from rustc_span
/// Range inside of a `Span` used for diagnostics when we only have access to relative positions.

@@ -1093,14 +1094,12 @@ fn find_width_map_from_snippet(
fn unescape_string(string: &str) -> Option<String> {
let mut buf = String::new();
let mut ok = true;
literal_escaper::unescape_unicode(
string,
literal_escaper::Mode::Str,
&mut |_, unescaped_char| match unescaped_char {
unescape::unescape_unicode(string, unescape::Mode::Str, &mut |_, unescaped_char| {
match unescaped_char {
Ok(c) => buf.push(c),
Err(_) => ok = false,
},
);
}
});

ok.then_some(buf)
}
@@ -9,7 +9,6 @@ use std::ops::ControlFlow;
use rustc_errors::FatalError;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::bug;
use rustc_middle::query::Providers;
use rustc_middle::ty::{
self, EarlyBinder, GenericArgs, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable,

@@ -807,31 +806,8 @@ fn contains_illegal_impl_trait_in_trait<'tcx>(
let ty = tcx.liberate_late_bound_regions(fn_def_id, ty);

if tcx.asyncness(fn_def_id).is_async() {
// FIXME(async_fn_in_dyn_trait): Think of a better way to unify these code paths
// to issue an appropriate feature suggestion when users try to use AFIDT.
// Obviously we must only do this once AFIDT is finished enough to actually be usable.
if tcx.features().async_fn_in_dyn_trait() {
let ty::Alias(ty::Projection, proj) = *ty.kind() else {
bug!("expected async fn in trait to return an RPITIT");
};
assert!(tcx.is_impl_trait_in_trait(proj.def_id));

// FIXME(async_fn_in_dyn_trait): We should check that this bound is legal too,
// and stop relying on `async fn` in the definition.
for bound in tcx.item_bounds(proj.def_id).instantiate(tcx, proj.args) {
if let Some(violation) = bound
.visit_with(&mut IllegalRpititVisitor { tcx, allowed: Some(proj) })
.break_value()
{
return Some(violation);
}
}

None
} else {
// Rendering the error as a separate `async-specific` message is better.
Some(MethodViolationCode::AsyncFn)
}
// Rendering the error as a separate `async-specific` message is better.
Some(MethodViolationCode::AsyncFn)
} else {
ty.visit_with(&mut IllegalRpititVisitor { tcx, allowed: None }).break_value()
}
@@ -7,8 +7,8 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::DefKind;
use rustc_hir::lang_items::LangItem;
use rustc_infer::infer::DefineOpaqueTypes;
use rustc_infer::infer::resolve::OpportunisticRegionResolver;
use rustc_infer::infer::{DefineOpaqueTypes, RegionVariableOrigin};
use rustc_infer::traits::{ObligationCauseCode, PredicateObligations};
use rustc_middle::traits::select::OverflowError;
use rustc_middle::traits::{BuiltinImplSource, ImplSource, ImplSourceUserDefinedData};

@@ -18,8 +18,6 @@ use rustc_middle::ty::{
};
use rustc_middle::{bug, span_bug};
use rustc_span::sym;
use rustc_type_ir::elaborate;
use thin_vec::thin_vec;
use tracing::{debug, instrument};

use super::{

@@ -63,9 +61,6 @@ enum ProjectionCandidate<'tcx> {
/// Bounds specified on an object type
Object(ty::PolyProjectionPredicate<'tcx>),

/// Built-in bound for a dyn async fn in trait
ObjectRpitit,

/// From an "impl" (or a "pseudo-impl" returned by select)
Select(Selection<'tcx>),
}

@@ -832,16 +827,6 @@ fn assemble_candidates_from_object_ty<'cx, 'tcx>(
env_predicates,
false,
);

// `dyn Trait` automagically project their AFITs to `dyn* Future`.
if tcx.is_impl_trait_in_trait(obligation.predicate.def_id)
&& let Some(out_trait_def_id) = data.principal_def_id()
&& let rpitit_trait_def_id = tcx.parent(obligation.predicate.def_id)
&& elaborate::supertrait_def_ids(tcx, out_trait_def_id)
.any(|trait_def_id| trait_def_id == rpitit_trait_def_id)
{
candidate_set.push_candidate(ProjectionCandidate::ObjectRpitit);
}
}

#[instrument(

@@ -1273,8 +1258,6 @@ fn confirm_candidate<'cx, 'tcx>(
ProjectionCandidate::Select(impl_source) => {
confirm_select_candidate(selcx, obligation, impl_source)
}

ProjectionCandidate::ObjectRpitit => confirm_object_rpitit_candidate(selcx, obligation),
};

// When checking for cycle during evaluation, we compare predicates with

@@ -2070,45 +2053,6 @@ fn confirm_impl_candidate<'cx, 'tcx>(
}
}

fn confirm_object_rpitit_candidate<'cx, 'tcx>(
selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTermObligation<'tcx>,
) -> Progress<'tcx> {
let tcx = selcx.tcx();
let mut obligations = thin_vec![];

// Compute an intersection lifetime for all the input components of this GAT.
let intersection =
selcx.infcx.next_region_var(RegionVariableOrigin::MiscVariable(obligation.cause.span));
for component in obligation.predicate.args {
match component.unpack() {
ty::GenericArgKind::Lifetime(lt) => {
obligations.push(obligation.with(tcx, ty::OutlivesPredicate(lt, intersection)));
}
ty::GenericArgKind::Type(ty) => {
obligations.push(obligation.with(tcx, ty::OutlivesPredicate(ty, intersection)));
}
ty::GenericArgKind::Const(_ct) => {
// Consts have no outlives...
}
}
}

Progress {
term: Ty::new_dynamic(
tcx,
tcx.item_bounds_to_existential_predicates(
obligation.predicate.def_id,
obligation.predicate.args,
),
intersection,
ty::DynStar,
)
.into(),
obligations,
}
}

// Get obligations corresponding to the predicates from the where-clause of the
// associated type itself.
fn assoc_ty_own_obligations<'cx, 'tcx>(
@ -616,12 +616,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
||||
for assoc_type in assoc_types {
|
||||
let defs: &ty::Generics = tcx.generics_of(assoc_type);
|
||||
|
||||
// When `async_fn_in_dyn_trait` is enabled, we don't need to check the
|
||||
// RPITIT for compatibility, since it's not provided by the user.
|
||||
if tcx.features().async_fn_in_dyn_trait() && tcx.is_impl_trait_in_trait(assoc_type) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !defs.own_params.is_empty() {
|
||||
tcx.dcx().span_delayed_bug(
|
||||
obligation.cause.span,
|
||||
|
@ -55,31 +55,6 @@ fn fn_sig_for_fn_abi<'tcx>(
|
||||
sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
|
||||
}
|
||||
|
||||
// Modify `fn() -> impl Future` to `fn() -> dyn* Future`.
|
||||
if let ty::InstanceKind::ReifyShim(def_id, _) = instance.def
|
||||
&& let Some((rpitit_def_id, fn_args)) =
|
||||
tcx.return_position_impl_trait_in_trait_shim_data(def_id)
|
||||
{
|
||||
let fn_args = fn_args.instantiate(tcx, args);
|
||||
let rpitit_args =
|
||||
fn_args.extend_to(tcx, rpitit_def_id, |param, _| match param.kind {
|
||||
ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
|
||||
ty::GenericParamDefKind::Type { .. }
|
||||
| ty::GenericParamDefKind::Const { .. } => {
|
||||
unreachable!("rpitit should have no addition ty/ct")
|
||||
}
|
||||
});
|
||||
let dyn_star_ty = Ty::new_dynamic(
|
||||
tcx,
|
||||
tcx.item_bounds_to_existential_predicates(rpitit_def_id, rpitit_args),
|
||||
tcx.lifetimes.re_erased,
|
||||
ty::DynStar,
|
||||
);
|
||||
let mut inputs_and_output = sig.inputs_and_output.to_vec();
|
||||
*inputs_and_output.last_mut().unwrap() = dyn_star_ty;
|
||||
sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
|
||||
}
|
||||
|
||||
sig
|
||||
}
|
||||
ty::Closure(def_id, args) => {
|
||||
|
@ -85,7 +85,6 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"rand",
|
||||
"rand_xorshift",
|
||||
"regex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -165,13 +164,6 @@ dependencies = [
|
||||
"rustc-std-workspace-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "literal-escaper"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"rustc-std-workspace-std",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
@ -243,7 +235,6 @@ name = "proc_macro"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"core",
|
||||
"literal-escaper",
|
||||
"std",
|
||||
]
|
||||
|
||||
@ -312,31 +303,6 @@ dependencies = [
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
dependencies = [
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
|
||||
dependencies = [
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-demangle"
|
||||
version = "0.1.24"
|
||||
|
@ -8,7 +8,6 @@ members = [
|
||||
]
|
||||
|
||||
exclude = [
|
||||
"literal-escaper",
|
||||
# stdarch has its own Cargo workspace
|
||||
"stdarch",
|
||||
"windows_targets"
|
||||
|
@ -1704,11 +1704,7 @@ pub trait Iterator {
/// self.state = self.state + 1;
///
/// // if it's even, Some(i32), else None
/// if val % 2 == 0 {
/// Some(val)
/// } else {
/// None
/// }
/// (val % 2 == 0).then_some(val)
/// }
/// }
///
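
The doc change above swaps the `if`/`else` in the `fuse` example for `bool::then_some`. A minimal standalone sketch of that equivalence (illustrative only, not part of the diff):

```rust
fn main() {
    let val = 4;
    // `(cond).then_some(x)` is the combinator form of
    // `if cond { Some(x) } else { None }`.
    assert_eq!((val % 2 == 0).then_some(val), Some(4));
    assert_eq!((3 % 2 == 0).then_some(3), None);
}
```
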
@ -25,4 +25,3 @@ test = true
|
||||
[dev-dependencies]
|
||||
rand = { version = "0.9.0", default-features = false }
|
||||
rand_xorshift = { version = "0.4.0", default-features = false }
|
||||
regex = { version = "1.11.1", default-features = false }
|
||||
|
@ -22,32 +22,24 @@ fn test_pointer_formats_data_pointer() {
|
||||
#[test]
|
||||
fn test_fmt_debug_of_raw_pointers() {
|
||||
use core::fmt::Debug;
|
||||
use core::ptr;
|
||||
|
||||
fn check_fmt<T: Debug>(t: T, expected: &str) {
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
static ADDR_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"0x[0-9a-fA-F]+").unwrap());
|
||||
|
||||
fn check_fmt<T: Debug>(t: T, start: &str, contains: &str) {
|
||||
let formatted = format!("{:?}", t);
|
||||
let normalized = ADDR_REGEX.replace_all(&formatted, "$$HEX");
|
||||
|
||||
assert_eq!(normalized, expected);
|
||||
assert!(formatted.starts_with(start), "{formatted:?} doesn't start with {start:?}");
|
||||
assert!(formatted.contains(contains), "{formatted:?} doesn't contain {contains:?}");
|
||||
}
|
||||
|
||||
let plain = &mut 100;
|
||||
check_fmt(plain as *mut i32, "$HEX");
|
||||
check_fmt(plain as *const i32, "$HEX");
|
||||
assert_eq!(format!("{:?}", ptr::without_provenance_mut::<i32>(0x100)), "0x100");
|
||||
assert_eq!(format!("{:?}", ptr::without_provenance::<i32>(0x100)), "0x100");
|
||||
|
||||
let slice = &mut [200, 300, 400][..];
|
||||
check_fmt(slice as *mut [i32], "Pointer { addr: $HEX, metadata: 3 }");
|
||||
check_fmt(slice as *const [i32], "Pointer { addr: $HEX, metadata: 3 }");
|
||||
let slice = ptr::slice_from_raw_parts(ptr::without_provenance::<i32>(0x100), 3);
|
||||
assert_eq!(format!("{:?}", slice as *mut [i32]), "Pointer { addr: 0x100, metadata: 3 }");
|
||||
assert_eq!(format!("{:?}", slice as *const [i32]), "Pointer { addr: 0x100, metadata: 3 }");
|
||||
|
||||
let vtable = &mut 500 as &mut dyn Debug;
|
||||
check_fmt(vtable as *mut dyn Debug, "Pointer { addr: $HEX, metadata: DynMetadata($HEX) }");
|
||||
check_fmt(vtable as *const dyn Debug, "Pointer { addr: $HEX, metadata: DynMetadata($HEX) }");
|
||||
check_fmt(vtable as *mut dyn Debug, "Pointer { addr: ", ", metadata: DynMetadata(");
|
||||
check_fmt(vtable as *const dyn Debug, "Pointer { addr: ", ", metadata: DynMetadata(");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
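
The test rewrite above drops the regex-based address normalization (and with it the `regex` dev-dependency) in favor of plain prefix/substring assertions on the formatted pointer. A rough standalone sketch of that style of check, assuming only the stable `Debug`/`Pointer` formatting of thin raw pointers (illustrative; not part of the diff):

```rust
fn main() {
    let x = 42;
    // The Debug output of a raw pointer contains a non-deterministic,
    // 0x-prefixed hex address, so assert only on the stable parts of the
    // string instead of normalizing it with a regex.
    let formatted = format!("{:?}", &x as *const i32);
    assert!(formatted.starts_with("0x"), "{formatted:?} doesn't start with \"0x\"");
}
```
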
@ -1,10 +0,0 @@
|
||||
[package]
|
||||
name = "literal-escaper"
|
||||
version = "0.0.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
std = { version = '1.0.0', optional = true, package = 'rustc-std-workspace-std' }
|
||||
|
||||
[features]
|
||||
rustc-dep-of-std = ["dep:std"]
|
@ -1,4 +0,0 @@
|
||||
# literal-escaper
|
||||
|
||||
This crate provides code to unescape string literals. It is used by `rustc_lexer`
|
||||
and `proc_macro`.
|
@ -4,7 +4,6 @@ version = "0.0.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
literal-escaper = { path = "../literal-escaper", features = ["rustc-dep-of-std"] }
|
||||
std = { path = "../std" }
|
||||
# Workaround: when documenting this crate rustdoc will try to load crate named
|
||||
# `core` when resolving doc links. Without this line a different `core` will be
|
||||
|
@ -28,7 +28,6 @@
|
||||
#![feature(restricted_std)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(extend_one)]
|
||||
#![feature(stmt_expr_attributes)]
|
||||
#![recursion_limit = "256"]
|
||||
#![allow(internal_features)]
|
||||
#![deny(ffi_unwind_calls)]
|
||||
@ -52,24 +51,11 @@ use std::{error, fmt};
|
||||
|
||||
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
|
||||
pub use diagnostic::{Diagnostic, Level, MultiSpan};
|
||||
#[unstable(feature = "proc_macro_value", issue = "136652")]
|
||||
pub use literal_escaper::EscapeError;
|
||||
use literal_escaper::{MixedUnit, Mode, byte_from_char, unescape_mixed, unescape_unicode};
|
||||
#[unstable(feature = "proc_macro_totokens", issue = "130977")]
|
||||
pub use to_tokens::ToTokens;
|
||||
|
||||
use crate::escape::{EscapeOptions, escape_bytes};
|
||||
|
||||
/// Errors returned when trying to retrieve a literal unescaped value.
|
||||
#[unstable(feature = "proc_macro_value", issue = "136652")]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum ConversionErrorKind {
|
||||
/// The literal failed to be escaped, take a look at [`EscapeError`] for more information.
|
||||
FailedToUnescape(EscapeError),
|
||||
/// Trying to convert a literal with the wrong type.
|
||||
InvalidLiteralKind,
|
||||
}
|
||||
|
||||
/// Determines whether proc_macro has been made accessible to the currently
|
||||
/// running program.
|
||||
///
|
||||
@ -1465,107 +1451,6 @@ impl Literal {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the unescaped string value if the current literal is a string or a string literal.
|
||||
#[unstable(feature = "proc_macro_value", issue = "136652")]
|
||||
pub fn str_value(&self) -> Result<String, ConversionErrorKind> {
|
||||
self.0.symbol.with(|symbol| match self.0.kind {
|
||||
bridge::LitKind::Str => {
|
||||
if symbol.contains('\\') {
|
||||
let mut buf = String::with_capacity(symbol.len());
|
||||
let mut error = None;
|
||||
// Force-inlining here is aggressive but the closure is
|
||||
// called on every char in the string, so it can be hot in
|
||||
// programs with many long strings containing escapes.
|
||||
unescape_unicode(
|
||||
symbol,
|
||||
Mode::Str,
|
||||
&mut #[inline(always)]
|
||||
|_, c| match c {
|
||||
Ok(c) => buf.push(c),
|
||||
Err(err) => {
|
||||
if err.is_fatal() {
|
||||
error = Some(ConversionErrorKind::FailedToUnescape(err));
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
if let Some(error) = error { Err(error) } else { Ok(buf) }
|
||||
} else {
|
||||
Ok(symbol.to_string())
|
||||
}
|
||||
}
|
||||
bridge::LitKind::StrRaw(_) => Ok(symbol.to_string()),
|
||||
_ => Err(ConversionErrorKind::InvalidLiteralKind),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the unescaped string value if the current literal is a c-string or a c-string
|
||||
/// literal.
|
||||
#[unstable(feature = "proc_macro_value", issue = "136652")]
|
||||
pub fn cstr_value(&self) -> Result<Vec<u8>, ConversionErrorKind> {
|
||||
self.0.symbol.with(|symbol| match self.0.kind {
|
||||
bridge::LitKind::CStr => {
|
||||
let mut error = None;
|
||||
let mut buf = Vec::with_capacity(symbol.len());
|
||||
|
||||
unescape_mixed(symbol, Mode::CStr, &mut |_span, c| match c {
|
||||
Ok(MixedUnit::Char(c)) => {
|
||||
buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes())
|
||||
}
|
||||
Ok(MixedUnit::HighByte(b)) => buf.push(b),
|
||||
Err(err) => {
|
||||
if err.is_fatal() {
|
||||
error = Some(ConversionErrorKind::FailedToUnescape(err));
|
||||
}
|
||||
}
|
||||
});
|
||||
if let Some(error) = error {
|
||||
Err(error)
|
||||
} else {
|
||||
buf.push(0);
|
||||
Ok(buf)
|
||||
}
|
||||
}
|
||||
bridge::LitKind::CStrRaw(_) => {
|
||||
// Raw strings have no escapes so we can convert the symbol
|
||||
// directly to a `Lrc<u8>` after appending the terminating NUL
|
||||
// char.
|
||||
let mut buf = symbol.to_owned().into_bytes();
|
||||
buf.push(0);
|
||||
Ok(buf)
|
||||
}
|
||||
_ => Err(ConversionErrorKind::InvalidLiteralKind),
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the unescaped string value if the current literal is a byte string or a byte string
|
||||
/// literal.
|
||||
#[unstable(feature = "proc_macro_value", issue = "136652")]
|
||||
pub fn byte_str_value(&self) -> Result<Vec<u8>, ConversionErrorKind> {
|
||||
self.0.symbol.with(|symbol| match self.0.kind {
|
||||
bridge::LitKind::ByteStr => {
|
||||
let mut buf = Vec::with_capacity(symbol.len());
|
||||
let mut error = None;
|
||||
|
||||
unescape_unicode(symbol, Mode::ByteStr, &mut |_, c| match c {
|
||||
Ok(c) => buf.push(byte_from_char(c)),
|
||||
Err(err) => {
|
||||
if err.is_fatal() {
|
||||
error = Some(ConversionErrorKind::FailedToUnescape(err));
|
||||
}
|
||||
}
|
||||
});
|
||||
if let Some(error) = error { Err(error) } else { Ok(buf) }
|
||||
}
|
||||
bridge::LitKind::ByteStrRaw(_) => {
|
||||
// Raw strings have no escapes so we can convert the symbol
|
||||
// directly to a `Lrc<u8>`.
|
||||
Ok(symbol.to_owned().into_bytes())
|
||||
}
|
||||
_ => Err(ConversionErrorKind::InvalidLiteralKind),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse a single literal from its stringified representation.
|
||||
|
@ -2251,24 +2251,18 @@ fn skip_until<R: BufRead + ?Sized>(r: &mut R, delim: u8) -> Result<usize> {
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "IoBufRead")]
pub trait BufRead: Read {
/// Returns the contents of the internal buffer, filling it with more data
/// from the inner reader if it is empty.
/// Returns the contents of the internal buffer, filling it with more data, via `Read` methods, if empty.
///
/// This function is a lower-level call. It needs to be paired with the
/// [`consume`] method to function properly. When calling this
/// method, none of the contents will be "read" in the sense that later
/// calling `read` may return the same contents. As such, [`consume`] must
/// be called with the number of bytes that are consumed from this buffer to
/// ensure that the bytes are never returned twice.
/// This is a lower-level method and is meant to be used together with [`consume`],
/// which can be used to mark bytes that should not be returned by subsequent calls to `read`.
///
/// [`consume`]: BufRead::consume
///
/// An empty buffer returned indicates that the stream has reached EOF.
/// Returns an empty buffer when the stream has reached EOF.
///
/// # Errors
///
/// This function will return an I/O error if the underlying reader was
/// read, but returned an error.
/// This function will return an I/O error if a `Read` method was called, but returned an error.
///
/// # Examples
///
@ -2286,7 +2280,7 @@ pub trait BufRead: Read {
/// // work with buffer
/// println!("{buffer:?}");
///
/// // ensure the bytes we worked with aren't returned again later
/// // mark the bytes we worked with as read
/// let length = buffer.len();
/// stdin.consume(length);
/// # std::io::Result::Ok(())
@ -2294,18 +2288,13 @@ pub trait BufRead: Read {
#[stable(feature = "rust1", since = "1.0.0")]
fn fill_buf(&mut self) -> Result<&[u8]>;

/// Tells this buffer that `amt` bytes have been consumed from the buffer,
/// so they should no longer be returned in calls to `read`.
/// Marks the given `amount` of additional bytes from the internal buffer as having been read.
/// Subsequent calls to `read` only return bytes that have not been marked as read.
///
/// This function is a lower-level call. It needs to be paired with the
/// [`fill_buf`] method to function properly. This function does
/// not perform any I/O, it simply informs this object that some amount of
/// its buffer, returned from [`fill_buf`], has been consumed and should
/// no longer be returned. As such, this function may do odd things if
/// [`fill_buf`] isn't called before calling it.
/// This is a lower-level method and is meant to be used together with [`fill_buf`],
/// which can be used to fill the internal buffer via `Read` methods.
///
/// The `amt` must be `<=` the number of bytes in the buffer returned by
/// [`fill_buf`].
/// It is a logic error if `amount` exceeds the number of unread bytes in the internal buffer, which is returned by [`fill_buf`].
///
/// # Examples
///
@ -2314,9 +2303,9 @@ pub trait BufRead: Read {
///
/// [`fill_buf`]: BufRead::fill_buf
#[stable(feature = "rust1", since = "1.0.0")]
fn consume(&mut self, amt: usize);
fn consume(&mut self, amount: usize);

/// Checks if the underlying `Read` has any data left to be read.
/// Checks if there is any data left to be `read`.
///
/// This function may fill the buffer to check for data,
/// so this function returns `Result<bool>`, not `bool`.
@ -2325,6 +2314,10 @@ pub trait BufRead: Read {
/// returned slice is empty (which means that there is no data left,
/// since EOF is reached).
///
/// # Errors
///
/// This function will return an I/O error if a `Read` method was called, but returned an error.
///
/// Examples
///
/// ```
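
For context on the `fill_buf`/`consume` pairing documented above, a minimal usage sketch (not part of this commit; a `BufReader` over an in-memory slice is chosen purely for illustration):

```rust
use std::io::{BufRead, BufReader};

fn main() -> std::io::Result<()> {
    // An in-memory reader stands in for any `Read` implementor.
    let mut reader = BufReader::new(&b"hello world"[..]);
    loop {
        // Borrow whatever is buffered, filling the buffer from the inner
        // reader only if it is currently empty.
        let buffered = reader.fill_buf()?;
        if buffered.is_empty() {
            break; // an empty buffer signals EOF
        }
        let n = buffered.len();
        println!("processing {n} buffered bytes: {buffered:?}");
        // Mark those bytes as read so they are not returned again.
        reader.consume(n);
    }
    Ok(())
}
```

Each `fill_buf` call returns the same bytes until `consume` marks them as read, which is exactly the pairing the reworded docs describe.
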
@ -655,7 +655,7 @@ mod dist {
|
||||
let mut builder = Builder::new(&build);
|
||||
builder.run_step_descriptions(
|
||||
&Builder::get_step_descriptions(Kind::Build),
|
||||
&["compiler/rustc".into(), "std".into()],
|
||||
&["compiler/rustc".into(), "library".into()],
|
||||
);
|
||||
|
||||
assert_eq!(builder.config.stage, 2);
|
||||
|
@ -62,11 +62,6 @@ pub fn build(build: &mut Build) {
|
||||
let relative_path = krate.local_path(build);
|
||||
build.crates.insert(name.clone(), krate);
|
||||
let existing_path = build.crate_paths.insert(relative_path, name);
|
||||
// `literal-escaper` is both a dependency of `compiler/rustc_lexer` and of
|
||||
// `library/proc-macro`, making it appear multiple times in the workspace.
|
||||
if existing_path.as_deref() == Some("literal-escaper") {
|
||||
continue;
|
||||
}
|
||||
assert!(
|
||||
existing_path.is_none(),
|
||||
"multiple crates with the same path: {}",
|
||||
|
@ -716,7 +716,7 @@ impl Build {
|
||||
features.push("llvm");
|
||||
}
|
||||
// keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
|
||||
if self.config.rust_randomize_layout && check("rustc_randomized_layouts") {
|
||||
if self.config.rust_randomize_layout {
|
||||
features.push("rustc_randomized_layouts");
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ use crate::cpu_usage::load_cpu_usage;
|
||||
use crate::datadog::upload_datadog_metric;
|
||||
use crate::jobs::RunType;
|
||||
use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
|
||||
use crate::utils::{load_env_var, output_details};
|
||||
use crate::utils::load_env_var;
|
||||
|
||||
const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
|
||||
const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
|
||||
@ -175,13 +175,6 @@ fn postprocess_metrics(
|
||||
fn post_merge_report(db: JobDatabase, current: String, parent: String) -> anyhow::Result<()> {
|
||||
let metrics = download_auto_job_metrics(&db, &parent, ¤t)?;
|
||||
|
||||
output_details("What is this?", || {
|
||||
println!(
|
||||
r#"This is an experimental post-merge analysis report that shows differences in
|
||||
test outcomes between the merged PR and its parent PR."#
|
||||
);
|
||||
});
|
||||
|
||||
println!("\nComparing {parent} (parent) -> {current} (this PR)\n");
|
||||
output_test_diffs(metrics);
|
||||
|
||||
|
@ -1 +1 @@
|
||||
8536f201ffdb2c24925d7f9e87996d7dca93428b
|
||||
493c38ba371929579fe136df26eccd9516347c7a
|
||||
|
@ -123,6 +123,30 @@ Another way is without a plugin, and creating your own logic in your
|
||||
configuration. The following code will work for any checkout of rust-lang/rust (newer than February 2025):
|
||||
|
||||
```lua
|
||||
local function expand_config_variables(option)
|
||||
local var_placeholders = {
|
||||
['${workspaceFolder}'] = function(_)
|
||||
return vim.lsp.buf.list_workspace_folders()[1]
|
||||
end,
|
||||
}
|
||||
|
||||
if type(option) == "table" then
|
||||
local mt = getmetatable(option)
|
||||
local result = {}
|
||||
for k, v in pairs(option) do
|
||||
result[expand_config_variables(k)] = expand_config_variables(v)
|
||||
end
|
||||
return setmetatable(result, mt)
|
||||
end
|
||||
if type(option) ~= "string" then
|
||||
return option
|
||||
end
|
||||
local ret = option
|
||||
for key, fn in pairs(var_placeholders) do
|
||||
ret = ret:gsub(key, fn)
|
||||
end
|
||||
return ret
|
||||
end
|
||||
lspconfig.rust_analyzer.setup {
|
||||
root_dir = function()
|
||||
local default = lspconfig.rust_analyzer.config_def.default_config.root_dir()
|
||||
@ -142,7 +166,7 @@ lspconfig.rust_analyzer.setup {
|
||||
-- load rust-lang/rust settings
|
||||
local file = io.open(config)
|
||||
local json = vim.json.decode(file:read("*a"))
|
||||
client.config.settings["rust-analyzer"] = json.lsp["rust-analyzer"].initialization_options
|
||||
client.config.settings["rust-analyzer"] = expand_config_variables(json.lsp["rust-analyzer"].initialization_options)
|
||||
client.notify("workspace/didChangeConfiguration", { settings = client.config.settings })
|
||||
end
|
||||
return true
|
||||
|
@ -81,7 +81,7 @@ smaller user-facing changes.
|
||||
into a PR that ends up not getting merged!** [See this document][mcpinfo] for
|
||||
more info on MCPs.
|
||||
|
||||
[mcpinfo]: https://forge.rust-lang.org/compiler/mcp.html
|
||||
[mcpinfo]: https://forge.rust-lang.org/compiler/proposals-and-stabilization.html#how-do-i-submit-an-mcp
|
||||
[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler
|
||||
|
||||
### Performance
|
||||
|
@ -44,7 +44,7 @@ like this; for example, the compiler team recommends
|
||||
filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to
|
||||
garner support and feedback without requiring full consensus.
|
||||
|
||||
[mcp]: https://forge.rust-lang.org/compiler/mcp.html#public-facing-changes-require-rfcbot-fcp
|
||||
[mcp]: https://forge.rust-lang.org/compiler/proposals-and-stabilization.html#how-do-i-submit-an-mcp
|
||||
|
||||
You don't need to have the implementation fully ready for r+ to propose an FCP,
|
||||
but it is generally a good idea to have at least a proof
|
||||
|
@ -18,7 +18,7 @@ a subset of test collections, and merge queue CI will exercise all of the test
|
||||
collection.
|
||||
</div>
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test
|
||||
```
|
||||
|
||||
@ -45,7 +45,7 @@ tests. For example, a good "smoke test" that can be used after modifying rustc
|
||||
to see if things are generally working correctly would be to exercise the `ui`
|
||||
test suite ([`tests/ui`]):
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui
|
||||
```
|
||||
|
||||
@ -53,14 +53,14 @@ Of course, the choice of test suites is
|
||||
somewhat arbitrary, and may not suit the task you are doing. For example, if you
|
||||
are hacking on debuginfo, you may be better off with the debuginfo test suite:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/debuginfo
|
||||
```
|
||||
|
||||
If you only need to test a specific subdirectory of tests for any given test
|
||||
suite, you can pass that directory as a filter to `./x test`:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui/const-generics
|
||||
```
|
||||
|
||||
@ -73,7 +73,7 @@ suite, you can pass that directory as a filter to `./x test`:
|
||||
|
||||
Likewise, you can test a single file by passing its path:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui/const-generics/const-test.rs
|
||||
```
|
||||
|
||||
@ -81,19 +81,19 @@ Likewise, you can test a single file by passing its path:
|
||||
have to use the `--test-args` argument as described
|
||||
[below](#running-an-individual-test).
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test src/tools/miri --test-args tests/fail/uninit/padding-enum.rs
|
||||
```
|
||||
|
||||
### Run only the tidy script
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tidy
|
||||
```
|
||||
|
||||
### Run tests on the standard library
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test --stage 0 library/std
|
||||
```
|
||||
|
||||
@ -102,13 +102,13 @@ crates, you have to specify those explicitly.
|
||||
|
||||
### Run the tidy script and tests on the standard library
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test --stage 0 tidy library/std
|
||||
```
|
||||
|
||||
### Run tests on the standard library using a stage 1 compiler
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test --stage 1 library/std
|
||||
```
|
||||
|
||||
@ -122,7 +122,7 @@ the tests **usually** work fine with stage 1, there are some limitations.
|
||||
|
||||
### Run all tests using a stage 2 compiler
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test --stage 2
|
||||
```
|
||||
|
||||
@ -134,13 +134,13 @@ You almost never need to do this; CI will run these tests for you.
|
||||
|
||||
You may want to run unit tests on a specific file with following:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test compiler/rustc_data_structures/src/thin_vec/tests.rs
|
||||
```
|
||||
|
||||
But unfortunately, it's impossible. You should invoke the following instead:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test compiler/rustc_data_structures/ --test-args thin_vec
|
||||
```
|
||||
|
||||
@ -151,7 +151,7 @@ often the test they are trying to fix. As mentioned earlier, you may pass the
|
||||
full file path to achieve this, or alternatively one may invoke `x` with the
|
||||
`--test-args` option:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui --test-args issue-1234
|
||||
```
|
||||
|
||||
@ -203,7 +203,7 @@ When `--pass $mode` is passed, these tests will be forced to run under the given
|
||||
`$mode` unless the directive `//@ ignore-pass` exists in the test file. For
|
||||
example, you can run all the tests in `tests/ui` as `check-pass`:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui --pass check
|
||||
```
|
||||
|
||||
@ -219,7 +219,7 @@ first look for expected output in `foo.polonius.stderr`, falling back to the
|
||||
usual `foo.stderr` if not found. The following will run the UI test suite in
|
||||
Polonius mode:
|
||||
|
||||
```bash
|
||||
```text
|
||||
./x test tests/ui --compare-mode=polonius
|
||||
```
|
||||
|
||||
@ -232,7 +232,7 @@ just `.rs` files, so after [creating a rustup
|
||||
toolchain](../building/how-to-build-and-run.md#creating-a-rustup-toolchain), you
|
||||
can do something like:
|
||||
|
||||
```bash
|
||||
```text
|
||||
rustc +stage1 tests/ui/issue-1234.rs
|
||||
```
|
||||
|
||||
@ -252,7 +252,7 @@ execution* so be careful where it is used.
|
||||
To do this, first build `remote-test-server` for the remote machine, e.g. for
|
||||
RISC-V
|
||||
|
||||
```sh
|
||||
```text
|
||||
./x build src/tools/remote-test-server --target riscv64gc-unknown-linux-gnu
|
||||
```
|
||||
|
||||
@ -264,7 +264,7 @@ On the remote machine, run the `remote-test-server` with the `--bind
|
||||
0.0.0.0:12345` flag (and optionally `-v` for verbose output). Output should look
|
||||
like this:
|
||||
|
||||
```sh
|
||||
```text
|
||||
$ ./remote-test-server -v --bind 0.0.0.0:12345
|
||||
starting test server
|
||||
listening on 0.0.0.0:12345!
|
||||
@ -278,7 +278,7 @@ restrictive IP address when binding.
|
||||
You can test if the `remote-test-server` is working by connecting to it and
|
||||
sending `ping\n`. It should reply `pong`:
|
||||
|
||||
```sh
|
||||
```text
|
||||
$ nc $REMOTE_IP 12345
|
||||
ping
|
||||
pong
|
||||
@ -288,7 +288,7 @@ To run tests using the remote runner, set the `TEST_DEVICE_ADDR` environment
|
||||
variable then use `x` as usual. For example, to run `ui` tests for a RISC-V
|
||||
machine with the IP address `1.2.3.4` use
|
||||
|
||||
```sh
|
||||
```text
|
||||
export TEST_DEVICE_ADDR="1.2.3.4:12345"
|
||||
./x test tests/ui --target riscv64gc-unknown-linux-gnu
|
||||
```
|
||||
@ -296,7 +296,7 @@ export TEST_DEVICE_ADDR="1.2.3.4:12345"
|
||||
If `remote-test-server` was run with the verbose flag, output on the test
|
||||
machine may look something like
|
||||
|
||||
```
|
||||
```text
|
||||
[...]
|
||||
run "/tmp/work/test1007/a"
|
||||
run "/tmp/work/test1008/a"
|
||||
@ -362,21 +362,21 @@ codegen-backends = ["llvm", "gcc"]
|
||||
|
||||
Then you need to install libgccjit 12. For example with `apt`:
|
||||
|
||||
```bash
|
||||
$ apt install libgccjit-12-dev
|
||||
```text
|
||||
apt install libgccjit-12-dev
|
||||
```
|
||||
|
||||
Now you can run the following command:
|
||||
|
||||
```bash
|
||||
$ ./x test compiler/rustc_codegen_gcc/
|
||||
```text
|
||||
./x test compiler/rustc_codegen_gcc/
|
||||
```
|
||||
|
||||
If it cannot find the `.so` library (if you installed it with `apt` for example), you
|
||||
need to pass the library file path with `LIBRARY_PATH`:
|
||||
|
||||
```bash
|
||||
$ LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12/ ./x test compiler/rustc_codegen_gcc/
|
||||
```text
|
||||
LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12/ ./x test compiler/rustc_codegen_gcc/
|
||||
```
|
||||
|
||||
If you encounter bugs or problems, don't hesitate to open issues on the
|
||||
|
@ -318,7 +318,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
|
||||
"libloading",
|
||||
"linux-raw-sys",
|
||||
"litemap",
|
||||
"literal-escaper",
|
||||
"lock_api",
|
||||
"log",
|
||||
"matchers",
|
||||
@ -364,7 +363,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
|
||||
"rustc-rayon",
|
||||
"rustc-rayon-core",
|
||||
"rustc-stable-hash",
|
||||
"rustc-std-workspace-std",
|
||||
"rustc_apfloat",
|
||||
"rustix",
|
||||
"ruzstd", // via object in thorin-dwp
|
||||
|
@ -1,7 +0,0 @@
|
||||
//@ known-bug: #136286
|
||||
//@ compile-flags: --edition=2024
|
||||
|
||||
#![feature(async_fn_in_dyn_trait)]
|
||||
trait A {
|
||||
async fn b(self: A);
|
||||
}
|
@ -1,7 +0,0 @@
|
||||
//@ known-bug: #137706
|
||||
//@ needs-rustc-debug-assertions
|
||||
trait A {
|
||||
fn b() -> impl IntoIterator<Item = ()>;
|
||||
}
|
||||
|
||||
impl A<()> for dyn A {}
|
@ -1,6 +0,0 @@
|
||||
//@ known-bug: #137895
|
||||
trait A {
|
||||
fn b() -> impl ?Sized + 'a;
|
||||
}
|
||||
|
||||
impl A for dyn A {}
|
@ -26,7 +26,6 @@
|
||||
debug a => _9;
|
||||
}
|
||||
|
||||
+ coverage body span: $DIR/branch_match_arms.rs:14:11: 21:2 (#0)
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/branch_match_arms.rs:14:1: 15:21 (#0);
|
||||
+ coverage Code { bcb: bcb1 } => $DIR/branch_match_arms.rs:16:17: 16:33 (#0);
|
||||
+ coverage Code { bcb: bcb3 } => $DIR/branch_match_arms.rs:17:17: 17:33 (#0);
|
||||
|
@ -4,8 +4,7 @@
|
||||
fn bar() -> bool {
|
||||
let mut _0: bool;
|
||||
|
||||
+ coverage body span: $DIR/instrument_coverage.rs:29:18: 31:2 (#0)
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:29:1: 31:2 (#0);
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:27:1: 29:2 (#0);
|
||||
+
|
||||
bb0: {
|
||||
+ Coverage::VirtualCounter(bcb0);
|
||||
|
@ -7,12 +7,11 @@
|
||||
let mut _2: bool;
|
||||
let mut _3: !;
|
||||
|
||||
+ coverage body span: $DIR/instrument_coverage.rs:14:11: 20:2 (#0)
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:14:1: 14:11 (#0);
|
||||
+ coverage Code { bcb: bcb1 } => $DIR/instrument_coverage.rs:16:12: 16:17 (#0);
|
||||
+ coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:17:13: 17:18 (#0);
|
||||
+ coverage Code { bcb: bcb3 } => $DIR/instrument_coverage.rs:18:10: 18:10 (#0);
|
||||
+ coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:20:2: 20:2 (#0);
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/instrument_coverage.rs:13:1: 13:11 (#0);
|
||||
+ coverage Code { bcb: bcb1 } => $DIR/instrument_coverage.rs:15:12: 15:17 (#0);
|
||||
+ coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:16:13: 16:18 (#0);
|
||||
+ coverage Code { bcb: bcb3 } => $DIR/instrument_coverage.rs:17:10: 17:10 (#0);
|
||||
+ coverage Code { bcb: bcb2 } => $DIR/instrument_coverage.rs:19:2: 19:2 (#0);
|
||||
+
|
||||
bb0: {
|
||||
+ Coverage::VirtualCounter(bcb0);
|
||||
|
@ -7,7 +7,6 @@
|
||||
|
||||
// EMIT_MIR instrument_coverage.main.InstrumentCoverage.diff
|
||||
// CHECK-LABEL: fn main()
|
||||
// CHECK: coverage body span:
|
||||
// CHECK: coverage Code { bcb: bcb{{[0-9]+}} } =>
|
||||
// CHECK: bb0:
|
||||
// CHECK: Coverage::VirtualCounter
|
||||
@ -21,7 +20,6 @@ fn main() {
|
||||
|
||||
// EMIT_MIR instrument_coverage.bar.InstrumentCoverage.diff
|
||||
// CHECK-LABEL: fn bar()
|
||||
// CHECK: coverage body span:
|
||||
// CHECK: coverage Code { bcb: bcb{{[0-9]+}} } =>
|
||||
// CHECK: bb0:
|
||||
// CHECK: Coverage::VirtualCounter
|
||||
|
@ -7,7 +7,6 @@
|
||||
|
||||
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0)
|
||||
|
||||
coverage body span: $DIR/instrument_coverage_cleanup.rs:13:11: 15:2 (#0)
|
||||
coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:13:1: 14:36 (#0);
|
||||
coverage Code { bcb: bcb3 } => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
|
||||
coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
|
||||
|
@ -7,7 +7,6 @@
|
||||
|
||||
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0)
|
||||
|
||||
+ coverage body span: $DIR/instrument_coverage_cleanup.rs:13:11: 15:2 (#0)
|
||||
+ coverage Code { bcb: bcb0 } => $DIR/instrument_coverage_cleanup.rs:13:1: 14:36 (#0);
|
||||
+ coverage Code { bcb: bcb3 } => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
|
||||
+ coverage Code { bcb: bcb1 } => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
|
||||
|
@ -1,11 +1,9 @@
|
||||
//@ aux-build:block-on.rs
|
||||
//@ edition: 2021
|
||||
//@ run-pass
|
||||
//@ check-run-results
|
||||
//@ known-bug: #133119
|
||||
|
||||
#![allow(refining_impl_trait)]
|
||||
#![feature(async_fn_in_dyn_trait)]
|
||||
//~^ WARN the feature `async_fn_in_dyn_trait` is incomplete
|
||||
|
||||
extern crate block_on;
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
warning: the feature `async_fn_in_dyn_trait` is incomplete and may not be safe to use and/or cause compiler crashes
|
||||
--> $DIR/mut-is-pointer-like.rs:7:12
|
||||
--> $DIR/mut-is-pointer-like.rs:6:12
|
||||
|
|
||||
LL | #![feature(async_fn_in_dyn_trait)]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
@ -7,5 +7,65 @@ LL | #![feature(async_fn_in_dyn_trait)]
|
||||
= note: see issue #133119 <https://github.com/rust-lang/rust/issues/133119> for more information
|
||||
= note: `#[warn(incomplete_features)]` on by default
|
||||
|
||||
warning: 1 warning emitted
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/mut-is-pointer-like.rs:35:16
|
||||
|
|
||||
LL | let x: Pin<&mut dyn AsyncTrait<Output = ()>> = f;
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/mut-is-pointer-like.rs:16:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
...
|
||||
LL | async fn async_dispatch(self: Pin<&mut Self>) -> Self::Output;
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/mut-is-pointer-like.rs:35:56
|
||||
|
|
||||
LL | let x: Pin<&mut dyn AsyncTrait<Output = ()>> = f;
|
||||
| ^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/mut-is-pointer-like.rs:16:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
...
|
||||
LL | async fn async_dispatch(self: Pin<&mut Self>) -> Self::Output;
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= note: required for the cast from `Pin<&mut {async block@$DIR/mut-is-pointer-like.rs:32:32: 32:37}>` to `Pin<&mut dyn AsyncTrait<Output = ()>>`
|
||||
|
||||
error[E0277]: the trait bound `dyn AsyncTrait<Output = ()>: AsyncTrait` is not satisfied
|
||||
--> $DIR/mut-is-pointer-like.rs:36:11
|
||||
|
|
||||
LL | x.async_dispatch().await;
|
||||
| ^^^^^^^^^^^^^^ the trait `AsyncTrait` is not implemented for `dyn AsyncTrait<Output = ()>`
|
||||
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/mut-is-pointer-like.rs:36:9
|
||||
|
|
||||
LL | x.async_dispatch().await;
|
||||
| ^^^^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/mut-is-pointer-like.rs:16:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
...
|
||||
LL | async fn async_dispatch(self: Pin<&mut Self>) -> Self::Output;
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
|
||||
error: aborting due to 4 previous errors; 1 warning emitted
|
||||
|
||||
Some errors have detailed explanations: E0038, E0277.
|
||||
For more information about an error, try `rustc --explain E0038`.
|
||||
|
@ -1,11 +1,9 @@
|
||||
//@ aux-build:block-on.rs
|
||||
//@ edition: 2021
|
||||
//@ run-pass
|
||||
//@ check-run-results
|
||||
//@ known-bug: #133119
|
||||
|
||||
#![allow(refining_impl_trait)]
|
||||
#![feature(async_fn_in_dyn_trait)]
|
||||
//~^ WARN the feature `async_fn_in_dyn_trait` is incomplete
|
||||
|
||||
extern crate block_on;
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
warning: the feature `async_fn_in_dyn_trait` is incomplete and may not be safe to use and/or cause compiler crashes
|
||||
--> $DIR/works.rs:7:12
|
||||
--> $DIR/works.rs:6:12
|
||||
|
|
||||
LL | #![feature(async_fn_in_dyn_trait)]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
@ -7,5 +7,75 @@ LL | #![feature(async_fn_in_dyn_trait)]
|
||||
= note: see issue #133119 <https://github.com/rust-lang/rust/issues/133119> for more information
|
||||
= note: `#[warn(incomplete_features)]` on by default
|
||||
|
||||
warning: 1 warning emitted
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/works.rs:27:34
|
||||
|
|
||||
LL | let x: &dyn AsyncTrait = &"hello, world!";
|
||||
| ^^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/works.rs:14:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
= note: required for the cast from `&&'static str` to `&dyn AsyncTrait`
|
||||
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/works.rs:27:16
|
||||
|
|
||||
LL | let x: &dyn AsyncTrait = &"hello, world!";
|
||||
| ^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/works.rs:14:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/works.rs:28:11
|
||||
|
|
||||
LL | x.async_dispatch().await;
|
||||
| ^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/works.rs:14:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/works.rs:28:9
|
||||
|
|
||||
LL | x.async_dispatch().await;
|
||||
| ^^^^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/works.rs:14:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
|
||||
error: aborting due to 4 previous errors; 1 warning emitted
|
||||
|
||||
For more information about this error, try `rustc --explain E0038`.
|
||||
|
@ -1,7 +1,7 @@
|
||||
//@ edition: 2021
|
||||
//@ known-bug: #133119
|
||||
|
||||
#![feature(async_fn_in_dyn_trait)]
|
||||
//~^ WARN the feature `async_fn_in_dyn_trait` is incomplete
|
||||
|
||||
use std::future::Future;
|
||||
|
||||
@ -19,5 +19,5 @@ impl AsyncTrait for &'static str {
|
||||
|
||||
fn main() {
|
||||
let x: &dyn AsyncTrait = &"hello, world!";
|
||||
//~^ ERROR `impl Future<Output = ()>` needs to have the same ABI as a pointer
|
||||
// FIXME ~^ ERROR `impl Future<Output = ()>` needs to have the same ABI as a pointer
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
warning: the feature `async_fn_in_dyn_trait` is incomplete and may not be safe to use and/or cause compiler crashes
|
||||
--> $DIR/wrong-size.rs:3:12
|
||||
--> $DIR/wrong-size.rs:4:12
|
||||
|
|
||||
LL | #![feature(async_fn_in_dyn_trait)]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
@ -7,15 +7,41 @@ LL | #![feature(async_fn_in_dyn_trait)]
|
||||
= note: see issue #133119 <https://github.com/rust-lang/rust/issues/133119> for more information
|
||||
= note: `#[warn(incomplete_features)]` on by default
|
||||
|
||||
error[E0277]: `impl Future<Output = ()>` needs to have the same ABI as a pointer
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/wrong-size.rs:21:30
|
||||
|
|
||||
LL | let x: &dyn AsyncTrait = &"hello, world!";
|
||||
| ^^^^^^^^^^^^^^^^ `impl Future<Output = ()>` needs to be a pointer-like type
|
||||
| ^^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
= help: the trait `for<'a> PointerLike` is not implemented for `impl Future<Output = ()>`
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/wrong-size.rs:9:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
= note: required for the cast from `&&'static str` to `&dyn AsyncTrait`
|
||||
|
||||
error: aborting due to 1 previous error; 1 warning emitted
|
||||
error[E0038]: the trait `AsyncTrait` is not dyn compatible
|
||||
--> $DIR/wrong-size.rs:21:12
|
||||
|
|
||||
LL | let x: &dyn AsyncTrait = &"hello, world!";
|
||||
| ^^^^^^^^^^^^^^^ `AsyncTrait` is not dyn compatible
|
||||
|
|
||||
note: for a trait to be dyn compatible it needs to allow building a vtable
|
||||
for more information, visit <https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility>
|
||||
--> $DIR/wrong-size.rs:9:14
|
||||
|
|
||||
LL | trait AsyncTrait {
|
||||
| ---------- this trait is not dyn compatible...
|
||||
LL | async fn async_dispatch(&self);
|
||||
| ^^^^^^^^^^^^^^ ...because method `async_dispatch` is `async`
|
||||
= help: consider moving `async_dispatch` to another trait
|
||||
= help: only type `&'static str` implements `AsyncTrait`; consider using it directly instead.
|
||||
|
||||
For more information about this error, try `rustc --explain E0277`.
|
||||
error: aborting due to 2 previous errors; 1 warning emitted
|
||||
|
||||
For more information about this error, try `rustc --explain E0038`.
|
||||
|
@ -1,3 +0,0 @@
|
||||
#![crate_type = "lib"]
|
||||
|
||||
extern crate literal_escaper; //~ ERROR
|
@ -1,13 +0,0 @@
|
||||
error[E0658]: use of unstable library feature `rustc_private`: this crate is being loaded from the sysroot, an unstable location; did you mean to load this crate from crates.io via `Cargo.toml` instead?
|
||||
--> $DIR/literal-escaper.rs:3:1
|
||||
|
|
||||
LL | extern crate literal_escaper;
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
= note: see issue #27812 <https://github.com/rust-lang/rust/issues/27812> for more information
|
||||
= help: add `#![feature(rustc_private)]` to the crate attributes to enable
|
||||
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
|
||||
|
||||
error: aborting due to 1 previous error
|
||||
|
||||
For more information about this error, try `rustc --explain E0658`.
|
@ -1,11 +1,10 @@
|
||||
// ignore-tidy-linelength
|
||||
|
||||
use proc_macro::{ConversionErrorKind, Literal};
|
||||
use proc_macro::Literal;
|
||||
|
||||
pub fn test() {
|
||||
test_display_literal();
|
||||
test_parse_literal();
|
||||
test_str_value_methods();
|
||||
}
|
||||
|
||||
fn test_display_literal() {
|
||||
@ -82,53 +81,3 @@ fn test_parse_literal() {
|
||||
assert!("- 10".parse::<Literal>().is_err());
|
||||
assert!("-'x'".parse::<Literal>().is_err());
|
||||
}
|
||||
|
||||
fn test_str_value_methods() {
|
||||
// Testing `str_value`
|
||||
let lit = "\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.str_value(), Ok("\n".to_string()));
|
||||
|
||||
let lit = "r#\"\n\"#".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.str_value(), Ok("\n".to_string()));
|
||||
|
||||
let lit = "1".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "b\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "c\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
// Testing `cstr_value`
|
||||
let lit = "\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.cstr_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "r#\"\n\"#".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.cstr_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "1".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.cstr_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "b\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.cstr_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "c\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.cstr_value(), Ok(vec![b'\n', 0]));
|
||||
|
||||
// Testing `byte_str_value`
|
||||
let lit = "\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.byte_str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "r#\"\n\"#".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.byte_str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "1".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.byte_str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
|
||||
let lit = "b\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.byte_str_value(), Ok(vec![b'\n']));
|
||||
|
||||
let lit = "c\"\n\"".parse::<Literal>().unwrap();
|
||||
assert_eq!(lit.byte_str_value(), Err(ConversionErrorKind::InvalidLiteralKind));
|
||||
}
|
||||
|
@ -1,7 +1,6 @@
|
||||
//@ edition: 2021
|
||||
|
||||
#![feature(proc_macro_span)]
|
||||
#![feature(proc_macro_value)]
|
||||
#![deny(dead_code)] // catch if a test function is never called
|
||||
|
||||
extern crate proc_macro;
|
||||
|