Mirror of https://github.com/rust-lang/rust.git (synced 2025-02-16 17:03:35 +00:00)
Merge from rustc

commit dc0faecfcf

.github/workflows/dependencies.yml (vendored): 2 changes
@@ -67,7 +67,7 @@ jobs:
       - name: cargo update rustbook
         run: |
           echo -e "\nrustbook dependencies:" >> cargo_update.log
-          cargo update --manifest-path src/tools/rustbook 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log
+          cargo update --manifest-path src/tools/rustbook/Cargo.toml 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log
       - name: upload Cargo.lock artifact for use in PR
         uses: actions/upload-artifact@v4
         with:

@@ -1775,9 +1775,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"

 [[package]]
 name = "indexmap"
-version = "2.2.6"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
+checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
 dependencies = [
  "equivalent",
  "hashbrown",
@@ -3149,6 +3149,7 @@ dependencies = [
  "gimli 0.31.0",
  "object 0.36.2",
  "regex",
+ "serde_json",
  "similar",
  "wasmparser 0.214.0",
 ]

@@ -328,7 +328,7 @@ pub fn parse_asm_args<'a>(
 /// Otherwise, the suggestion will be incorrect.
 fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
     // Tool-only output
-    let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
+    let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
     p.dcx().emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span });
 }

@@ -338,7 +338,7 @@ fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
 /// Otherwise, the suggestion will be incorrect.
 fn err_unsupported_option(p: &Parser<'_>, symbol: Symbol, span: Span) {
     // Tool-only output
-    let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span };
+    let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span };
     p.dcx().emit_err(errors::GlobalAsmUnsupportedOption { span, symbol, full_span });
 }

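Several hunks in this commit replace comparisons of `token.kind` with direct comparisons of the token itself. A minimal sketch of the pattern this relies on, using hypothetical stand-in types rather than the real rustc ones:

    // Comparing a rich token against a bare kind by delegating equality to
    // the `kind` field; this is what lets `p.token == token::Comma` replace
    // `p.token.kind == token::Comma` at the call sites above.
    #[derive(Clone, Copy, PartialEq, Eq)]
    enum TokenKind { Comma }

    struct Token { kind: TokenKind }

    impl PartialEq<TokenKind> for Token {
        fn eq(&self, rhs: &TokenKind) -> bool { self.kind == *rhs }
    }

    fn main() {
        let t = Token { kind: TokenKind::Comma };
        assert!(t == TokenKind::Comma); // reads like the diff's new form
    }
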
@@ -106,9 +106,11 @@ pub struct LlvmArchiveBuilderBuilder;

 impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
     fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a> {
-        // FIXME use ArArchiveBuilder on most targets again once reading thin archives is
-        // implemented
-        if true {
+        // Keeping LlvmArchiveBuilder around in case of a regression caused by using
+        // ArArchiveBuilder.
+        // FIXME(#128955) remove a couple of months after #128936 gets merged in case
+        // no regression is found.
+        if false {
             Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() })
         } else {
             Box::new(ArArchiveBuilder::new(sess, &LLVM_OBJECT_READER))
@@ -198,25 +200,11 @@ static LLVM_OBJECT_READER: ObjectReader = ObjectReader {
     get_xcoff_member_alignment: DEFAULT_OBJECT_READER.get_xcoff_member_alignment,
 };

 fn should_use_llvm_reader(buf: &[u8]) -> bool {
     let is_bitcode = unsafe { llvm::LLVMRustIsBitcode(buf.as_ptr(), buf.len()) };

     // COFF bigobj file, msvc LTO file or import library. See
     // https://github.com/llvm/llvm-project/blob/453f27bc9/llvm/lib/BinaryFormat/Magic.cpp#L38-L51
     let is_unsupported_windows_obj_file = buf.get(0..4) == Some(b"\0\0\xFF\xFF");

     is_bitcode || is_unsupported_windows_obj_file
 }

 #[deny(unsafe_op_in_unsafe_fn)]
 fn get_llvm_object_symbols(
     buf: &[u8],
     f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
 ) -> io::Result<bool> {
     if !should_use_llvm_reader(buf) {
         return (DEFAULT_OBJECT_READER.get_symbols)(buf, f);
     }

     let mut state = Box::new(f);

     let err = unsafe {
@@ -253,18 +241,10 @@ fn get_llvm_object_symbols(
 }

 fn llvm_is_64_bit_object_file(buf: &[u8]) -> bool {
     if !should_use_llvm_reader(buf) {
         return (DEFAULT_OBJECT_READER.is_64_bit_object_file)(buf);
     }

     unsafe { llvm::LLVMRustIs64BitSymbolicFile(buf.as_ptr(), buf.len()) }
 }

 fn llvm_is_ec_object_file(buf: &[u8]) -> bool {
     if !should_use_llvm_reader(buf) {
         return (DEFAULT_OBJECT_READER.is_ec_object_file)(buf);
     }

     unsafe { llvm::LLVMRustIsECObject(buf.as_ptr(), buf.len()) }
 }

@@ -88,7 +88,7 @@ fn make_mir_scope<'ll, 'tcx>(
     let loc = cx.lookup_debug_loc(scope_data.span.lo());
     let file_metadata = file_metadata(cx, &loc.file);

-    let parent_dbg_scope = match scope_data.inlined {
+    let dbg_scope = match scope_data.inlined {
         Some((callee, _)) => {
             // FIXME(eddyb) this would be `self.monomorphize(&callee)`
             // if this is moved to `rustc_codegen_ssa::mir::debuginfo`.
@@ -102,17 +102,15 @@ fn make_mir_scope<'ll, 'tcx>(
                 cx.dbg_scope_fn(callee, callee_fn_abi, None)
             })
         }
-        None => parent_scope.dbg_scope,
-    };
-
-    let dbg_scope = unsafe {
-        llvm::LLVMRustDIBuilderCreateLexicalBlock(
-            DIB(cx),
-            parent_dbg_scope,
-            file_metadata,
-            loc.line,
-            loc.col,
-        )
+        None => unsafe {
+            llvm::LLVMRustDIBuilderCreateLexicalBlock(
+                DIB(cx),
+                parent_scope.dbg_scope,
+                file_metadata,
+                loc.line,
+                loc.col,
+            )
+        },
     };

     let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {

@@ -2,7 +2,7 @@ use std::borrow::Cow;

 use libc::c_uint;
 use rustc_codegen_ssa::debuginfo::type_names::compute_debuginfo_type_name;
-use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo;
+use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo};
 use rustc_codegen_ssa::traits::ConstMethods;
 use rustc_index::IndexVec;
 use rustc_middle::bug;
@@ -12,7 +12,7 @@ use rustc_target::abi::{Align, Endian, Size, TagEncoding, VariantIdx, Variants};
 use smallvec::smallvec;

 use crate::common::CodegenCx;
-use crate::debuginfo::metadata::enums::{tag_base_type, DiscrResult};
+use crate::debuginfo::metadata::enums::DiscrResult;
 use crate::debuginfo::metadata::type_map::{self, Stub, UniqueTypeId};
 use crate::debuginfo::metadata::{
     build_field_di_node, file_metadata, size_and_align_of, type_di_node, unknown_file_metadata,
@@ -190,7 +190,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>(
     let enum_type_and_layout = cx.layout_of(enum_type);
     let enum_type_name = compute_debuginfo_type_name(cx.tcx, enum_type, false);

-    assert!(!wants_c_like_enum_debuginfo(enum_type_and_layout));
+    assert!(!wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout));

     type_map::build_type_with_children(
         cx,
@@ -265,7 +265,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
     let coroutine_type_and_layout = cx.layout_of(coroutine_type);
     let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false);

-    assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout));
+    assert!(!wants_c_like_enum_debuginfo(cx.tcx, coroutine_type_and_layout));

     type_map::build_type_with_children(
         cx,
@@ -381,7 +381,7 @@ fn build_union_fields_for_enum<'ll, 'tcx>(
     tag_field: usize,
     untagged_variant_index: Option<VariantIdx>,
 ) -> SmallVec<&'ll DIType> {
-    let tag_base_type = super::tag_base_type(cx, enum_type_and_layout);
+    let tag_base_type = tag_base_type(cx.tcx, enum_type_and_layout);

     let variant_names_type_di_node = build_variant_names_type_di_node(
         cx,
@@ -676,7 +676,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>(
     let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx);
     let variant_count = (variant_range.start.as_u32()..variant_range.end.as_u32()).len();

-    let tag_base_type = tag_base_type(cx, coroutine_type_and_layout);
+    let tag_base_type = tag_base_type(cx.tcx, coroutine_type_and_layout);

     let variant_names_type_di_node = build_variant_names_type_di_node(
         cx,
@@ -803,7 +803,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>(

     assert_eq!(
         cx.size_and_align_of(enum_type_and_layout.field(cx, tag_field).ty),
-        cx.size_and_align_of(super::tag_base_type(cx, enum_type_and_layout))
+        cx.size_and_align_of(self::tag_base_type(cx.tcx, enum_type_and_layout))
     );

     // ... and a field for the tag. If the tag is 128 bits wide, this will actually

@@ -1,17 +1,15 @@
 use std::borrow::Cow;

 use rustc_codegen_ssa::debuginfo::type_names::{compute_debuginfo_type_name, cpp_like_debuginfo};
-use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo;
+use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo};
 use rustc_hir::def::CtorKind;
 use rustc_index::IndexSlice;
 use rustc_middle::bug;
 use rustc_middle::mir::CoroutineLayout;
-use rustc_middle::ty::layout::{IntegerExt, LayoutOf, PrimitiveExt, TyAndLayout};
+use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, AdtDef, CoroutineArgs, CoroutineArgsExt, Ty, VariantDef};
 use rustc_span::Symbol;
-use rustc_target::abi::{
-    FieldIdx, HasDataLayout, Integer, Primitive, TagEncoding, VariantIdx, Variants,
-};
+use rustc_target::abi::{FieldIdx, TagEncoding, VariantIdx, Variants};

 use super::type_map::{DINodeCreationResult, UniqueTypeId};
 use super::{size_and_align_of, SmallVec};
@@ -39,7 +37,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>(

     let enum_type_and_layout = cx.layout_of(enum_type);

-    if wants_c_like_enum_debuginfo(enum_type_and_layout) {
+    if wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout) {
         return build_c_style_enum_di_node(cx, enum_adt_def, enum_type_and_layout);
     }

@@ -74,7 +72,7 @@ fn build_c_style_enum_di_node<'ll, 'tcx>(
         di_node: build_enumeration_type_di_node(
             cx,
             &compute_debuginfo_type_name(cx.tcx, enum_type_and_layout.ty, false),
-            tag_base_type(cx, enum_type_and_layout),
+            tag_base_type(cx.tcx, enum_type_and_layout),
             enum_adt_def.discriminants(cx.tcx).map(|(variant_index, discr)| {
                 let name = Cow::from(enum_adt_def.variant(variant_index).name.as_str());
                 (name, discr.val)
@@ -85,48 +83,6 @@ fn build_c_style_enum_di_node<'ll, 'tcx>(
         }
     }
 }
-
-/// Extract the type with which we want to describe the tag of the given enum or coroutine.
-fn tag_base_type<'ll, 'tcx>(
-    cx: &CodegenCx<'ll, 'tcx>,
-    enum_type_and_layout: TyAndLayout<'tcx>,
-) -> Ty<'tcx> {
-    assert!(match enum_type_and_layout.ty.kind() {
-        ty::Coroutine(..) => true,
-        ty::Adt(adt_def, _) => adt_def.is_enum(),
-        _ => false,
-    });
-
-    match enum_type_and_layout.layout.variants() {
-        // A single-variant enum has no discriminant.
-        Variants::Single { .. } => {
-            bug!("tag_base_type() called for enum without tag: {:?}", enum_type_and_layout)
-        }
-
-        Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => {
-            // Niche tags are always normalized to unsized integers of the correct size.
-            match tag.primitive() {
-                Primitive::Int(t, _) => t,
-                Primitive::Float(f) => Integer::from_size(f.size()).unwrap(),
-                // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
-                Primitive::Pointer(_) => {
-                    // If the niche is the NULL value of a reference, then `discr_enum_ty` will be
-                    // a RawPtr. CodeView doesn't know what to do with enums whose base type is a
-                    // pointer so we fix this up to just be `usize`.
-                    // DWARF might be able to deal with this but with an integer type we are on
-                    // the safe side there too.
-                    cx.data_layout().ptr_sized_integer()
-                }
-            }
-            .to_ty(cx.tcx, false)
-        }
-
-        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, .. } => {
-            // Direct tags preserve the sign.
-            tag.primitive().to_ty(cx.tcx)
-        }
-    }
-}
-
 /// Build a DW_TAG_enumeration_type debuginfo node, with the given base type and variants.
 /// This is a helper function and does not register anything in the type map by itself.
 ///

@@ -2,7 +2,7 @@ use std::borrow::Cow;

 use libc::c_uint;
 use rustc_codegen_ssa::debuginfo::type_names::compute_debuginfo_type_name;
-use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo;
+use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo};
 use rustc_codegen_ssa::traits::ConstMethods;
 use rustc_middle::bug;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
@@ -11,7 +11,6 @@ use rustc_target::abi::{Size, TagEncoding, VariantIdx, Variants};
 use smallvec::smallvec;

 use crate::common::CodegenCx;
-use crate::debuginfo::metadata::enums::tag_base_type;
 use crate::debuginfo::metadata::type_map::{self, Stub, StubInfo, UniqueTypeId};
 use crate::debuginfo::metadata::{
     file_metadata, size_and_align_of, type_di_node, unknown_file_metadata, visibility_di_flags,
@@ -54,7 +53,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>(

     let visibility_flags = visibility_di_flags(cx, enum_adt_def.did(), enum_adt_def.did());

-    assert!(!wants_c_like_enum_debuginfo(enum_type_and_layout));
+    assert!(!wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout));

     type_map::build_type_with_children(
         cx,
@@ -131,7 +130,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
     let containing_scope = get_namespace_for_item(cx, coroutine_def_id);
     let coroutine_type_and_layout = cx.layout_of(coroutine_type);

-    assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout));
+    assert!(!wants_c_like_enum_debuginfo(cx.tcx, coroutine_type_and_layout));

     let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false);

@@ -321,7 +320,7 @@ fn build_discr_member_di_node<'ll, 'tcx>(
         &Variants::Single { .. } => None,

         &Variants::Multiple { tag_field, .. } => {
-            let tag_base_type = tag_base_type(cx, enum_or_coroutine_type_and_layout);
+            let tag_base_type = tag_base_type(cx.tcx, enum_or_coroutine_type_and_layout);
             let (size, align) = cx.size_and_align_of(tag_base_type);

             unsafe {

@@ -307,10 +307,15 @@ impl<'a> ArchiveBuilder for ArArchiveBuilder<'a> {
             let file_name = String::from_utf8(entry.name().to_vec())
                 .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
             if !skip(&file_name) {
-                self.entries.push((
-                    file_name.into_bytes(),
-                    ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
-                ));
+                if entry.is_thin() {
+                    let member_path = archive_path.parent().unwrap().join(Path::new(&file_name));
+                    self.entries.push((file_name.into_bytes(), ArchiveEntry::File(member_path)));
+                } else {
+                    self.entries.push((
+                        file_name.into_bytes(),
+                        ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
+                    ));
+                }
             }
         }

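The new branch above handles thin archives, whose members are stored as paths relative to the archive rather than as embedded contents, so readers must resolve them next to the archive file. As a rough illustration (the `!<thin>\n` magic is the standard thin-archive header; the helper name is invented):

    // A thin archive announces itself with a distinctive magic string.
    fn is_thin_archive(buf: &[u8]) -> bool {
        buf.starts_with(b"!<thin>\n")
    }

    fn main() {
        assert!(is_thin_archive(b"!<thin>\n..."));
        assert!(!is_thin_archive(b"!<arch>\n...")); // regular ar archive
    }
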
@@ -1,6 +1,7 @@
-use rustc_middle::ty::layout::TyAndLayout;
-use rustc_middle::ty::{self};
-use rustc_target::abi::Size;
+use rustc_middle::bug;
+use rustc_middle::ty::layout::{IntegerExt, PrimitiveExt, TyAndLayout};
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_target::abi::{Integer, Primitive, Size, TagEncoding, Variants};

 // FIXME(eddyb) find a place for this (or a way to replace it).
 pub mod type_names;
@@ -11,13 +12,25 @@ pub mod type_names;
 /// NOTE: This is somewhat inconsistent right now: For empty enums and enums with a single
 /// fieldless variant, we generate DW_TAG_struct_type, although a
 /// DW_TAG_enumeration_type would be a better fit.
-pub fn wants_c_like_enum_debuginfo(enum_type_and_layout: TyAndLayout<'_>) -> bool {
+pub fn wants_c_like_enum_debuginfo<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    enum_type_and_layout: TyAndLayout<'tcx>,
+) -> bool {
     match enum_type_and_layout.ty.kind() {
         ty::Adt(adt_def, _) => {
             if !adt_def.is_enum() {
                 return false;
             }

+            if type_names::cpp_like_debuginfo(tcx)
+                && tag_base_type_opt(tcx, enum_type_and_layout)
+                    .map(|ty| ty.primitive_size(tcx).bits())
+                    == Some(128)
+            {
+                // C++-like debuginfo never uses the C-like representation for 128-bit enums.
+                return false;
+            }
+
             match adt_def.variants().len() {
                 0 => false,
                 1 => {
@@ -33,3 +46,51 @@ pub fn wants_c_like_enum_debuginfo(enum_type_and_layout: TyAndLayout<'_>) -> boo
         _ => false,
     }
 }
+
+/// Extract the type with which we want to describe the tag of the given enum or coroutine.
+pub fn tag_base_type<'tcx>(tcx: TyCtxt<'tcx>, enum_type_and_layout: TyAndLayout<'tcx>) -> Ty<'tcx> {
+    tag_base_type_opt(tcx, enum_type_and_layout).unwrap_or_else(|| {
+        bug!("tag_base_type() called for enum without tag: {:?}", enum_type_and_layout)
+    })
+}
+
+pub fn tag_base_type_opt<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    enum_type_and_layout: TyAndLayout<'tcx>,
+) -> Option<Ty<'tcx>> {
+    assert!(match enum_type_and_layout.ty.kind() {
+        ty::Coroutine(..) => true,
+        ty::Adt(adt_def, _) => adt_def.is_enum(),
+        _ => false,
+    });
+
+    match enum_type_and_layout.layout.variants() {
+        // A single-variant enum has no discriminant.
+        Variants::Single { .. } => None,
+
+        Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => {
+            // Niche tags are always normalized to unsized integers of the correct size.
+            Some(
+                match tag.primitive() {
+                    Primitive::Int(t, _) => t,
+                    Primitive::Float(f) => Integer::from_size(f.size()).unwrap(),
+                    // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
+                    Primitive::Pointer(_) => {
+                        // If the niche is the NULL value of a reference, then `discr_enum_ty` will be
+                        // a RawPtr. CodeView doesn't know what to do with enums whose base type is a
+                        // pointer so we fix this up to just be `usize`.
+                        // DWARF might be able to deal with this but with an integer type we are on
+                        // the safe side there too.
+                        tcx.data_layout.ptr_sized_integer()
+                    }
+                }
+                .to_ty(tcx, false),
+            )
+        }
+
+        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, .. } => {
+            // Direct tags preserve the sign.
+            Some(tag.primitive().to_ty(tcx))
+        }
+    }
+}

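The hunk above splits the tag query into a fallible `tag_base_type_opt` and an infallible `tag_base_type` that treats `None` as a compiler bug. A miniature of that API shape, with stand-in types instead of the real rustc ones:

    // Option-returning core plus a panicking wrapper: callers that know a
    // tag must exist get the terse form, others can handle `None`.
    fn tag_base_type_opt(has_tag: bool) -> Option<&'static str> {
        has_tag.then_some("u32")
    }

    fn tag_base_type(has_tag: bool) -> &'static str {
        tag_base_type_opt(has_tag)
            .unwrap_or_else(|| panic!("tag_base_type() called for enum without tag"))
    }

    fn main() {
        assert_eq!(tag_base_type(true), "u32");
        assert_eq!(tag_base_type_opt(false), None);
    }
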
@@ -85,7 +85,7 @@ fn push_debuginfo_type_name<'tcx>(
             let layout_for_cpp_like_fallback = if cpp_like_debuginfo && def.is_enum() {
                 match tcx.layout_of(ParamEnv::reveal_all().and(t)) {
                     Ok(layout) => {
-                        if !wants_c_like_enum_debuginfo(layout) {
+                        if !wants_c_like_enum_debuginfo(tcx, layout) {
                             Some(layout)
                         } else {
                             // This is a C-like enum so we don't want to use the fallback encoding
@@ -106,6 +106,7 @@ fn push_debuginfo_type_name<'tcx>(

             if let Some(ty_and_layout) = layout_for_cpp_like_fallback {
                 msvc_enum_fallback(
+                    tcx,
                     ty_and_layout,
                     &|output, visited| {
                         push_item_name(tcx, def.did(), true, output);
@@ -421,6 +422,7 @@ fn push_debuginfo_type_name<'tcx>(
             if cpp_like_debuginfo && t.is_coroutine() {
                 let ty_and_layout = tcx.layout_of(ParamEnv::reveal_all().and(t)).unwrap();
                 msvc_enum_fallback(
+                    tcx,
                     ty_and_layout,
                     &|output, visited| {
                         push_closure_or_coroutine_name(tcx, def_id, args, true, output, visited);
@@ -455,12 +457,13 @@ fn push_debuginfo_type_name<'tcx>(
     // debugger. For more information, look in
     // rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs.
     fn msvc_enum_fallback<'tcx>(
+        tcx: TyCtxt<'tcx>,
         ty_and_layout: TyAndLayout<'tcx>,
         push_inner: &dyn Fn(/*output*/ &mut String, /*visited*/ &mut FxHashSet<Ty<'tcx>>),
         output: &mut String,
         visited: &mut FxHashSet<Ty<'tcx>>,
     ) {
-        assert!(!wants_c_like_enum_debuginfo(ty_and_layout));
+        assert!(!wants_c_like_enum_debuginfo(tcx, ty_and_layout));
         output.push_str("enum2$<");
         push_inner(output, visited);
         push_close_angle_bracket(true, output);

@@ -1,3 +1,4 @@
+use std::collections::hash_map::Entry;
 use std::ops::Range;

 use rustc_data_structures::fx::FxHashMap;
@@ -447,6 +448,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         }

         let mut per_local = IndexVec::from_elem(vec![], &self.mir.local_decls);
+        let mut params_seen: FxHashMap<_, Bx::DIVariable> = Default::default();
         for var in &self.mir.var_debug_info {
             let dbg_scope_and_span = if full_debug_info {
                 self.adjusted_span_and_dbg_scope(var.source_info)
@@ -491,7 +493,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     VariableKind::LocalVariable
                 };

-                self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span)
+                if let VariableKind::ArgumentVariable(arg_index) = var_kind {
+                    match params_seen.entry((dbg_scope, arg_index)) {
+                        Entry::Occupied(o) => o.get().clone(),
+                        Entry::Vacant(v) => v
+                            .insert(
+                                self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span),
+                            )
+                            .clone(),
+                    }
+                } else {
+                    self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span)
+                }
             });

             let fragment = if let Some(ref fragment) = var.composite {

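The `params_seen` map above creates one debuginfo variable per `(scope, argument index)` pair and reuses it afterwards. The same entry-API memoization in miniature:

    use std::collections::hash_map::Entry;
    use std::collections::HashMap;

    // Create a value at most once per key; later lookups get a clone.
    fn get_or_create<K: std::hash::Hash + Eq, V: Clone>(
        cache: &mut HashMap<K, V>,
        key: K,
        create: impl FnOnce() -> V,
    ) -> V {
        match cache.entry(key) {
            Entry::Occupied(o) => o.get().clone(),
            Entry::Vacant(v) => v.insert(create()).clone(),
        }
    }

    fn main() {
        let mut cache: HashMap<(u32, u32), String> = HashMap::new();
        let a = get_or_create(&mut cache, (0, 1), || "arg #1".to_string());
        let b = get_or_create(&mut cache, (0, 1), || unreachable!("cached"));
        assert_eq!(a, b);
    }
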
@@ -106,7 +106,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     locals: locals::Locals<'tcx, Bx::Value>,

     /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
-    /// This is `None` if no var`#[non_exhaustive]`iable debuginfo/names are needed.
+    /// This is `None` if no variable debuginfo/names are needed.
     per_local_var_debug_info:
         Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

@@ -1,4 +1,5 @@
 use std::any::Any;
+use std::hash::Hash;

 use rustc_ast::expand::allocator::AllocatorKind;
 use rustc_data_structures::fx::FxIndexMap;
@@ -30,7 +31,7 @@ pub trait BackendTypes {

     // FIXME(eddyb) find a common convention for all of the debuginfo-related
     // names (choose between `Dbg`, `Debug`, `DebugInfo`, `DI` etc.).
-    type DIScope: Copy;
+    type DIScope: Copy + Hash + PartialEq + Eq;
     type DILocation: Copy;
     type DIVariable: Copy;
 }

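The extra `Hash + PartialEq + Eq` bounds on `DIScope` are what allow scopes to participate in hash-map keys such as the `(dbg_scope, arg_index)` pair used for deduplication earlier in this commit. Illustrated with a stand-in type:

    use std::collections::HashMap;

    #[derive(Copy, Clone, Hash, PartialEq, Eq)]
    struct DIScope(u32); // stand-in for a backend scope handle

    fn main() {
        let mut seen: HashMap<(DIScope, u16), &str> = HashMap::new();
        seen.insert((DIScope(7), 1), "created once");
        assert!(seen.contains_key(&(DIScope(7), 1)));
    }
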
@@ -6,7 +6,6 @@
 #![feature(box_patterns)]
 #![feature(decl_macro)]
 #![feature(if_let_guard)]
-#![feature(is_none_or)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(rustdoc_internals)]

@@ -10,7 +10,7 @@ bitflags = "2.4.1"
 either = "1.0"
 elsa = "=1.7.1"
 ena = "0.14.3"
-indexmap = { version = "2.0.0" }
+indexmap = { version = "2.4.0" }
 jobserver_crate = { version = "0.1.28", package = "jobserver" }
 measureme = "11"
 rustc-hash = "1.1.0"

@@ -1154,7 +1154,7 @@ fn check_matcher_core<'tt>(
         && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
         && matches!(
             next_token,
-            TokenTree::Token(token) if token.kind == BinOp(token::BinOpToken::Or)
+            TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
         )
     {
         // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.

@@ -54,18 +54,24 @@ pub(super) fn parse(

     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.trees();
+    let mut trees = input.trees().peekable();
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
         let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
-                let span = match trees.next() {
+                // Not consuming the next token immediately, as it may not be a colon
+                let span = match trees.peek() {
                     Some(&tokenstream::TokenTree::Token(
                         Token { kind: token::Colon, span: colon_span },
                         _,
                     )) => {
+                        // Consume the colon first
+                        trees.next();
+
+                        // It's ok to consume the next tree no matter how,
+                        // since if it's not a token then it will be an invalid declaration.
                         match trees.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((fragment, _)) => {
@@ -125,12 +131,13 @@ pub(super) fn parse(
                                 }
                                 _ => token.span,
                             },
-                            Some(tree) => tree.span(),
-                            None => colon_span,
+                            // Invalid, return a nice source location
+                            _ => colon_span.with_lo(start_sp.lo()),
                         }
                     }
-                    Some(tree) => tree.span(),
-                    None => start_sp,
+                    // Whether it's none or some other tree, it doesn't belong to
+                    // the current meta variable, returning the original span.
+                    _ => start_sp,
                 };

                 result.push(TokenTree::MetaVarDecl(span, ident, None));

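The switch from `trees.next()` to `trees.peek()` lets the parser inspect the next token tree without committing to consume it. The same peek-then-consume idiom in miniature:

    fn main() {
        let mut it = "a:b".chars().peekable();
        let first = it.next().unwrap();
        // Look ahead without consuming; only advance once we know it's a colon.
        let has_colon = it.peek() == Some(&':');
        if has_colon {
            it.next();
        }
        assert_eq!((first, has_colon, it.next()), ('a', true, Some('b')));
    }
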
@@ -578,12 +578,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         EncodeCrossCrate::No, coroutines, experimental!(coroutines)
     ),

-    // `#[pointee]` attribute to designate the pointee type in SmartPointer derive-macro
-    gated!(
-        pointee, Normal, template!(Word), ErrorFollowing,
-        EncodeCrossCrate::No, derive_smart_pointer, experimental!(pointee)
-    ),
-
     // RFC 3543
     // `#[patchable_function_entry(prefix_nops = m, entry_nops = n)]`
     gated!(

@@ -14,7 +14,7 @@ use rustc_middle::span_bug;
 use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt};
 use rustc_middle::ty::{self, GenericArgs, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor};
 use rustc_span::def_id::LocalDefId;
-use rustc_span::Span;
+use rustc_span::{Span, DUMMY_SP};
 use rustc_target::spec::abi::Abi;
 use rustc_trait_selection::error_reporting::traits::ArgKind;
 use rustc_trait_selection::traits;
@@ -539,6 +539,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     /// we identify the `FnOnce<Args, Output = ?Fut>` bound, and if the output type is
     /// an inference variable `?Fut`, we check if that is bounded by a `Future<Output = Ty>`
     /// projection.
+    ///
+    /// This function is actually best-effort with the return type; if we don't find a
+    /// `Future` projection, we still will return arguments that we extracted from the `FnOnce`
+    /// projection, and the output will be an unconstrained type variable instead.
     fn extract_sig_from_projection_and_future_bound(
         &self,
         cause_span: Option<Span>,
@@ -564,24 +568,43 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         };

         // FIXME: We may want to elaborate here, though I assume this will be exceedingly rare.
+        let mut return_ty = None;
         for bound in self.obligations_for_self_ty(return_vid) {
             if let Some(ret_projection) = bound.predicate.as_projection_clause()
                 && let Some(ret_projection) = ret_projection.no_bound_vars()
                 && self.tcx.is_lang_item(ret_projection.def_id(), LangItem::FutureOutput)
             {
-                let sig = projection.rebind(self.tcx.mk_fn_sig(
-                    input_tys,
-                    ret_projection.term.expect_type(),
-                    false,
-                    hir::Safety::Safe,
-                    Abi::Rust,
-                ));
-
-                return Some(ExpectedSig { cause_span, sig });
+                return_ty = Some(ret_projection.term.expect_type());
+                break;
             }
         }

-        None
+        // SUBTLE: If we didn't find a `Future<Output = ...>` bound for the return
+        // vid, we still want to attempt to provide inference guidance for the async
+        // closure's arguments. Instantiate a new vid to plug into the output type.
+        //
+        // You may be wondering, what if it's higher-ranked? Well, given that we
+        // found a type variable for the `FnOnce::Output` projection above, we know
+        // that the output can't mention any of the vars.
+        //
+        // Also note that we use a fresh var here for the signature since the signature
+        // records the output of the *future*, and `return_vid` above is the type
+        // variable of the future, not its output.
+        //
+        // FIXME: We probably should store this signature inference output in a way
+        // that does not misuse a `FnSig` type, but that can be done separately.
+        let return_ty =
+            return_ty.unwrap_or_else(|| self.next_ty_var(cause_span.unwrap_or(DUMMY_SP)));
+
+        let sig = projection.rebind(self.tcx.mk_fn_sig(
+            input_tys,
+            return_ty,
+            false,
+            hir::Safety::Safe,
+            Abi::Rust,
+        ));
+
+        return Some(ExpectedSig { cause_span, sig });
     }

     fn sig_of_closure(

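The rewritten function above is now best-effort about the output type: when no `Future<Output = ...>` bound is found it substitutes a fresh inference variable instead of discarding the whole signature. The control flow, reduced to a sketch with stand-in types:

    // Prefer the discovered output type; otherwise fall back to a fresh
    // variable so the argument types can still guide inference.
    fn output_ty(found: Option<&'static str>, fresh: impl FnOnce() -> &'static str) -> &'static str {
        found.unwrap_or_else(fresh)
    }

    fn main() {
        assert_eq!(output_ty(Some("i32"), || "?fresh"), "i32");
        assert_eq!(output_ty(None, || "?fresh"), "?fresh");
    }
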
@@ -5,7 +5,6 @@
 #![feature(box_patterns)]
 #![feature(control_flow_enum)]
 #![feature(if_let_guard)]
-#![feature(is_none_or)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(try_blocks)]

@@ -188,6 +188,11 @@ impl<I: Idx, T> IndexVec<I, T> {
         let min_new_len = elem.index() + 1;
         self.raw.resize_with(min_new_len, fill_value);
     }
+
+    #[inline]
+    pub fn append(&mut self, other: &mut Self) {
+        self.raw.append(&mut other.raw);
+    }
 }

 /// `IndexVec` is often used as a map, so it provides some map-like APIs.

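`IndexVec::append` simply forwards to `Vec::append`, which moves every element out of the donor vector and leaves it empty; several hunks later in this commit switch `extend(... .drain(..))` call sites to this form. Usage in brief:

    fn main() {
        let mut a = vec![1, 2];
        let mut b = vec![3, 4];
        a.append(&mut b); // moves all of `b`'s elements in one call
        assert_eq!(a, [1, 2, 3, 4]);
        assert!(b.is_empty()); // `b` keeps its allocation but has no elements
    }
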
@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![feature(decl_macro)]
 #![feature(let_chains)]
-#![feature(thread_spawn_unchecked)]
 #![feature(try_blocks)]
 // tidy-alphabetical-end

@@ -1853,7 +1853,7 @@ impl KeywordIdents {
             if !prev_dollar {
                 self.check_ident_token(cx, UnderMacro(true), ident);
             }
-        } else if token.kind == TokenKind::Dollar {
+        } else if *token == TokenKind::Dollar {
             prev_dollar = true;
             continue;
         }

@@ -77,22 +77,18 @@ LLVMRustGetSymbols(char *BufPtr, size_t BufLen, void *State,
   Expected<std::unique_ptr<object::SymbolicFile>> ObjOrErr =
       getSymbolicFile(Buf->getMemBufferRef(), Context);
   if (!ObjOrErr) {
-    Error E = ObjOrErr.takeError();
-    SmallString<0> ErrorBuf;
-    auto Error = raw_svector_ostream(ErrorBuf);
-    Error << E << '\0';
-    return ErrorCallback(Error.str().data());
+    return ErrorCallback(toString(ObjOrErr.takeError()).c_str());
   }
   std::unique_ptr<object::SymbolicFile> Obj = std::move(*ObjOrErr);
+  if (Obj == nullptr) {
+    return 0;
+  }

   for (const object::BasicSymbolRef &S : Obj->symbols()) {
     if (!isArchiveSymbol(S))
       continue;
     if (Error E = S.printName(SymName)) {
-      SmallString<0> ErrorBuf;
-      auto Error = raw_svector_ostream(ErrorBuf);
-      Error << E << '\0';
-      return ErrorCallback(Error.str().data());
+      return ErrorCallback(toString(std::move(E)).c_str());
     }
     SymName << '\0';
     if (void *E = Callback(State, SymNameBuf.str().data())) {

@@ -65,10 +65,9 @@ use crate::query::plumbing::{
 };
 use crate::traits::query::{
     CanonicalAliasGoal, CanonicalPredicateGoal, CanonicalTyGoal,
-    CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal,
-    CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, DropckConstraint,
-    DropckOutlivesResult, MethodAutoderefStepsResult, NoSolution, NormalizationResult,
-    OutlivesBound,
+    CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpNormalizeGoal,
+    CanonicalTypeOpProvePredicateGoal, DropckConstraint, DropckOutlivesResult,
+    MethodAutoderefStepsResult, NoSolution, NormalizationResult, OutlivesBound,
 };
 use crate::traits::{
     specialization_graph, CodegenObligationError, EvaluationResult, ImplSource,
@@ -2090,26 +2089,6 @@ rustc_queries! {
         desc { "evaluating `type_op_ascribe_user_type` `{:?}`", goal.value.value }
     }

-    /// Do not call this query directly: part of the `Eq` type-op
-    query type_op_eq(
-        goal: CanonicalTypeOpEqGoal<'tcx>
-    ) -> Result<
-        &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>,
-        NoSolution,
-    > {
-        desc { "evaluating `type_op_eq` `{:?}`", goal.value.value }
-    }
-
-    /// Do not call this query directly: part of the `Subtype` type-op
-    query type_op_subtype(
-        goal: CanonicalTypeOpSubtypeGoal<'tcx>
-    ) -> Result<
-        &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>,
-        NoSolution,
-    > {
-        desc { "evaluating `type_op_subtype` `{:?}`", goal.value.value }
-    }
-
     /// Do not call this query directly: part of the `ProvePredicate` type-op
     query type_op_prove_predicate(
         goal: CanonicalTypeOpProvePredicateGoal<'tcx>

@@ -305,6 +305,10 @@ impl<'tcx> Const<'tcx> {
         // mir.
         match tcx.at(expr.span).lit_to_const(lit_input) {
             Ok(c) => return Some(c),
+            Err(_) if lit_input.ty.has_aliases() => {
+                // allow the `ty` to be an alias type, though we cannot handle it here
+                return None;
+            }
             Err(e) => {
                 tcx.dcx().span_delayed_bug(
                     expr.span,

@@ -970,6 +970,10 @@ impl<'tcx> rustc_type_ir::inherent::Ty<TyCtxt<'tcx>> for Ty<'tcx> {

 /// Type utilities
 impl<'tcx> Ty<'tcx> {
+    // It would be nicer if this returned the value instead of a reference,
+    // like how `Predicate::kind` and `Region::kind` do. (It would result in
+    // many fewer subsequent dereferences.) But that gives a small but
+    // noticeable performance hit. See #126069 for details.
     #[inline(always)]
     pub fn kind(self) -> &'tcx TyKind<'tcx> {
         self.0.0

@@ -78,6 +78,8 @@ use rustc_middle::mir::{self, dump_mir, MirPass};
 use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt};
 use rustc_target::abi::{FieldIdx, VariantIdx};

+use crate::pass_manager::validate_body;
+
 pub struct ByMoveBody;

 impl<'tcx> MirPass<'tcx> for ByMoveBody {
@@ -131,20 +133,40 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
             |(parent_field_idx, parent_capture), (child_field_idx, child_capture)| {
                 // Store this set of additional projections (fields and derefs).
                 // We need to re-apply them later.
-                let child_precise_captures =
-                    &child_capture.place.projections[parent_capture.place.projections.len()..];
+                let mut child_precise_captures = child_capture.place.projections
+                    [parent_capture.place.projections.len()..]
+                    .to_vec();

-                // If the parent captures by-move, and the child captures by-ref, then we
-                // need to peel an additional `deref` off of the body of the child.
-                let needs_deref = child_capture.is_by_ref() && !parent_capture.is_by_ref();
-                if needs_deref {
-                    assert_ne!(
-                        coroutine_kind,
-                        ty::ClosureKind::FnOnce,
+                // If the parent capture is by-ref, then we need to apply an additional
+                // deref before applying any further projections to this place.
+                if parent_capture.is_by_ref() {
+                    child_precise_captures.insert(
+                        0,
+                        Projection { ty: parent_capture.place.ty(), kind: ProjectionKind::Deref },
+                    );
+                }
+                // If the child capture is by-ref, then we need to apply a "ref"
+                // projection (i.e. `&`) at the end. But wait! We don't have that
+                // as a projection kind. So instead, we can apply its dual and
+                // *peel* a deref off of the place when it shows up in the MIR body.
+                // Luckily, by construction this is always possible.
+                let peel_deref = if child_capture.is_by_ref() {
+                    assert!(
+                        parent_capture.is_by_ref() || coroutine_kind != ty::ClosureKind::FnOnce,
                         "`FnOnce` coroutine-closures return coroutines that capture from \
                          their body; it will always result in a borrowck error!"
                     );
-                }
+                    true
+                } else {
+                    false
+                };

+                // Regarding the behavior above, you may think that it's redundant to both
+                // insert a deref and then peel a deref if the parent and child are both
+                // captured by-ref. This would be correct, except for the case where we have
+                // precise capturing projections, since the inserted deref is to the *beginning*
+                // and the peeled deref is at the *end*. I cannot seem to actually find a
+                // case where this happens, though, but let's keep this code flexible.
+
                 // Finally, store the type of the parent's captured place. We need
                 // this when building the field projection in the MIR body later on.
@@ -164,7 +186,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
                 (
                     FieldIdx::from_usize(parent_field_idx + num_args),
                     parent_capture_ty,
-                    needs_deref,
+                    peel_deref,
                     child_precise_captures,
                 ),
             )
@@ -192,6 +214,10 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {
         let mut by_move_body = body.clone();
         MakeByMoveBody { tcx, field_remapping, by_move_coroutine_ty }.visit_body(&mut by_move_body);
         dump_mir(tcx, false, "coroutine_by_move", &0, &by_move_body, |_, _| Ok(()));
+
+        // Let's just always validate this body.
+        validate_body(tcx, &mut by_move_body, "Initial coroutine_by_move body".to_string());
+
         // FIXME: use query feeding to generate the body right here and then only store the `DefId` of the new body.
         by_move_body.source = mir::MirSource::from_instance(InstanceKind::CoroutineKindShim {
             coroutine_def_id: coroutine_def_id.to_def_id(),
@@ -202,7 +228,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody {

 struct MakeByMoveBody<'tcx> {
     tcx: TyCtxt<'tcx>,
-    field_remapping: UnordMap<FieldIdx, (FieldIdx, Ty<'tcx>, bool, &'tcx [Projection<'tcx>])>,
+    field_remapping: UnordMap<FieldIdx, (FieldIdx, Ty<'tcx>, bool, Vec<Projection<'tcx>>)>,
     by_move_coroutine_ty: Ty<'tcx>,
 }

@@ -223,14 +249,14 @@ impl<'tcx> MutVisitor<'tcx> for MakeByMoveBody<'tcx> {
         if place.local == ty::CAPTURE_STRUCT_LOCAL
             && let Some((&mir::ProjectionElem::Field(idx, _), projection)) =
                 place.projection.split_first()
-            && let Some(&(remapped_idx, remapped_ty, needs_deref, bridging_projections)) =
+            && let Some(&(remapped_idx, remapped_ty, peel_deref, ref bridging_projections)) =
                 self.field_remapping.get(&idx)
         {
             // As noted before, if the parent closure captures a field by value, and
             // the child captures a field by ref, then for the by-move body we're
             // generating, we also are taking that field by value. Peel off a deref,
             // since a layer of ref'ing has now become redundant.
-            let final_projections = if needs_deref {
+            let final_projections = if peel_deref {
                 let Some((mir::ProjectionElem::Deref, projection)) = projection.split_first()
                 else {
                     bug!(

@@ -726,7 +726,7 @@ impl<'tcx> Inliner<'tcx> {

         // Insert all of the (mapped) parts of the callee body into the caller.
         caller_body.local_decls.extend(callee_body.drain_vars_and_temps());
-        caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..));
+        caller_body.source_scopes.append(&mut callee_body.source_scopes);
         if self
             .tcx
             .sess
@@ -740,7 +740,7 @@ impl<'tcx> Inliner<'tcx> {
             // still getting consistent results from the mir-opt tests.
             caller_body.var_debug_info.append(&mut callee_body.var_debug_info);
         }
-        caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..));
+        caller_body.basic_blocks_mut().append(callee_body.basic_blocks_mut());

         caller_body[callsite.block].terminator = Some(Terminator {
             source_info: callsite.source_info,

@@ -371,7 +371,7 @@ fn merge_codegen_units<'tcx>(
         // Move the items from `cgu_src` to `cgu_dst`. Some of them may be
        // duplicate inlined items, in which case the destination CGU is
         // unaffected. Recalculate size estimates afterwards.
-        cgu_dst.items_mut().extend(cgu_src.items_mut().drain(..));
+        cgu_dst.items_mut().append(cgu_src.items_mut());
         cgu_dst.compute_size_estimate();

         // Record that `cgu_dst` now contains all the stuff that was in
@@ -410,7 +410,7 @@ fn merge_codegen_units<'tcx>(
         // Move the items from `smallest` to `second_smallest`. Some of them
         // may be duplicate inlined items, in which case the destination CGU is
         // unaffected. Recalculate size estimates afterwards.
-        second_smallest.items_mut().extend(smallest.items_mut().drain(..));
+        second_smallest.items_mut().append(smallest.items_mut());
         second_smallest.compute_size_estimate();

         // Don't update `cgu_contents`, that's only for incremental builds.

@@ -229,7 +229,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
                 } else {
                     let this_spacing = if next_tok.is_punct() {
                         Spacing::Joint
-                    } else if next_tok.kind == token::Eof {
+                    } else if next_tok == token::Eof {
                         Spacing::Alone
                     } else {
                         Spacing::JointHidden

@@ -8,7 +8,10 @@ use rustc_span::{sym, BytePos, Span};
 use thin_vec::ThinVec;
 use tracing::debug;

-use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle};
+use super::{
+    AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
+    UsePreAttrPos,
+};
 use crate::{errors, fluent_generated as fluent, maybe_whole};

 // Public for rustfmt usage
@@ -162,7 +165,7 @@ impl<'a> Parser<'a> {
             }
             loop {
                 // skip any other attributes, we want the item
-                if snapshot.token.kind == token::Pound {
+                if snapshot.token == token::Pound {
                     if let Err(err) = snapshot.parse_attribute(InnerAttrPolicy::Permitted) {
                         err.cancel();
                         return Some(replacement_span);
@@ -257,7 +260,8 @@ impl<'a> Parser<'a> {
     pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
         maybe_whole!(self, NtMeta, |attr| attr.into_inner());

-        let do_parse = |this: &mut Self, _empty_attrs| {
+        // Attr items don't have attributes.
+        self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
             let is_unsafe = this.eat_keyword(kw::Unsafe);
             let unsafety = if is_unsafe {
                 let unsafe_span = this.prev_token.span;
@@ -273,10 +277,12 @@ impl<'a> Parser<'a> {
             if is_unsafe {
                 this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
             }
-            Ok((ast::AttrItem { unsafety, path, args, tokens: None }, false))
-        };
-        // Attr items don't have attributes.
-        self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
+            Ok((
+                ast::AttrItem { unsafety, path, args, tokens: None },
+                Trailing::No,
+                UsePreAttrPos::No,
+            ))
+        })
     }

     /// Parses attributes that appear after the opening of an item. These should
@@ -309,8 +315,8 @@ impl<'a> Parser<'a> {
         };
         if let Some(attr) = attr {
             // If we are currently capturing tokens (i.e. we are within a call to
-            // `Parser::collect_tokens_trailing_tokens`) record the token positions of this
-            // inner attribute, for possible later processing in a `LazyAttrTokenStream`.
+            // `Parser::collect_tokens`) record the token positions of this inner attribute,
+            // for possible later processing in a `LazyAttrTokenStream`.
             if let Capturing::Yes = self.capture_state.capturing {
                 let end_pos = self.num_bump_calls;
                 let parser_range = ParserRange(start_pos..end_pos);
@@ -343,7 +349,7 @@ impl<'a> Parser<'a> {

         // Presumably, the majority of the time there will only be one attr.
         let mut expanded_attrs = Vec::with_capacity(1);
-        while self.token.kind != token::Eof {
+        while self.token != token::Eof {
             let lo = self.token.span;
             let item = self.parse_attr_item(ForceCollect::Yes)?;
             expanded_attrs.push((item, lo.to(self.prev_token.span)));
@@ -359,7 +365,7 @@ impl<'a> Parser<'a> {
     pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
         // Presumably, the majority of the time there will only be one attr.
         let mut nmis = ThinVec::with_capacity(1);
-        while self.token.kind != token::Eof {
+        while self.token != token::Eof {
             nmis.push(self.parse_meta_item_inner()?);
             if !self.eat(&token::Comma) {
                 break;

@ -12,9 +12,23 @@ use rustc_span::{sym, Span, DUMMY_SP};
|
||||
|
||||
use super::{
|
||||
Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
|
||||
TokenCursor,
|
||||
TokenCursor, Trailing,
|
||||
};
|
||||
|
||||
// When collecting tokens, this fully captures the start point. Usually its
|
||||
// just after outer attributes, but occasionally it's before.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct CollectPos {
|
||||
start_token: (Token, Spacing),
|
||||
cursor_snapshot: TokenCursor,
|
||||
start_pos: u32,
|
||||
}
|
||||
|
||||
pub(super) enum UsePreAttrPos {
|
||||
No,
|
||||
Yes,
|
||||
}
|
||||
|
||||
/// A wrapper type to ensure that the parser handles outer attributes correctly.
|
||||
/// When we parse outer attributes, we need to ensure that we capture tokens
|
||||
/// for the attribute target. This allows us to perform cfg-expansion on
|
||||
@ -22,30 +36,32 @@ use super::{
|
||||
///
|
||||
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
|
||||
/// Parsing code can only get access to the underlying attributes
|
||||
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
|
||||
/// by passing an `AttrWrapper` to `collect_tokens`.
|
||||
/// This makes it difficult to accidentally construct an AST node
|
||||
/// (which stores an `ast::AttrVec`) without first collecting tokens.
|
||||
///
|
||||
/// This struct has its own module, to ensure that the parser code
|
||||
/// cannot directly access the `attrs` field.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttrWrapper {
|
||||
pub(super) struct AttrWrapper {
|
||||
attrs: AttrVec,
|
||||
// The start of the outer attributes in the parser's token stream.
|
||||
// This lets us create a `NodeReplacement` for the entire attribute
|
||||
// target, including outer attributes.
|
||||
start_pos: u32,
|
||||
// target, including outer attributes. `None` if there are no outer
|
||||
// attributes.
|
||||
start_pos: Option<u32>,
|
||||
}
|
||||
|
||||
impl AttrWrapper {
|
||||
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
|
||||
AttrWrapper { attrs, start_pos }
|
||||
}
|
||||
pub fn empty() -> AttrWrapper {
|
||||
AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
|
||||
AttrWrapper { attrs, start_pos: Some(start_pos) }
|
||||
}
|
||||
|
||||
pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
|
||||
pub(super) fn empty() -> AttrWrapper {
|
||||
AttrWrapper { attrs: AttrVec::new(), start_pos: None }
|
||||
}
|
||||
|
||||
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
|
||||
psess.dcx().span_delayed_bug(
|
||||
self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
|
||||
"AttrVec is taken for recovery but no error is produced",
|
||||
@ -56,12 +72,12 @@ impl AttrWrapper {
|
||||
|
||||
/// Prepend `self.attrs` to `attrs`.
|
||||
// FIXME: require passing an NT to prevent misuse of this method
|
||||
pub(crate) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
|
||||
pub(super) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
|
||||
mem::swap(attrs, &mut self.attrs);
|
||||
attrs.extend(self.attrs);
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
pub(super) fn is_empty(&self) -> bool {
|
||||
self.attrs.is_empty()
|
||||
}
|
||||
}
|
||||
@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
|
||||
}
|
||||
|
||||
// From a value of this type we can reconstruct the `TokenStream` seen by the
|
||||
// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
|
||||
// `f` callback passed to a call to `Parser::collect_tokens`, by
|
||||
// replaying the getting of the tokens. This saves us producing a `TokenStream`
|
||||
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
|
||||
// passed to a proc macro. In practice, token stream creation happens rarely
|
||||
@ -166,16 +182,30 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
|
||||
}
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
pub(super) fn collect_pos(&self) -> CollectPos {
|
||||
CollectPos {
|
||||
start_token: (self.token.clone(), self.token_spacing),
|
||||
cursor_snapshot: self.token_cursor.clone(),
|
||||
start_pos: self.num_bump_calls,
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses code with `f`. If appropriate, it records the tokens (in
|
||||
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
|
||||
/// via the `HasTokens` trait. The second (bool) part of the callback's
|
||||
/// via the `HasTokens` trait. The `Trailing` part of the callback's
|
||||
/// result indicates if an extra token should be captured, e.g. a comma or
|
||||
/// semicolon.
|
||||
/// semicolon. The `UsePreAttrPos` part of the callback's result indicates
|
||||
/// if we should use `pre_attr_pos` as the collection start position (only
|
||||
/// required in a few cases).
|
||||
///
|
||||
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
|
||||
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
|
||||
/// details.
|
||||
///
|
||||
/// `pre_attr_pos` is the position before the outer attributes (or the node
|
||||
/// itself, if no outer attributes are present). It is only needed if `f`
|
||||
/// can return `UsePreAttrPos::Yes`.
|
||||
///
|
||||
/// Note: If your callback consumes an opening delimiter (including the
|
||||
/// case where `self.token` is an opening delimiter on entry to this
|
||||
/// function), you must also consume the corresponding closing delimiter.
|
||||
@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
|
||||
/// } // 32..33
|
||||
/// } // 33..34
|
||||
/// ```
|
||||
pub fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
|
||||
pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
|
||||
&mut self,
|
||||
pre_attr_pos: Option<CollectPos>,
|
||||
attrs: AttrWrapper,
|
||||
force_collect: ForceCollect,
|
||||
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, bool)>,
|
||||
f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
|
||||
) -> PResult<'a, R> {
|
||||
// We must collect if anything could observe the collected tokens, i.e.
|
||||
// if any of the following conditions hold.
|
||||
@ -220,23 +251,20 @@ impl<'a> Parser<'a> {
|
||||
return Ok(f(self, attrs.attrs)?.0);
|
||||
}
|
||||
|
||||
let start_token = (self.token.clone(), self.token_spacing);
|
||||
let cursor_snapshot = self.token_cursor.clone();
|
||||
let start_pos = self.num_bump_calls;
|
||||
let mut collect_pos = self.collect_pos();
|
||||
let has_outer_attrs = !attrs.attrs.is_empty();
|
||||
let parser_replacements_start = self.capture_state.parser_replacements.len();

// We set and restore `Capturing::Yes` on either side of the call to
// `f`, so we can distinguish the outermost call to
// `collect_tokens_trailing_token` (e.g. parsing `m` in the example
// above) from any inner (indirectly recursive) calls (e.g. parsing `g`
// in the example above). This distinction is used below and in
// `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing) = {
// `f`, so we can distinguish the outermost call to `collect_tokens`
// (e.g. parsing `m` in the example above) from any inner (indirectly
// recursive) calls (e.g. parsing `g` in the example above). This
// distinction is used below and in `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing, use_pre_attr_pos) = {
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
let ret_and_trailing = f(self, attrs.attrs);
let res = f(self, attrs.attrs);
self.capture_state.capturing = prev_capturing;
ret_and_trailing?
res?
};

// When we're not in `capture_cfg` mode, then skip collecting and

@@ -279,10 +307,18 @@ impl<'a> Parser<'a> {
return Ok(ret);
}

// Replace the post-attribute collection start position with the
// pre-attribute position supplied, if `f` indicated it is necessary.
// (The caller is responsible for providing a non-`None` `pre_attr_pos`
// if this is a possibility.)
if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
collect_pos = pre_attr_pos.unwrap();
}

let parser_replacements_end = self.capture_state.parser_replacements.len();

assert!(
!(self.break_last_token && capture_trailing),
!(self.break_last_token && matches!(capture_trailing, Trailing::Yes)),
"Cannot set break_last_token and have trailing token"
);

@@ -294,7 +330,7 @@ impl<'a> Parser<'a> {
// `AttrTokenStream`, we will create the proper token.
+ self.break_last_token as u32;

let num_calls = end_pos - start_pos;
let num_calls = end_pos - collect_pos.start_pos;

// Take the captured `ParserRange`s for any inner attributes that we parsed in
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
@@ -328,7 +364,9 @@ impl<'a> Parser<'a> {
.iter()
.cloned()
.chain(inner_attr_parser_replacements.iter().cloned())
.map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
.map(|(parser_range, data)| {
(NodeRange::new(parser_range, collect_pos.start_pos), data)
})
.collect()
};

@@ -355,9 +393,9 @@ impl<'a> Parser<'a> {
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).

let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
start_token,
start_token: collect_pos.start_token,
cursor_snapshot: collect_pos.cursor_snapshot,
num_calls,
cursor_snapshot,
break_last_token: self.break_last_token,
node_replacements,
});
@@ -368,9 +406,9 @@ impl<'a> Parser<'a> {
}

// If `capture_cfg` is set and we're inside a recursive call to
// `collect_tokens_trailing_token`, then we need to register a replace range
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
// on the captured token stream.
// `collect_tokens`, then we need to register a replace range if we
// have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager
// cfg-expansion on the captured token stream.
if self.capture_cfg
&& matches!(self.capture_state.capturing, Capturing::Yes)
&& has_cfg_or_cfg_attr(ret.attrs())
@@ -389,7 +427,8 @@ impl<'a> Parser<'a> {
// Set things up so that the entire AST node that we just parsed, including attributes,
// will be replaced with `target` in the lazy token stream. This will allow us to
// cfg-expand this AST node.
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let start_pos =
if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state
.parser_replacements
@@ -490,7 +529,6 @@ mod size_asserts {

use super::*;
// tidy-alphabetical-start
static_assert_size!(AttrWrapper, 16);
static_assert_size!(LazyAttrTokenStreamImpl, 96);
// tidy-alphabetical-end
}
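The token-count arithmetic in the hunks above is easy to check on concrete numbers; a minimal, runnable sketch with hypothetical positions (`start_pos`, `end_pos`, and the `break_last_token` adjustment are modeled as plain integers, not the real parser state):

// `break_last_token` accounts for a compound token such as `>>` that was
// split in two: the extra half-token costs one more notional bump call.
fn main() {
    let start_pos: u32 = 10;
    let mut end_pos: u32 = 25;
    let break_last_token = true;
    end_pos += break_last_token as u32;
    let num_calls = end_pos - start_pos;
    assert_eq!(num_calls, 16);
}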
@@ -474,8 +474,8 @@ impl<'a> Parser<'a> {
// If this isn't the case however, and the suggestion is a token the
// content of which is the same as the found token's, we remove it as well.
if !eq {
if let TokenType::Token(kind) = &token {
if kind == &self.token.kind {
if let TokenType::Token(kind) = token {
if self.token == *kind {
return false;
}
}
@@ -506,7 +506,7 @@ impl<'a> Parser<'a> {
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
// The current token is in the same line as the prior token, not recoverable.
} else if [token::Comma, token::Colon].contains(&self.token.kind)
&& self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
&& self.prev_token == token::CloseDelim(Delimiter::Parenthesis)
{
// Likely typo: The current token is on a new line and is expected to be
// `.`, `;`, `?`, or an operator after a close delimiter token.
@@ -518,7 +518,7 @@ impl<'a> Parser<'a> {
// https://github.com/rust-lang/rust/issues/72253
} else if self.look_ahead(1, |t| {
t == &token::CloseDelim(Delimiter::Brace)
|| t.can_begin_expr() && t.kind != token::Colon
|| t.can_begin_expr() && *t != token::Colon
}) && [token::Comma, token::Colon].contains(&self.token.kind)
{
// Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
@@ -562,7 +562,7 @@ impl<'a> Parser<'a> {
}
}

if self.token.kind == TokenKind::EqEq
if self.token == TokenKind::EqEq
&& self.prev_token.is_ident()
&& expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
{
@@ -655,9 +655,9 @@ impl<'a> Parser<'a> {
// positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
// that in the parser requires unbounded lookahead, so we only add a hint to the existing
// error rather than replacing it entirely.
if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
if ((self.prev_token == TokenKind::Ident(sym::c, IdentIsRaw::No)
&& matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
|| (self.prev_token == TokenKind::Ident(sym::cr, IdentIsRaw::No)
&& matches!(
&self.token.kind,
TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@@ -673,7 +673,7 @@ impl<'a> Parser<'a> {
// `pub` may be used for an item or `pub(crate)`
if self.prev_token.is_ident_named(sym::public)
&& (self.token.can_begin_item()
|| self.token.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
|| self.token == TokenKind::OpenDelim(Delimiter::Parenthesis))
{
err.span_suggestion_short(
self.prev_token.span,
@@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
),
);
if self.token == token::Pound
&& self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket))
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
{
// We have
// #[attr]
@@ -867,7 +867,7 @@ impl<'a> Parser<'a> {
let str_span = self.prev_token.span;
let mut span = self.token.span;
let mut count = 0;
while self.token.kind == TokenKind::Pound
while self.token == TokenKind::Pound
&& !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo()))
{
span = span.with_hi(self.token.span.hi());
@@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
return;
}

if token::PathSep == self.token.kind && segment.args.is_none() {
if self.token == token::PathSep && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
let lo = self.token.span;
@@ -1176,13 +1176,11 @@ impl<'a> Parser<'a> {
let span = lo.to(self.prev_token.span);
// Detect trailing `>` like in `x.collect::Vec<_>>()`.
let mut trailing_span = self.prev_token.span.shrink_to_hi();
while self.token.kind == token::BinOp(token::Shr)
|| self.token.kind == token::Gt
{
while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
trailing_span = trailing_span.to(self.token.span);
self.bump();
}
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Recover from bad turbofish: `foo.collect::Vec<_>()`.
segment.args = Some(AngleBracketedArgs { args, span }.into());

@@ -1430,7 +1428,7 @@ impl<'a> Parser<'a> {
self.restore_snapshot(snapshot);
}
}
return if token::PathSep == self.token.kind {
return if self.token == token::PathSep {
// We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::`
if let ExprKind::Binary(o, ..) = inner_op.kind
@@ -1462,7 +1460,7 @@ impl<'a> Parser<'a> {
Err(self.dcx().create_err(err))
}
}
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(`
if let ExprKind::Binary(o, ..) = inner_op.kind
@@ -1528,7 +1526,7 @@ impl<'a> Parser<'a> {
];
self.consume_tts(1, &modifiers);

if self.token.kind == token::Eof {
if self.token == token::Eof {
// Not entirely sure that what we consumed were fn arguments, rollback.
self.restore_snapshot(snapshot);
Err(())
@@ -1811,7 +1809,7 @@ impl<'a> Parser<'a> {
/// This function gets called in places where a semicolon is NOT expected and if there's a
/// semicolon it emits the appropriate error and returns true.
pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool {
if self.token.kind != TokenKind::Semi {
if self.token != TokenKind::Semi {
return false;
}

@@ -2405,10 +2403,10 @@ impl<'a> Parser<'a> {
modifier: &[(token::TokenKind, i64)],
) {
while acc > 0 {
if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
if let Some((_, val)) = modifier.iter().find(|(t, _)| self.token == *t) {
acc += *val;
}
if self.token.kind == token::Eof {
if self.token == token::Eof {
break;
}
self.bump();
@@ -2489,13 +2487,14 @@ impl<'a> Parser<'a> {
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
let start = self.token.span;
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
err.span_label(
start.shrink_to_lo(),
"while parsing a const generic argument starting here",
);
err
})?;
let (expr, _) =
self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
err.span_label(
start.shrink_to_lo(),
"while parsing a const generic argument starting here",
);
err
})?;
if !self.expr_is_valid_const_arg(&expr) {
self.dcx().emit_err(ConstGenericWithoutBraces {
span: expr.span,
@@ -2598,7 +2597,7 @@ impl<'a> Parser<'a> {
}
})
.is_some()
|| self.token.kind == TokenKind::Dot;
|| self.token == TokenKind::Dot;
// This will be true when a trait object type `Foo +` or a path which was a `const fn` with
// type params has been parsed.
let was_op =
@@ -2615,9 +2614,9 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
})() {
Ok(expr) => {
Ok((expr, _)) => {
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
if token::EqEq == snapshot.token.kind {
if snapshot.token == token::EqEq {
err.span_suggestion(
snapshot.token.span,
"if you meant to use an associated type binding, replace `==` with `=`",
@@ -2627,7 +2626,7 @@ impl<'a> Parser<'a> {
let guar = err.emit();
let value = self.mk_expr_err(start.to(expr.span), guar);
return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
} else if token::Colon == snapshot.token.kind
} else if snapshot.token == token::Colon
&& expr.span.lo() == snapshot.token.span.hi()
&& matches!(expr.kind, ExprKind::Path(..))
{
@@ -2642,8 +2641,7 @@ impl<'a> Parser<'a> {
return Ok(GenericArg::Type(
self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
));
} else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg()
{
} else if self.token == token::Comma || self.token.kind.should_end_const_arg() {
// Avoid the following output by checking that we consumed a full const arg:
// help: expressions must be enclosed in braces to be used as const generic
// arguments
@@ -2674,7 +2672,7 @@ impl<'a> Parser<'a> {
})() {
// Since we don't know the exact reason why we failed to parse the type or the
// expression, employ a simple heuristic to weed out some pathological cases.
Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
self.restore_snapshot(snapshot);
Some(expr)
}
@@ -2846,8 +2844,8 @@ impl<'a> Parser<'a> {
pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
// Check for `'a : {`
if !(self.check_lifetime()
&& self.look_ahead(1, |tok| tok.kind == token::Colon)
&& self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace)))
&& self.look_ahead(1, |t| *t == token::Colon)
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace)))
{
return false;
}
@@ -3001,7 +2999,7 @@ impl<'a> Parser<'a> {
// >>>>>>>
let mut end = None;
loop {
if self.token.kind == TokenKind::Eof {
if self.token == TokenKind::Eof {
break;
}
if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))
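Most hunks in this file swap `self.token.kind == k` for `self.token == k`; that shorthand presupposes a `PartialEq<TokenKind>` impl on `Token`. A minimal, self-contained sketch of that shape (the two types here are simplified stand-ins, not the rustc definitions):

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum TokenKind { Comma, Semi, Eof }

#[derive(Clone, Copy, Debug)]
struct Token { kind: TokenKind }

// Comparing a token directly against a kind delegates to the kind field.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool {
        self.kind == *rhs
    }
}

fn main() {
    let token = Token { kind: TokenKind::Comma };
    assert!(token == TokenKind::Comma); // no `.kind` needed at the call site
    assert!(token != TokenKind::Semi);
}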
@@ -36,7 +36,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenType, Trailing,
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
};
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};

@@ -59,15 +59,30 @@ impl<'a> Parser<'a> {
self.current_closure.take();

let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::empty(), attrs)
self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
}

/// Parses an expression, forcing tokens to be collected.
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
self.current_closure.take();

// If the expression is associative (e.g. `1 + 2`), then any preceding
// outer attribute actually belongs to the first inner sub-expression.
// In which case we must use the pre-attr pos to include the attribute
// in the collected tokens for the outer expression.
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _empty_attrs| {
let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
let use_pre_attr_pos =
if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
Ok((expr, Trailing::No, use_pre_attr_pos))
},
)
}
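The comment above is the heart of the pre-attr-position change; a small runnable illustration of why re-anchoring matters for `#[attr] 1 + 2` (token positions are hypothetical, modeled as indices into a slice):

fn main() {
    let tokens = ["#[attr]", "1", "+", "2"];
    let pre_attr_pos = 0; // saved before the outer attributes are parsed
    let post_attr_pos = 1; // where collection would otherwise start
    // Using the pre-attr position keeps the attribute inside the range
    // captured for the whole associative expression.
    assert_eq!(&tokens[pre_attr_pos..], ["#[attr]", "1", "+", "2"]);
    assert_eq!(&tokens[post_attr_pos..], ["1", "+", "2"]);
}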
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
@@ -77,7 +92,7 @@ impl<'a> Parser<'a> {
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => Ok(expr),
Ok((expr, _)) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
@@ -104,18 +119,20 @@ impl<'a> Parser<'a> {
&mut self,
r: Restrictions,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
}

/// Parses an associative expression with operators of at least `min_prec` precedence.
/// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
/// followed by a subexpression (e.g. `1 + 2`).
pub(super) fn parse_expr_assoc_with(
&mut self,
min_prec: usize,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let lhs = if self.token.is_range_separator() {
return self.parse_expr_prefix_range(attrs);
return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
} else {
self.parse_expr_prefix(attrs)?
};
@@ -123,15 +140,17 @@ impl<'a> Parser<'a> {
}

/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
/// of at least `min_prec` precedence.
/// of at least `min_prec` precedence. The `bool` in the return value indicates if something
/// was actually parsed.
pub(super) fn parse_expr_assoc_rest_with(
&mut self,
min_prec: usize,
starts_stmt: bool,
mut lhs: P<Expr>,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let mut parsed_something = false;
if !self.should_continue_as_assoc_expr(&lhs) {
return Ok(lhs);
return Ok((lhs, parsed_something));
}

self.expected_tokens.push(TokenType::Operator);
@@ -156,16 +175,17 @@ impl<'a> Parser<'a> {
self.err_larrow_operator(self.token.span);
}

parsed_something = true;
self.bump();
if op.node.is_comparison() {
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
return Ok(expr);
return Ok((expr, parsed_something));
}
}

// Look for JS' `===` and `!==` and recover
if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
&& self.token.kind == token::Eq
&& self.token == token::Eq
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@@ -190,7 +210,7 @@ impl<'a> Parser<'a> {

// Look for PHP's `<>` and recover
if op.node == AssocOp::Less
&& self.token.kind == token::Gt
&& self.token == token::Gt
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@@ -208,7 +228,7 @@ impl<'a> Parser<'a> {

// Look for C++'s `<=>` and recover
if op.node == AssocOp::LessEqual
&& self.token.kind == token::Gt
&& self.token == token::Gt
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
@@ -263,7 +283,7 @@ impl<'a> Parser<'a> {
// the special cases. The code is here only for future convenience.
Fixity::None => 1,
};
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
})?;
@@ -319,7 +339,7 @@ impl<'a> Parser<'a> {
}
}

Ok(lhs)
Ok((lhs, parsed_something))
}
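Call sites throughout this file adapt by destructuring the new pair; a toy model of the changed return shape (plain `String`/`bool` stand in for `P<Expr>` and the parser, purely for illustration):

// The bool reports whether an associative operator was actually parsed.
fn parse_expr_res(input: &str) -> Result<(String, bool), String> {
    let is_assoc = input.contains('+');
    Ok((input.to_string(), is_assoc))
}

fn main() {
    // Most callers discard the flag, as in `let (expr, _) = ...` above.
    let (expr, _) = parse_expr_res("1 + 2").unwrap();
    assert_eq!(expr, "1 + 2");
    // `parse_expr_force_collect` keeps it to choose `UsePreAttrPos`.
    let (_, is_assoc) = parse_expr_res("1 + 2").unwrap();
    assert!(is_assoc);
}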
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
@@ -441,7 +461,8 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
Some(
self.parse_expr_assoc_with(prec + 1, attrs)
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
.0,
)
} else {
None
@@ -498,7 +519,7 @@ impl<'a> Parser<'a> {
// RHS must be parsed with more associativity than the dots.
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
.map(|x| (lo.to(x.span), Some(x)))
.map(|(x, _)| (lo.to(x.span), Some(x)))
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
} else {
(lo, None)
@@ -882,7 +903,7 @@ impl<'a> Parser<'a> {
let mut res = ensure_sufficient_stack(|| {
loop {
let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// We are using noexpect here because we don't expect a `?` directly after
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Question)
@@ -894,20 +915,19 @@ impl<'a> Parser<'a> {
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
continue;
}
let has_dot =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// We are using noexpect here because we don't expect a `.` directly after
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Dot)
} else if self.token.kind == TokenKind::RArrow && self.may_recover() {
// Recovery for `expr->suffix`.
self.bump();
let span = self.prev_token.span;
self.dcx().emit_err(errors::ExprRArrowCall { span });
true
} else {
self.eat(&token::Dot)
};
let has_dot = if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// We are using noexpect here because we don't expect a `.` directly after
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Dot)
} else if self.token == TokenKind::RArrow && self.may_recover() {
// Recovery for `expr->suffix`.
self.bump();
let span = self.prev_token.span;
self.dcx().emit_err(errors::ExprRArrowCall { span });
true
} else {
self.eat(&token::Dot)
};
if has_dot {
// expr.f
e = self.parse_dot_suffix_expr(lo, e)?;
@@ -1206,7 +1226,7 @@ impl<'a> Parser<'a> {
}

fn mk_expr_tuple_field_access(
&mut self,
&self,
lo: Span,
ident_span: Span,
base: P<Expr>,
@@ -1221,7 +1241,7 @@ impl<'a> Parser<'a> {

/// Parse a function call expression, `expr(...)`.
fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
} else {
None
@@ -1585,7 +1605,7 @@ impl<'a> Parser<'a> {
// Suggests using '<=' if there is an error parsing qpath when the previous token
// is an '=' token. Only emits suggestion if the '<' token and '=' token are
// directly adjacent (i.e. '=<')
if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
if maybe_eq_tok == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
let eq_lt = maybe_eq_tok.span.to(lt_span);
err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified);
}
@@ -2230,7 +2250,7 @@ impl<'a> Parser<'a> {
return Ok(());
}

if self.token.kind == token::Comma {
if self.token == token::Comma {
if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) {
return Ok(());
}
@@ -2336,7 +2356,7 @@ impl<'a> Parser<'a> {
let token = self.token.clone();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => expr,
Ok((expr, _)) => expr,
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
}
}
@@ -2360,7 +2380,7 @@ impl<'a> Parser<'a> {
None => {}
}

if self.token.kind == TokenKind::Semi
if self.token == TokenKind::Semi
&& matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
&& self.may_recover()
{
@@ -2446,7 +2466,7 @@ impl<'a> Parser<'a> {
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
let ty = if this.eat(&token::Colon) {
this.parse_ty()?
@@ -2463,7 +2483,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
this.token == token::Comma,
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@@ -2557,7 +2578,7 @@ impl<'a> Parser<'a> {
);
} else {
// Look for usages of '=>' where '>=' might be intended
if maybe_fatarrow.kind == token::FatArrow {
if maybe_fatarrow == token::FatArrow {
err.span_suggestion(
maybe_fatarrow.span,
"you might have meant to write a \"greater than or equal to\" comparison",
@@ -2584,7 +2605,7 @@ impl<'a> Parser<'a> {
/// Parses the condition of an `if` or `while` expression.
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
let mut cond =
let (mut cond, _) =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;

CondChecker::new(self).visit_expr(&mut cond);
@@ -2606,7 +2627,7 @@ impl<'a> Parser<'a> {
missing_let: None,
comparison: None,
};
if self.prev_token.kind == token::BinOp(token::Or) {
if self.prev_token == token::BinOp(token::Or) {
// This was part of a closure, let that part of the parser recover.
return Err(self.dcx().create_err(err));
} else {
@@ -2633,7 +2654,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Eq)?;
}
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let span = lo.to(expr.span);
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
}
@@ -2742,7 +2763,7 @@ impl<'a> Parser<'a> {
}

fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
@@ -2767,7 +2788,7 @@ impl<'a> Parser<'a> {
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
// happen right before the return of this method.
let attrs = self.parse_outer_attributes()?;
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
Ok(expr) => expr,
Err(expr_err) => {
// We don't know what followed the `in`, so cancel and bubble up the
@@ -2776,7 +2797,7 @@ impl<'a> Parser<'a> {
return Err(err);
}
};
return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
return if self.token == token::CloseDelim(Delimiter::Parenthesis) {
// We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
// parser state and emit a targeted suggestion.
let span = vec![start_span, self.token.span];
@@ -2802,7 +2823,7 @@ impl<'a> Parser<'a> {
}
self.check_for_for_in_in_typo(self.prev_token.span);
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
Ok((pat, expr))
}

@@ -2922,7 +2943,7 @@ impl<'a> Parser<'a> {
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
let match_span = self.prev_token.span;
let attrs = self.parse_outer_attributes()?;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;

self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
}
@@ -2995,7 +3016,7 @@ impl<'a> Parser<'a> {
first_expr: &P<Expr>,
arrow_span: Span,
) -> Option<(Span, ErrorGuaranteed)> {
if self.token.kind != token::Semi {
if self.token != token::Semi {
return None;
}
let start_snapshot = self.create_snapshot_for_diagnostic();
@@ -3024,18 +3045,18 @@ impl<'a> Parser<'a> {
// We might have either a `,` -> `;` typo, or a block without braces. We need
// a more subtle parsing strategy.
loop {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
// We have reached the closing brace of the `match` expression.
return Some(err(self, stmts));
}
if self.token.kind == token::Comma {
if self.token == token::Comma {
self.restore_snapshot(start_snapshot);
return None;
}
let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
match self.parse_pat_no_top_alt(None, None) {
Ok(_pat) => {
if self.token.kind == token::FatArrow {
if self.token == token::FatArrow {
// Reached arm end.
self.restore_snapshot(pre_pat_snapshot);
return Some(err(self, stmts));
@@ -3070,7 +3091,7 @@ impl<'a> Parser<'a> {

pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;

@@ -3127,7 +3148,7 @@ impl<'a> Parser<'a> {
let arm_start_span = this.token.span;

let attrs = this.parse_outer_attributes()?;
let expr =
let (expr, _) =
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
err.span_label(arrow_span, "while parsing the `match` arm starting here");
err
@@ -3244,7 +3265,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
false,
Trailing::No,
UsePreAttrPos::No,
))
})
}
@@ -3286,7 +3308,7 @@ impl<'a> Parser<'a> {
}

fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
// Detect and recover from `($pat if $cond) => $arm`.
let left = self.token.span;
match self.parse_pat_allow_top_alt(
@@ -3335,8 +3357,9 @@ impl<'a> Parser<'a> {

fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
|mut err| {
match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
Ok((expr, _)) => Ok(expr),
Err(mut err) => {
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
let sugg_sp = self.prev_token.span.shrink_to_lo();
// Consume everything within the braces, let's avoid further parse
@@ -3344,7 +3367,7 @@ impl<'a> Parser<'a> {
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
let msg = "you might have meant to start a match arm after the match guard";
if self.eat(&token::CloseDelim(Delimiter::Brace)) {
let applicability = if self.token.kind != token::FatArrow {
let applicability = if self.token != token::FatArrow {
// We have high confidence that we indeed didn't have a struct
// literal in the match guard, but rather we had some operation
// that ended in a path, immediately followed by a block that was
@@ -3356,9 +3379,9 @@ impl<'a> Parser<'a> {
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
}
}
err
},
)
Err(err)
}
}
}
pub(crate) fn is_builtin(&self) -> bool {
@@ -3565,7 +3588,7 @@ impl<'a> Parser<'a> {
&& self.look_ahead(1, |t| {
AssocOp::from_token(t).is_some()
|| matches!(t.kind, token::OpenDelim(_))
|| t.kind == token::Dot
|| *t == token::Dot
})
{
// Looks like they tried to write a shorthand, complex expression.
@@ -3709,7 +3732,7 @@ impl<'a> Parser<'a> {
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;

// Check if a colon exists one ahead. This means we're parsing a fieldname.
@@ -3753,7 +3776,8 @@ impl<'a> Parser<'a> {
id: DUMMY_NODE_ID,
is_placeholder: false,
},
this.token == token::Comma,
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@@ -3847,15 +3871,17 @@ impl<'a> Parser<'a> {
attrs: AttrWrapper,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = (this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token.kind == token::Semi)
// FIXME: pass an additional condition through from the place
// where we know we need a comma, rather than assuming that
// `#[attr] expr,` always captures a trailing comma.
|| this.token.kind == token::Comma;
Ok((res, trailing))
let trailing = Trailing::from(
this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token == token::Semi
// FIXME: pass an additional condition through from the place
// where we know we need a comma, rather than assuming that
// `#[attr] expr,` always captures a trailing comma.
|| this.token == token::Comma,
);
Ok((res, trailing, UsePreAttrPos::No))
})
}
}
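The `collect_tokens` calls above all hand back a `(node, Trailing, UsePreAttrPos)` triple from the closure; a simplified, runnable model of that contract (local enums and a `String` error type stand in for the real parser machinery):

#[derive(Debug, PartialEq)]
enum Trailing { No, Yes }

#[derive(Debug, PartialEq)]
enum UsePreAttrPos { No, Yes }

fn collect_tokens<R>(
    f: impl FnOnce() -> Result<(R, Trailing, UsePreAttrPos), String>,
) -> Result<R, String> {
    let (node, _trailing, _use_pre_attr_pos) = f()?;
    // The real implementation uses the two extra fields to decide how many
    // tokens to capture and where the captured range should start.
    Ok(node)
}

fn main() {
    let param = collect_tokens(|| Ok(("param", Trailing::No, UsePreAttrPos::No)));
    assert_eq!(param.unwrap(), "param");
}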
@@ -7,7 +7,7 @@ use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use thin_vec::ThinVec;

use super::{ForceCollect, Parser};
use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
use crate::errors::{
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
@@ -169,94 +169,88 @@ impl<'a> Parser<'a> {
let mut done = false;
while !done {
let attrs = self.parse_outer_attributes()?;
let param =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
this.dcx().emit_err(UnexpectedSelfInGenericParameters {
span: this.prev_token.span,
});
let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
this.dcx()
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });

// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
}
// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
}

let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
(None, Vec::new())
};

if this.check_noexpect(&token::Eq)
&& this.look_ahead(1, |t| t.is_lifetime())
{
let lo = this.token.span;
// Parse `= 'lifetime`.
this.bump(); // `=`
this.bump(); // `'lifetime`
let span = lo.to(this.prev_token.span);
this.dcx().emit_err(
UnexpectedDefaultValueForLifetimeInGenericParameters { span },
);
}

Some(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs,
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
// Parse type parameter.
Some(this.parse_ty_param(attrs)?)
} else if this.token.can_begin_type() {
// Trying to write an associated type bound? (#26271)
let snapshot = this.create_snapshot_for_diagnostic();
match this.parse_ty_where_predicate() {
Ok(where_predicate) => {
this.dcx().emit_err(errors::BadAssocTypeBounds {
span: where_predicate.span(),
});
// FIXME - try to continue parsing other generics?
return Ok((None, false));
}
Err(err) => {
err.cancel();
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
this.restore_snapshot(snapshot);
return Ok((None, false));
}
}
let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
this.dcx()
.emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
} else {
this.dcx()
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
}
}
return Ok((None, false));
(None, Vec::new())
};

if !this.eat(&token::Comma) {
done = true;
if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
let lo = this.token.span;
// Parse `= 'lifetime`.
this.bump(); // `=`
this.bump(); // `'lifetime`
let span = lo.to(this.prev_token.span);
this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
span,
});
}
// We just ate the comma, so no need to capture the trailing token.
Ok((param, false))
})?;

Some(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs,
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
// Parse type parameter.
Some(this.parse_ty_param(attrs)?)
} else if this.token.can_begin_type() {
// Trying to write an associated type bound? (#26271)
let snapshot = this.create_snapshot_for_diagnostic();
match this.parse_ty_where_predicate() {
Ok(where_predicate) => {
this.dcx().emit_err(errors::BadAssocTypeBounds {
span: where_predicate.span(),
});
// FIXME - try to continue parsing other generics?
}
Err(err) => {
err.cancel();
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
this.restore_snapshot(snapshot);
}
}
return Ok((None, Trailing::No, UsePreAttrPos::No));
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
} else {
this.dcx()
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
}
}
return Ok((None, Trailing::No, UsePreAttrPos::No));
};

if !this.eat(&token::Comma) {
done = true;
}
// We just ate the comma, so no need to capture the trailing token.
Ok((param, Trailing::No, UsePreAttrPos::No))
})?;

if let Some(param) = param {
params.push(param);
@@ -393,7 +387,7 @@ impl<'a> Parser<'a> {

if let Some(struct_) = struct_
&& self.may_recover()
&& self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.token == token::OpenDelim(Delimiter::Parenthesis)
{
snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
};
@@ -20,7 +20,9 @@ use tracing::debug;

use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::{fluent_generated as fluent, maybe_whole};

@@ -127,7 +129,7 @@ impl<'a> Parser<'a> {
Some(item.into_inner())
});

self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
let mut def = this.parse_defaultness();
@@ -145,7 +147,7 @@ impl<'a> Parser<'a> {
let span = lo.to(this.prev_token.span);
let id = DUMMY_NODE_ID;
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
return Ok((Some(item), false));
return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
}

// At this point, we have failed to parse an item.
@@ -160,7 +162,7 @@ impl<'a> Parser<'a> {
if !attrs_allowed {
this.recover_attrs_no_item(&attrs)?;
}
Ok((None, false))
Ok((None, Trailing::No, UsePreAttrPos::No))
})
}

@@ -354,7 +356,7 @@ impl<'a> Parser<'a> {
fn is_reuse_path_item(&mut self) -> bool {
// no: `reuse ::path` for compatibility reasons with macro invocations
self.token.is_keyword(kw::Reuse)
&& self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep)
&& self.look_ahead(1, |t| t.is_path_start() && *t != token::PathSep)
}

/// Are we sure this could not possibly be a macro invocation?
@@ -499,7 +501,7 @@ impl<'a> Parser<'a> {
let mut err = self.dcx().struct_span_err(end.span, msg);
if end.is_doc_comment() {
err.span_label(end.span, "this doc comment doesn't document anything");
} else if self.token.kind == TokenKind::Semi {
} else if self.token == TokenKind::Semi {
err.span_suggestion_verbose(
self.token.span,
"consider removing this semicolon",
@@ -777,12 +779,12 @@ impl<'a> Parser<'a> {
&& self
.span_to_snippet(self.prev_token.span)
.is_ok_and(|snippet| snippet == "}")
&& self.token.kind == token::Semi;
&& self.token == token::Semi;
let mut semicolon_span = self.token.span;
if !is_unnecessary_semicolon {
// #105369, Detect spurious `;` before assoc fn body
is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
&& self.prev_token.kind == token::Semi;
&& self.prev_token == token::Semi;
semicolon_span = self.prev_token.span;
}
// We have to bail or we'll potentially never make progress.
@@ -1194,7 +1196,7 @@ impl<'a> Parser<'a> {
// FIXME: This recovery should be tested better.
if safety == Safety::Default
&& self.token.is_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
{
self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
safety = Safety::Unsafe(self.token.span);
@@ -1258,7 +1260,7 @@ impl<'a> Parser<'a> {
&& self.is_keyword_ahead(1, &[kw::Extern])
&& self.look_ahead(
2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
|t| t.kind == token::OpenDelim(Delimiter::Brace),
|t| *t == token::OpenDelim(Delimiter::Brace),
)
}

@@ -1343,7 +1345,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (Ident, StaticItem)> {
let ident = self.parse_ident()?;

if self.token.kind == TokenKind::Lt && self.may_recover() {
if self.token == TokenKind::Lt && self.may_recover() {
let generics = self.parse_generics()?;
self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
}
@@ -1546,86 +1548,82 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
|this, variant_attrs| {
let vlo = this.token.span;
self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
let vlo = this.token.span;

let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, false));
}
let ident = this.parse_field_ident("enum", vlo)?;
let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, Trailing::No, UsePreAttrPos::No));
}
let ident = this.parse_field_ident("enum", vlo)?;

if this.token == token::Not {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}

this.bump();
this.parse_delim_args()?;

return Ok((None, this.token == token::Comma));
if this.token == token::Not {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}

let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
let guar = err.emit();
(thin_vec![], Recovered::Yes(guar))
}
};
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
this.bump();
this.parse_delim_args()?;

return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
}

let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
let guar = err.emit();
(thin_vec![], Recovered::Yes(guar))
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
};

let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };

let vr = ast::Variant {
ident,
vis,
id: DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
span: vlo.to(this.prev_token.span),
is_placeholder: false,
};
let vr = ast::Variant {
ident,
vis,
id: DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
span: vlo.to(this.prev_token.span),
is_placeholder: false,
};

Ok((Some(vr), this.token == token::Comma))
},
)
Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
})
.map_err(|mut err| {
err.help(help);
err
@@ -1777,7 +1775,7 @@ impl<'a> Parser<'a> {
// Unit like structs are handled in parse_item_struct function
self.parse_paren_comma_seq(|p| {
let attrs = p.parse_outer_attributes()?;
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
let mut snapshot = None;
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
@@ -1815,7 +1813,8 @@ impl<'a> Parser<'a> {
attrs,
is_placeholder: false,
},
p.token == token::Comma,
Trailing::from(p.token == token::Comma),
UsePreAttrPos::No,
))
})
})
@@ -1827,10 +1826,11 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
this.parse_single_struct_field(adt_ty, lo, vis, attrs).map(|field| (field, false))
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
.map(|field| (field, Trailing::No, UsePreAttrPos::No))
})
}

@@ -1914,7 +1914,7 @@ impl<'a> Parser<'a> {
let mut err = self.dcx().struct_span_err(sp, msg);

if self.token.is_ident()
|| (self.token.kind == TokenKind::Pound
|| (self.token == TokenKind::Pound
&& (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
{
// This is likely another field, TokenKind::Pound is used for `#[..]`
@@ -1937,8 +1937,8 @@ impl<'a> Parser<'a> {
fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
if let Err(err) = self.expect(&token::Colon) {
let sm = self.psess.source_map();
let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token.kind == token::Semi
let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token == token::Semi
&& self.look_ahead(1, |t| {
t.is_path_start()
// We check that we are in a situation like `foo; bar` to avoid bad suggestions
@@ -1974,7 +1974,7 @@ impl<'a> Parser<'a> {
attrs: AttrVec,
) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?;
if self.token.kind == token::Not {
if self.token == token::Not {
if let Err(mut err) = self.unexpected() {
// Encounter the macro invocation
err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
@@ -1983,10 +1983,10 @@ impl<'a> Parser<'a> {
}
self.expect_field_ty_separator()?;
let ty = self.parse_ty_for_field_def()?;
if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
if self.token == token::Colon && self.look_ahead(1, |t| *t != token::Colon) {
self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
}
if self.token.kind == token::Eq {
if self.token == token::Eq {
self.bump();
let const_expr = self.parse_expr_anon_const()?;
let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
@@ -2064,7 +2064,7 @@ impl<'a> Parser<'a> {
.parse_ident_common(false)
// Cancel this error, we don't need it.
.map_err(|err| err.cancel())
&& self.token.kind == TokenKind::Colon
&& self.token == TokenKind::Colon
{
err.span_suggestion(
removal_span,
@@ -2367,12 +2367,12 @@ impl<'a> Parser<'a> {
match self.expected_one_of_not_found(&[], expected) {
Ok(error_guaranteed) => Ok(error_guaranteed),
Err(mut err) => {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking.
err.span_label(ident_span, "while parsing this `fn`");
Ok(err.emit())
} else if self.token.kind == token::RArrow
} else if self.token == token::RArrow
&& let Some(fn_params_end) = fn_params_end
{
// Instead of a function body, the parser has encountered a right arrow
@@ -2445,7 +2445,7 @@ impl<'a> Parser<'a> {
fn_params_end: Option<Span>,
) -> PResult<'a, Option<P<Block>>> {
let has_semi = if req_body {
self.token.kind == TokenKind::Semi
self.token == TokenKind::Semi
} else {
// Only include `;` in list of expected tokens if body is not required
self.check(&TokenKind::Semi)
@@ -2458,7 +2458,7 @@ impl<'a> Parser<'a> {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
.map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token.kind == token::Eq {
} else if self.token == token::Eq {
// Recover `fn foo() = $expr;`.
self.bump(); // `=`
let eq_sp = self.prev_token.span;
@@ -2761,7 +2761,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
let mut first_param = true;
// Parse the arguments, starting out with `self` being allowed...
if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis)
// might be typo'd trait impl, handled elsewhere
&& !self.token.is_keyword(kw::For)
{
@@ -2805,12 +2805,12 @@ impl<'a> Parser<'a> {
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
if let Some(mut param) = this.parse_self_param()? {
param.attrs = attrs;
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
return Ok((res?, false));
return Ok((res?, Trailing::No, UsePreAttrPos::No));
}

let is_name_required = match this.token.kind {
@@ -2826,7 +2826,7 @@ impl<'a> Parser<'a> {
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
{
let guar = err.emit();
Ok((dummy_arg(ident, guar), false))
Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
} else {
Err(err)
};
@@ -2869,7 +2869,8 @@ impl<'a> Parser<'a> {

Ok((
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
false,
Trailing::No,
UsePreAttrPos::No,
))
})
}
@ -14,7 +14,7 @@ use std::assert_matches::debug_assert_matches;
|
||||
use std::ops::Range;
|
||||
use std::{fmt, mem, slice};
|
||||
|
||||
use attr_wrapper::AttrWrapper;
|
||||
use attr_wrapper::{AttrWrapper, UsePreAttrPos};
|
||||
pub use diagnostics::AttemptLocalParseRecovery;
|
||||
pub(crate) use expr::ForbiddenLetReason;
|
||||
pub(crate) use item::FnParseMode;
|
||||
@ -238,6 +238,7 @@ impl NodeRange {
|
||||
// is the position of the function's start token. This gives
|
||||
// `NodeRange(10..15)`.
|
||||
fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
|
||||
assert!(parser_range.start >= start_pos && parser_range.end >= start_pos);
|
||||
NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
|
||||
}
|
||||
}
|
||||
@ -253,7 +254,7 @@ enum Capturing {
|
||||
Yes,
|
||||
}
|
||||
|
||||
// This state is used by `Parser::collect_tokens_trailing_token`.
|
||||
// This state is used by `Parser::collect_tokens`.
|
||||
#[derive(Clone, Debug)]
|
||||
struct CaptureState {
|
||||
capturing: Capturing,
|
||||
@ -388,6 +389,12 @@ enum Trailing {
|
||||
Yes,
|
||||
}
|
||||
|
||||
impl From<bool> for Trailing {
|
||||
fn from(b: bool) -> Trailing {
|
||||
if b { Trailing::Yes } else { Trailing::No }
|
||||
}
|
||||
}
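
// A minimal sketch of the new contract (names as in this patch): every
// `collect_tokens` callback now returns a triple instead of `(R, bool)`, and
// `Trailing::from` keeps the old `bool`-style call sites readable, e.g.
//
//     this.collect_tokens(None, attrs, force_collect, |this, attrs| {
//         let stmt = /* parse something */;
//         Ok((stmt, Trailing::from(this.token == token::Semi), UsePreAttrPos::No))
//     })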

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(super) enum TokenDescription {
ReservedIdentifier,
@ -459,8 +466,8 @@ impl<'a> Parser<'a> {
parser.bump();

// Change this from 1 back to 0 after the bump. This eases debugging of
// `Parser::collect_tokens_trailing_token` nicer because it makes the
// token positions 0-indexed which is nicer than 1-indexed.
// `Parser::collect_tokens` because 0-indexed token positions are nicer
// than 1-indexed token positions.
parser.num_bump_calls = 0;

parser
@ -527,7 +534,7 @@ impl<'a> Parser<'a> {
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(Recovered::No)
} else if self.token.kind != token::Eof
} else if self.token != token::Eof
&& self.last_unexpected_token_span == Some(self.token.span)
{
FatalError.raise();
@ -756,7 +763,7 @@ impl<'a> Parser<'a> {
/// compound tokens like multi-character operators in process.
/// Returns `true` if the token was eaten.
fn break_and_eat(&mut self, expected: TokenKind) -> bool {
if self.token.kind == expected {
if self.token == expected {
self.bump();
return true;
}
@ -882,7 +889,7 @@ impl<'a> Parser<'a> {
let token_str = pprust::token_kind_to_string(t);

match self.current_closure.take() {
Some(closure_spans) if self.token.kind == TokenKind::Semi => {
Some(closure_spans) if self.token == TokenKind::Semi => {
// Finding a semicolon instead of a comma
// after a closure body indicates that the
// closure body may be a block but the user
@ -910,7 +917,7 @@ impl<'a> Parser<'a> {
// If this was a missing `@` in a binding pattern
// bail with a suggestion
// https://github.com/rust-lang/rust/issues/72373
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
if self.prev_token.is_ident() && self.token == token::DotDot {
let msg = format!(
"if you meant to bind the contents of the rest of the array \
pattern into `{}`, use `@`",
@ -1546,11 +1553,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, R> {
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
// `ForceCollect::Yes`
self.collect_tokens_trailing_token(
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _attrs| Ok((f(this)?, false)),
)
self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
})
}

/// `::{` or `::*`
|
@ -13,7 +13,7 @@ use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, ErrorGuaranteed, Span};
use thin_vec::{thin_vec, ThinVec};

use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
use crate::errors::{
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@ -369,7 +369,7 @@ impl<'a> Parser<'a> {
.and_then(|(ident, _)| ident.name.as_str().chars().next())
.is_some_and(char::is_lowercase)
})
&& self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Parenthesis));
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Parenthesis));

// Check for operators.
// `|` is excluded as it is used in pattern alternatives and lambdas,
@ -377,9 +377,9 @@ impl<'a> Parser<'a> {
// `[` is included for indexing operations,
// `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`)
let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
|| self.token.kind == token::Question
|| (self.token.kind == token::OpenDelim(Delimiter::Bracket)
&& self.look_ahead(1, |tok| tok.kind != token::CloseDelim(Delimiter::Bracket)));
|| self.token == token::Question
|| (self.token == token::OpenDelim(Delimiter::Bracket)
&& self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket)));

if !has_trailing_method && !has_trailing_operator {
// Nothing to recover here.
@ -403,7 +403,7 @@ impl<'a> Parser<'a> {

// Parse an associative expression such as `+ expr`, `% expr`, ...
// Assignments, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
if let Ok(expr) =
if let Ok((expr, _)) =
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
{
// We got a valid expression.
@ -413,7 +413,7 @@ impl<'a> Parser<'a> {
let is_bound = is_end_bound
// is_start_bound: either `..` or `)..`
|| self.token.is_range_separator()
|| self.token.kind == token::CloseDelim(Delimiter::Parenthesis)
|| self.token == token::CloseDelim(Delimiter::Parenthesis)
&& self.look_ahead(1, Token::is_range_separator);

// Check that `parse_expr_assoc_with` didn't eat a rhs.
@ -450,7 +450,7 @@ impl<'a> Parser<'a> {
lo = self.token.span;
}

let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_or_parens()?
@ -625,7 +625,7 @@ impl<'a> Parser<'a> {
///
/// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching
fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> {
if self.token.kind != token::At {
if self.token != token::At {
// Next token is not `@` so it's not going to be an intersection pattern.
return Ok(lhs);
}
@ -958,14 +958,14 @@ impl<'a> Parser<'a> {
self.check_inline_const(dist)
|| self.look_ahead(dist, |t| {
t.is_path_start() // e.g. `MY_CONST`;
|| t.kind == token::Dot // e.g. `.5` for recovery;
|| *t == token::Dot // e.g. `.5` for recovery;
|| matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
|| t.is_bool_lit()
|| t.is_whole_expr()
|| t.is_lifetime() // recover `'a` instead of `'a'`
|| (self.may_recover() // recover leading `(`
&& t.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
&& *t == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis))
&& self.is_pat_range_end_start(dist + 1))
})
}
@ -1302,24 +1302,23 @@ impl<'a> Parser<'a> {
}
}

let field =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let field = match this.parse_pat_field(lo, attrs) {
Ok(field) => Ok(field),
Err(err) => {
if let Some(delayed_err) = delayed_err.take() {
delayed_err.emit();
}
return Err(err);
let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let field = match this.parse_pat_field(lo, attrs) {
Ok(field) => Ok(field),
Err(err) => {
if let Some(delayed_err) = delayed_err.take() {
delayed_err.emit();
}
}?;
ate_comma = this.eat(&token::Comma);
return Err(err);
}
}?;
ate_comma = this.eat(&token::Comma);

last_non_comma_dotdot_span = Some(this.prev_token.span);
last_non_comma_dotdot_span = Some(this.prev_token.span);

// We just ate a comma, so there's no need to capture a trailing token.
Ok((field, false))
})?;
// We just ate a comma, so there's no need to capture a trailing token.
Ok((field, Trailing::No, UsePreAttrPos::No))
})?;

fields.push(field)
}
|
@ -358,9 +358,9 @@ impl<'a> Parser<'a> {
})?;
let span = lo.to(self.prev_token.span);
AngleBracketedArgs { args, span }.into()
} else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
} else if self.token == token::OpenDelim(Delimiter::Parenthesis)
// FIXME(return_type_notation): Could also recover `...` here.
&& self.look_ahead(1, |tok| tok.kind == token::DotDot)
&& self.look_ahead(1, |t| *t == token::DotDot)
{
self.bump(); // (
self.bump(); // ..
@ -384,7 +384,7 @@ impl<'a> Parser<'a> {
let token_before_parsing = self.token.clone();
let mut snapshot = None;
if self.may_recover()
&& prev_token_before_parsing.kind == token::PathSep
&& prev_token_before_parsing == token::PathSep
&& (style == PathStyle::Expr && self.token.can_begin_expr()
|| style == PathStyle::Pat && self.token.can_begin_pattern())
{
@ -393,7 +393,7 @@ impl<'a> Parser<'a> {

let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) {
Ok(output) => output,
Err(mut error) if prev_token_before_parsing.kind == token::PathSep => {
Err(mut error) if prev_token_before_parsing == token::PathSep => {
error.span_label(
prev_token_before_parsing.span.to(token_before_parsing.span),
"while parsing this parenthesized list of type arguments starting here",
@ -913,7 +913,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
Ok(expr) => {
Ok((expr, _)) => {
return Ok(Some(self.dummy_const_arg_needs_braces(
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
expr.span,
|
@ -21,6 +21,7 @@ use super::pat::{PatternLocation, RecoverComma};
use super::path::PathStyle;
use super::{
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
Trailing, UsePreAttrPos,
};
use crate::errors::MalformedLoopLabel;
use crate::{errors, maybe_whole};
@ -45,6 +46,7 @@ impl<'a> Parser<'a> {
capture_semi: bool,
force_collect: ForceCollect,
) -> PResult<'a, Option<Stmt>> {
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;

@ -65,11 +67,15 @@ impl<'a> Parser<'a> {
}

Ok(Some(if self.token.is_keyword(kw::Let) {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = capture_semi && this.token.kind == token::Semi;
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
trailing,
UsePreAttrPos::No,
))
})?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
self.recover_stmt_local_after_let(
@ -103,10 +109,18 @@ impl<'a> Parser<'a> {
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token), but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
let stmt = self.collect_tokens_trailing_token(
//
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
// expression, not the statement. (But the statement attributes/tokens are obtained
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
// the things within `StmtKind`.)
let stmt = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, false)),
|this, _empty_attrs| {
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
},
);
match stmt {
Ok(stmt) => stmt,
@ -128,12 +142,15 @@ impl<'a> Parser<'a> {
self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
let e = self.collect_tokens_trailing_token(
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
// above.
let e = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| {
Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, false))
let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
},
)?;
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
@ -150,12 +167,16 @@ impl<'a> Parser<'a> {
}

fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;

if this.eat(&token::Not) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((stmt_mac, this.token == token::Semi));
return Ok((
stmt_mac,
Trailing::from(this.token == token::Semi),
UsePreAttrPos::No,
));
}

let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
@ -169,13 +190,17 @@ impl<'a> Parser<'a> {
this.parse_expr_dot_or_call_with(attrs, expr, lo)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), false))
Ok((
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
Trailing::No,
UsePreAttrPos::No,
))
})?;

if let StmtKind::Expr(expr) = stmt.kind {
// Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
// Perform this outside of the `collect_tokens` closure, since our
// outer attributes do not apply to this part of the expression.
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_expr_assoc_rest_with(0, true, expr)
})?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
@ -209,7 +234,7 @@ impl<'a> Parser<'a> {
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?;
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
let e = self.parse_expr_assoc_rest_with(0, false, e)?;
let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?;
StmtKind::Expr(e)
};
Ok(self.mk_stmt(lo.to(hi), kind))
@ -239,10 +264,14 @@ impl<'a> Parser<'a> {
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
let local = this.parse_local(attrs)?;
// FIXME - maybe capture semicolon in recovery?
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), false))
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
Trailing::No,
UsePreAttrPos::No,
))
})?;
self.dcx()
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
@ -760,7 +789,7 @@ impl<'a> Parser<'a> {
)
),
);
let suggest_eq = if self.token.kind == token::Dot
let suggest_eq = if self.token == token::Dot
&& let _ = self.bump()
&& let mut snapshot = self.create_snapshot_for_diagnostic()
&& let Ok(_) = snapshot
|
@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
let mut trailing_plus = false;
let (ts, trailing) = self.parse_paren_comma_seq(|p| {
let ty = p.parse_ty()?;
trailing_plus = p.prev_token.kind == TokenKind::BinOp(token::Plus);
trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
Ok(ty)
})?;

@ -499,8 +499,8 @@ impl<'a> Parser<'a> {
let elt_ty = match self.parse_ty() {
Ok(ty) => ty,
Err(err)
if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| t.kind == token::Semi) =>
if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| *t == token::Semi) =>
{
// Recover from `[LIT; EXPR]` and `[LIT]`
self.bump();
@ -601,7 +601,7 @@ impl<'a> Parser<'a> {
let span_start = self.token.span;
let ast::FnHeader { ext, safety, constness, coroutine_kind } =
self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
if self.may_recover() && self.token.kind == TokenKind::Lt {
if self.may_recover() && self.token == TokenKind::Lt {
self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
}
let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
@ -681,7 +681,7 @@ impl<'a> Parser<'a> {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;

*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);

Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
}
@ -727,8 +727,7 @@ impl<'a> Parser<'a> {
self.check_keyword(kw::Dyn)
&& (self.token.uninterpolated_span().at_least_rust_2018()
|| self.look_ahead(1, |t| {
(can_begin_dyn_bound_in_edition_2015(t)
|| t.kind == TokenKind::BinOp(token::Star))
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
&& !can_continue_type_after_non_fn_ident(t)
}))
}
@ -750,7 +749,7 @@ impl<'a> Parser<'a> {

// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
Ok(TyKind::TraitObject(bounds, syntax))
}

@ -1060,7 +1059,7 @@ impl<'a> Parser<'a> {
}

let mut path = if self.token.is_keyword(kw::Fn)
&& self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
&& self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis))
&& let Some(path) = self.recover_path_from_fn()
{
path
|
@ -774,17 +774,16 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
}
}

/// Convert to a [`print::Pat`] for diagnostic purposes.
fn hoist_pat_range(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> print::Pat<'tcx> {
use print::{Pat, PatKind};
/// Prints an [`IntRange`] to a string for diagnostic purposes.
fn print_pat_range(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> String {
use MaybeInfiniteInt::*;
let cx = self;
let kind = if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) {
PatKind::Wild
if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) {
"_".to_string()
} else if range.is_singleton() {
let lo = cx.hoist_pat_range_bdy(range.lo, ty);
let value = lo.as_finite().unwrap();
PatKind::Constant { value }
value.to_string()
} else {
// We convert to an inclusive range for diagnostics.
let mut end = rustc_hir::RangeEnd::Included;
@ -807,32 +806,24 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
range.hi
};
let hi = cx.hoist_pat_range_bdy(hi, ty);
PatKind::Range(Box::new(PatRange { lo, hi, end, ty: ty.inner() }))
};

Pat { ty: ty.inner(), kind }
PatRange { lo, hi, end, ty: ty.inner() }.to_string()
}
}
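
// Sketch of the strings produced above (inferred from the branches): a range
// covering the whole type prints as "_", a singleton such as 5 prints "5",
// and any other range falls through to `PatRange`'s `Display`, e.g. "1..=5".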

/// Prints a [`WitnessPat`] to an owned string, for diagnostic purposes.
///
/// This panics for patterns that don't appear in diagnostics, like float ranges.
pub fn print_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> String {
// This works by converting the witness pattern to a `print::Pat`
// and then printing that, but callers don't need to know that.
self.hoist_witness_pat(pat).to_string()
}

/// Convert to a [`print::Pat`] for diagnostic purposes. This panics for patterns that don't
/// appear in diagnostics, like float ranges.
fn hoist_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> print::Pat<'tcx> {
use print::{FieldPat, Pat, PatKind};
let cx = self;
let hoist = |p| Box::new(cx.hoist_witness_pat(p));
let kind = match pat.ctor() {
Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
IntRange(range) => return self.hoist_pat_range(range, *pat.ty()),
let print = |p| cx.print_witness_pat(p);
match pat.ctor() {
Bool(b) => b.to_string(),
Str(s) => s.to_string(),
IntRange(range) => return self.print_pat_range(range, *pat.ty()),
Struct if pat.ty().is_box() => {
// Outside of the `alloc` crate, the only way to create a struct pattern
// of type `Box` is to use a `box` pattern via #[feature(box_patterns)].
PatKind::Box { subpattern: hoist(&pat.fields[0]) }
format!("box {}", print(&pat.fields[0]))
}
Struct | Variant(_) | UnionField => {
let enum_info = match *pat.ty().kind() {
@ -847,12 +838,29 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
let subpatterns = pat
.iter_fields()
.enumerate()
.map(|(i, pat)| FieldPat { field: FieldIdx::new(i), pattern: hoist(pat) })
.map(|(i, pat)| print::FieldPat {
field: FieldIdx::new(i),
pattern: print(pat),
is_wildcard: would_print_as_wildcard(cx.tcx, pat),
})
.collect::<Vec<_>>();

PatKind::StructLike { enum_info, subpatterns }
let mut s = String::new();
print::write_struct_like(
&mut s,
self.tcx,
pat.ty().inner(),
&enum_info,
&subpatterns,
)
.unwrap();
s
}
Ref => {
let mut s = String::new();
print::write_ref_like(&mut s, pat.ty().inner(), &print(&pat.fields[0])).unwrap();
s
}
Ref => PatKind::Deref { subpattern: hoist(&pat.fields[0]) },
Slice(slice) => {
let (prefix_len, has_dot_dot) = match slice.kind {
SliceKind::FixedLen(len) => (len, false),
@ -879,14 +887,15 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
}
}

let prefix = prefix.iter().map(hoist).collect();
let suffix = suffix.iter().map(hoist).collect();
let prefix = prefix.iter().map(print).collect::<Vec<_>>();
let suffix = suffix.iter().map(print).collect::<Vec<_>>();

PatKind::Slice { prefix, has_dot_dot, suffix }
let mut s = String::new();
print::write_slice_like(&mut s, &prefix, has_dot_dot, &suffix).unwrap();
s
}
&Str(value) => PatKind::Constant { value },
Never if self.tcx.features().never_patterns => PatKind::Never,
Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
Never if self.tcx.features().never_patterns => "!".to_string(),
Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => "_".to_string(),
Missing { .. } => bug!(
"trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
`Missing` should have been processed in `apply_constructors`"
@ -894,9 +903,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..) | Or => {
bug!("can't convert to pattern: {:?}", pat)
}
};

Pat { ty: pat.ty().inner(), kind }
}
}
}

@ -972,7 +979,7 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> {
overlaps_on: IntRange,
overlaps_with: &[&crate::pat::DeconstructedPat<Self>],
) {
let overlap_as_pat = self.hoist_pat_range(&overlaps_on, *pat.ty());
let overlap_as_pat = self.print_pat_range(&overlaps_on, *pat.ty());
let overlaps: Vec<_> = overlaps_with
.iter()
.map(|pat| pat.data().span)
@ -1012,7 +1019,7 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> {
suggested_range.end = rustc_hir::RangeEnd::Included;
suggested_range.to_string()
};
let gap_as_pat = self.hoist_pat_range(&gap, *pat.ty());
let gap_as_pat = self.print_pat_range(&gap, *pat.ty());
if gapped_with.is_empty() {
// If `gapped_with` is empty, `gap == T::MAX`.
self.tcx.emit_node_span_lint(
|
@ -11,75 +11,16 @@

use std::fmt;

use rustc_middle::thir::PatRange;
use rustc_middle::bug;
use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
use rustc_middle::{bug, mir};
use rustc_span::sym;
use rustc_target::abi::{FieldIdx, VariantIdx};

#[derive(Clone, Debug)]
pub(crate) struct FieldPat<'tcx> {
pub(crate) struct FieldPat {
pub(crate) field: FieldIdx,
pub(crate) pattern: Box<Pat<'tcx>>,
}

#[derive(Clone, Debug)]
pub(crate) struct Pat<'tcx> {
pub(crate) ty: Ty<'tcx>,
pub(crate) kind: PatKind<'tcx>,
}

#[derive(Clone, Debug)]
pub(crate) enum PatKind<'tcx> {
Wild,

StructLike {
enum_info: EnumInfo<'tcx>,
subpatterns: Vec<FieldPat<'tcx>>,
},

Box {
subpattern: Box<Pat<'tcx>>,
},

Deref {
subpattern: Box<Pat<'tcx>>,
},

Constant {
value: mir::Const<'tcx>,
},

Range(Box<PatRange<'tcx>>),

Slice {
prefix: Box<[Box<Pat<'tcx>>]>,
/// True if this slice-like pattern should include a `..` between the
/// prefix and suffix.
has_dot_dot: bool,
suffix: Box<[Box<Pat<'tcx>>]>,
},

Never,
}

impl<'tcx> fmt::Display for Pat<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
PatKind::Wild => write!(f, "_"),
PatKind::Never => write!(f, "!"),
PatKind::Box { ref subpattern } => write!(f, "box {subpattern}"),
PatKind::StructLike { ref enum_info, ref subpatterns } => {
ty::tls::with(|tcx| write_struct_like(f, tcx, self.ty, enum_info, subpatterns))
}
PatKind::Deref { ref subpattern } => write_ref_like(f, self.ty, subpattern),
PatKind::Constant { value } => write!(f, "{value}"),
PatKind::Range(ref range) => write!(f, "{range}"),
PatKind::Slice { ref prefix, has_dot_dot, ref suffix } => {
write_slice_like(f, prefix, has_dot_dot, suffix)
}
}
}
pub(crate) pattern: String,
pub(crate) is_wildcard: bool,
}

/// Returns a closure that will return `""` when called the first time,
@ -103,12 +44,12 @@ pub(crate) enum EnumInfo<'tcx> {
NotEnum,
}

fn write_struct_like<'tcx>(
pub(crate) fn write_struct_like<'tcx>(
f: &mut impl fmt::Write,
tcx: TyCtxt<'_>,
ty: Ty<'tcx>,
enum_info: &EnumInfo<'tcx>,
subpatterns: &[FieldPat<'tcx>],
subpatterns: &[FieldPat],
) -> fmt::Result {
let variant_and_name = match *enum_info {
EnumInfo::Enum { adt_def, variant_index } => {
@ -139,12 +80,12 @@ fn write_struct_like<'tcx>(
write!(f, " {{ ")?;

let mut printed = 0;
for p in subpatterns {
if let PatKind::Wild = p.pattern.kind {
for &FieldPat { field, ref pattern, is_wildcard } in subpatterns {
if is_wildcard {
continue;
}
let name = variant.fields[p.field].name;
write!(f, "{}{}: {}", start_or_comma(), name, p.pattern)?;
let field_name = variant.fields[field].name;
write!(f, "{}{field_name}: {pattern}", start_or_comma())?;
printed += 1;
}

@ -184,10 +125,10 @@ fn write_struct_like<'tcx>(
Ok(())
}

fn write_ref_like<'tcx>(
pub(crate) fn write_ref_like<'tcx>(
f: &mut impl fmt::Write,
ty: Ty<'tcx>,
subpattern: &Pat<'tcx>,
subpattern: &str,
) -> fmt::Result {
match ty.kind() {
ty::Ref(_, _, mutbl) => {
@ -198,11 +139,11 @@ fn write_ref_like<'tcx>(
write!(f, "{subpattern}")
}

fn write_slice_like<'tcx>(
pub(crate) fn write_slice_like(
f: &mut impl fmt::Write,
prefix: &[Box<Pat<'tcx>>],
prefix: &[String],
has_dot_dot: bool,
suffix: &[Box<Pat<'tcx>>],
suffix: &[String],
) -> fmt::Result {
let mut start_or_comma = start_or_comma();
write!(f, "[")?;
|
@ -1188,7 +1188,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) {

// Sanitizers can only be used on platforms that we know have working sanitizer codegen.
let supported_sanitizers = sess.target.options.supported_sanitizers;
let unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers;
let mut unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers;
// Niche case: if `fixed-x18` (which effectively switches on the `reserved-x18`
// flag) is enabled, we should allow the Shadow Call Stack sanitizer.
if sess.opts.unstable_opts.fixed_x18 && sess.target.arch == "aarch64" {
unsupported_sanitizers -= SanitizerSet::SHADOWCALLSTACK;
}
match unsupported_sanitizers.into_iter().count() {
0 => {}
1 => {
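
// Illustration of the carve-out above (assumed behavior): on aarch64 with
// `-Zfixed-x18`, SHADOWCALLSTACK is subtracted from the unsupported set before
// the error is emitted, so `-Zsanitizer=shadow-call-stack` is accepted there
// while still erroring on targets that do not reserve x18.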
|
@ -1561,6 +1561,7 @@ supported_targets! {
("powerpc-unknown-linux-gnu", powerpc_unknown_linux_gnu),
("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe),
("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl),
("powerpc-unknown-linux-muslspe", powerpc_unknown_linux_muslspe),
("powerpc64-ibm-aix", powerpc64_ibm_aix),
("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu),
("powerpc64-unknown-linux-musl", powerpc64_unknown_linux_musl),
|
@ -0,0 +1,28 @@
use crate::abi::Endian;
use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions};

pub fn target() -> Target {
let mut base = base::linux_musl::opts();
base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe"]);
base.max_atomic_width = Some(32);
base.stack_probes = StackProbeType::Inline;

Target {
llvm_target: "powerpc-unknown-linux-muslspe".into(),
metadata: crate::spec::TargetMetadata {
description: Some("PowerPC SPE Linux with musl".into()),
tier: Some(3),
host_tools: Some(false),
std: Some(true),
},
pointer_width: 32,
data_layout: "E-m:e-p:32:32-Fn32-i64:64-n32".into(),
arch: "powerpc".into(),
options: TargetOptions {
abi: "spe".into(),
endian: Endian::Big,
mcount: "_mcount".into(),
..base
},
}
}
|
@ -1,33 +0,0 @@
pub use rustc_middle::traits::query::type_op::Eq;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};

use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
use crate::traits::ObligationCtxt;

impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> {
type QueryResponse = ();

fn try_fast_path(
_tcx: TyCtxt<'tcx>,
key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
) -> Option<Self::QueryResponse> {
if key.value.a == key.value.b { Some(()) } else { None }
}

fn perform_query(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_eq(canonicalized)
}

fn perform_locally_with_next_solver(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Self>,
) -> Result<Self::QueryResponse, NoSolution> {
ocx.eq(&ObligationCause::dummy(), key.param_env, key.value.a, key.value.b)?;
Ok(())
}
}
|
@ -16,12 +16,10 @@ use crate::traits::{ObligationCause, ObligationCtxt};

pub mod ascribe_user_type;
pub mod custom;
pub mod eq;
pub mod implied_outlives_bounds;
pub mod normalize;
pub mod outlives;
pub mod prove_predicate;
pub mod subtype;

pub use rustc_middle::traits::query::type_op::*;

@ -170,44 +168,12 @@ where
// collecting region constraints via `region_constraints`.
let (mut output, _) = scrape_region_constraints(
infcx,
|_ocx| {
let (output, ei, mut obligations, _) =
|ocx| {
let (output, ei, obligations, _) =
Q::fully_perform_into(self, infcx, &mut region_constraints, span)?;
error_info = ei;

// Typically, instantiating NLL query results does not
// create obligations. However, in some cases there
// are unresolved type variables, and unify them *can*
// create obligations. In that case, we have to go
// fulfill them. We do this via a (recursive) query.
while !obligations.is_empty() {
trace!("{:#?}", obligations);
let mut progress = false;
for obligation in std::mem::take(&mut obligations) {
let obligation = infcx.resolve_vars_if_possible(obligation);
match ProvePredicate::fully_perform_into(
obligation.param_env.and(ProvePredicate::new(obligation.predicate)),
infcx,
&mut region_constraints,
span,
) {
Ok(((), _, new, certainty)) => {
obligations.extend(new);
progress = true;
if let Certainty::Ambiguous = certainty {
obligations.push(obligation);
}
}
Err(_) => obligations.push(obligation),
}
}
if !progress {
infcx.dcx().span_bug(
span,
format!("ambiguity processing {obligations:?} from {self:?}"),
);
}
}
ocx.register_obligations(obligations);
Ok(output)
},
"fully_perform",
|
@ -1,30 +0,0 @@
pub use rustc_middle::traits::query::type_op::Subtype;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};

use crate::infer::canonical::{Canonical, CanonicalQueryResponse};
use crate::traits::ObligationCtxt;

impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> {
type QueryResponse = ();

fn try_fast_path(_tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
if key.value.sub == key.value.sup { Some(()) } else { None }
}

fn perform_query(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Result<CanonicalQueryResponse<'tcx, ()>, NoSolution> {
tcx.type_op_subtype(canonicalized)
}

fn perform_locally_with_next_solver(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Self>,
) -> Result<Self::QueryResponse, NoSolution> {
ocx.sub(&ObligationCause::dummy(), key.param_env, key.value.sub, key.value.sup)?;
Ok(())
}
}
|
@ -10,18 +10,14 @@ use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::query::type_op::ascribe_user_type::{
type_op_ascribe_user_type_with_span, AscribeUserType,
};
use rustc_trait_selection::traits::query::type_op::eq::Eq;
use rustc_trait_selection::traits::query::type_op::normalize::Normalize;
use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate;
use rustc_trait_selection::traits::query::type_op::subtype::Subtype;
use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, ObligationCtxt};

pub(crate) fn provide(p: &mut Providers) {
*p = Providers {
type_op_ascribe_user_type,
type_op_eq,
type_op_prove_predicate,
type_op_subtype,
type_op_normalize_ty,
type_op_normalize_clause,
type_op_normalize_fn_sig,
@ -39,16 +35,6 @@ fn type_op_ascribe_user_type<'tcx>(
})
}

fn type_op_eq<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Eq { a, b }) = key.into_parts();
Ok(ocx.eq(&ObligationCause::dummy(), param_env, a, b)?)
})
}

fn type_op_normalize<'tcx, T>(
ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Normalize<T>>,
@ -91,16 +77,6 @@ fn type_op_normalize_poly_fn_sig<'tcx>(
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, type_op_normalize)
}

fn type_op_subtype<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Subtype { sub, sup }) = key.into_parts();
Ok(ocx.sup(&ObligationCause::dummy(), param_env, sup, sub)?)
})
}

fn type_op_prove_predicate<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
|
@ -34,13 +34,12 @@ impl<T> Ready<T> {
/// # Examples
///
/// ```
/// #![feature(ready_into_inner)]
/// use std::future;
///
/// let a = future::ready(1);
/// assert_eq!(a.into_inner(), 1);
/// ```
#[unstable(feature = "ready_into_inner", issue = "101196")]
#[stable(feature = "ready_into_inner", since = "CURRENT_RUSTC_VERSION")]
#[must_use]
#[inline]
pub fn into_inner(self) -> T {
|
@ -317,3 +317,60 @@ impl<I: Iterator + TrustedRandomAccess> SpecTake for Take<I> {
}
}
}

#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")]
impl<T: Clone> DoubleEndedIterator for Take<crate::iter::Repeat<T>> {
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
self.next()
}

#[inline]
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
self.nth(n)
}

#[inline]
fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R
where
Self: Sized,
Fold: FnMut(Acc, Self::Item) -> R,
R: Try<Output = Acc>,
{
self.try_fold(init, fold)
}

#[inline]
fn rfold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc
where
Self: Sized,
Fold: FnMut(Acc, Self::Item) -> Acc,
{
self.fold(init, fold)
}

#[inline]
#[rustc_inherit_overflow_checks]
fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
self.advance_by(n)
}
}

// Note: It may be tempting to impl DoubleEndedIterator for Take<RepeatWith>.
// One must fight that temptation, since such an implementation wouldn't be
// correct: we have no way to return the value of the nth invocation of the
// repeater followed by the (n-1)st without remembering all the results.

#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")]
impl<T: Clone> ExactSizeIterator for Take<crate::iter::Repeat<T>> {
fn len(&self) -> usize {
self.n
}
}

#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")]
impl<F: FnMut() -> A, A> ExactSizeIterator for Take<crate::iter::RepeatWith<F>> {
fn len(&self) -> usize {
self.n
}
}
|
@ -1060,7 +1060,7 @@ pub trait FnPtr: Copy + Clone {
}

/// Derive macro generating impls of traits related to smart pointers.
#[rustc_builtin_macro]
#[rustc_builtin_macro(SmartPointer, attributes(pointee))]
#[allow_internal_unstable(dispatch_from_dyn, coerce_unsized, unsize)]
#[unstable(feature = "derive_smart_pointer", issue = "123430")]
pub macro SmartPointer($item:item) {
|
@ -1,6 +1,7 @@
use super::display_buffer::DisplayBuffer;
use crate::cmp::Ordering;
use crate::fmt::{self, Write};
use crate::hash::{Hash, Hasher};
use crate::iter;
use crate::mem::transmute;
use crate::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Not};
@ -67,12 +68,22 @@ pub enum IpAddr {
/// assert!("0000000.0.0.0".parse::<Ipv4Addr>().is_err()); // first octet is a zero in octal
/// assert!("0xcb.0x0.0x71.0x00".parse::<Ipv4Addr>().is_err()); // all octets are in hex
/// ```
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Ipv4Addr {
octets: [u8; 4],
}

#[stable(feature = "rust1", since = "1.0.0")]
impl Hash for Ipv4Addr {
fn hash<H: Hasher>(&self, state: &mut H) {
// Hashers are often more efficient at hashing a fixed-width integer
// than a bytestring, so convert before hashing. We don't use to_bits()
// here as that may involve a byteswap which is unnecessary.
u32::from_ne_bytes(self.octets).hash(state);
}
}
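
// A minimal property sketch for this manual impl (assumed test, not part of
// the patch): the octets are only rerouted through a native-endian integer,
// so the `Hash`/`Eq` contract still holds.
//
//     use std::net::Ipv4Addr;
//     let a = Ipv4Addr::new(192, 168, 0, 1);
//     let b = Ipv4Addr::new(192, 168, 0, 1);
//     assert_eq!(a, b); // equal values, therefore equal hashes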

/// An IPv6 address.
///
/// IPv6 addresses are defined as 128-bit integers in [IETF RFC 4291].
@ -149,12 +160,22 @@ pub struct Ipv4Addr {
/// assert_eq!("::1".parse(), Ok(localhost));
/// assert_eq!(localhost.is_loopback(), true);
/// ```
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Ipv6Addr {
octets: [u8; 16],
}

#[stable(feature = "rust1", since = "1.0.0")]
impl Hash for Ipv6Addr {
fn hash<H: Hasher>(&self, state: &mut H) {
// Hashers are often more efficient at hashing a fixed-width integer
// than a bytestring, so convert before hashing. We don't use to_bits()
// here as that may involve unnecessary byteswaps.
u128::from_ne_bytes(self.octets).hash(state);
}
}

/// Scope of an [IPv6 multicast address] as defined in [IETF RFC 7346 section 2].
///
/// # Stability Guarantees
|
@ -656,8 +656,6 @@ impl<T> Option<T> {
/// # Examples
///
/// ```
/// #![feature(is_none_or)]
///
/// let x: Option<u32> = Some(2);
/// assert_eq!(x.is_none_or(|x| x > 1), true);
///
@ -669,7 +667,7 @@ impl<T> Option<T> {
/// ```
#[must_use]
#[inline]
#[unstable(feature = "is_none_or", issue = "126383")]
#[stable(feature = "is_none_or", since = "CURRENT_RUSTC_VERSION")]
pub fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool {
match self {
None => true,
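
// Usage sketch now that the feature gate is gone:
//
//     let x: Option<u32> = None;
//     assert!(x.is_none_or(|v| v > 1)); // vacuously true for `None`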
|
@ -530,10 +530,18 @@ impl Waker {

/// Returns a reference to a `Waker` that does nothing when used.
///
// Note! Much of the documentation for this method is duplicated
// in the docs for `LocalWaker::noop`.
// If you edit it, consider editing the other copy too.
//
/// This is mostly useful for writing tests that need a [`Context`] to poll
/// some futures, but are not expecting those futures to wake the waker or
/// do not need to do anything specific if it happens.
///
/// More generally, using `Waker::noop()` to poll a future
/// means discarding the notification of when the future should be polled again.
/// So it should only be used when such a notification will not be needed to make progress.
///
/// If an owned `Waker` is needed, `clone()` this one.
///
/// # Examples
@ -783,12 +791,22 @@ impl LocalWaker {
Self { waker }
}

/// Creates a new `LocalWaker` that does nothing when `wake` is called.
/// Returns a reference to a `LocalWaker` that does nothing when used.
///
// Note! Much of the documentation for this method is duplicated
// in the docs for `Waker::noop`.
// If you edit it, consider editing the other copy too.
//
/// This is mostly useful for writing tests that need a [`Context`] to poll
/// some futures, but are not expecting those futures to wake the waker or
/// do not need to do anything specific if it happens.
///
/// More generally, using `LocalWaker::noop()` to poll a future
/// means discarding the notification of when the future should be polled again.
/// So it should only be used when such a notification will not be needed to make progress.
///
/// If an owned `LocalWaker` is needed, `clone()` this one.
///
/// # Examples
///
/// ```
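
// A minimal usage sketch of the noop wakers documented above (assumed test
// code): they provide a `Context` with no wake-up plumbing attached.
//
//     use std::task::{Context, Waker};
//     let mut cx = Context::from_waker(Waker::noop());
//     // poll a future with `cx`, discarding any wake notifications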
|
@ -170,3 +170,93 @@ fn test_byref_take_consumed_items() {
assert_eq!(count, 70);
assert_eq!(inner, 90..90);
}

#[test]
fn test_exact_size_take_repeat() {
let mut iter = core::iter::repeat(42).take(40);
assert_eq!((40, Some(40)), iter.size_hint());
assert_eq!(40, iter.len());

assert_eq!(Some(42), iter.next());
assert_eq!((39, Some(39)), iter.size_hint());
assert_eq!(39, iter.len());

assert_eq!(Some(42), iter.next_back());
assert_eq!((38, Some(38)), iter.size_hint());
assert_eq!(38, iter.len());

assert_eq!(Some(42), iter.nth(3));
assert_eq!((34, Some(34)), iter.size_hint());
assert_eq!(34, iter.len());

assert_eq!(Some(42), iter.nth_back(3));
assert_eq!((30, Some(30)), iter.size_hint());
assert_eq!(30, iter.len());

assert_eq!(Ok(()), iter.advance_by(10));
assert_eq!((20, Some(20)), iter.size_hint());
assert_eq!(20, iter.len());

assert_eq!(Ok(()), iter.advance_back_by(10));
assert_eq!((10, Some(10)), iter.size_hint());
assert_eq!(10, iter.len());
}

#[test]
fn test_exact_size_take_repeat_with() {
let mut counter = 0;
let mut iter = core::iter::repeat_with(move || {
counter += 1;
counter
})
.take(40);
assert_eq!((40, Some(40)), iter.size_hint());
assert_eq!(40, iter.len());

assert_eq!(Some(1), iter.next());
assert_eq!((39, Some(39)), iter.size_hint());
assert_eq!(39, iter.len());

assert_eq!(Some(5), iter.nth(3));
assert_eq!((35, Some(35)), iter.size_hint());
assert_eq!(35, iter.len());

assert_eq!(Ok(()), iter.advance_by(10));
assert_eq!((25, Some(25)), iter.size_hint());
assert_eq!(25, iter.len());

assert_eq!(Some(16), iter.next());
assert_eq!((24, Some(24)), iter.size_hint());
assert_eq!(24, iter.len());
}

// This is https://github.com/rust-lang/rust/issues/104729 with all uses of
// repeat(0) replaced by repeat(0).take(20).
#[test]
fn test_reverse_on_zip() {
let vec_1 = [1; 10];

let zipped_iter = vec_1.iter().copied().zip(core::iter::repeat(0).take(20));

// Forward
for (one, zero) in zipped_iter {
assert_eq!((1, 0), (one, zero));
}

let rev_vec_iter = vec_1.iter().rev();
let rev_repeat_iter = std::iter::repeat(0).take(20).rev();

// Manual reversed zip
let rev_zipped_iter = rev_vec_iter.zip(rev_repeat_iter);

for (&one, zero) in rev_zipped_iter {
assert_eq!((1, 0), (one, zero));
}

let zipped_iter = vec_1.iter().zip(core::iter::repeat(0).take(20));

|
for (&one, zero) in zipped_iter.rev() {
assert_eq!((1, 0), (one, zero));
}
}
|
@ -109,13 +109,21 @@ pub trait CommandExt: Sealed {
/// Schedules a closure to be run just before the `exec` function is
/// invoked.
///
/// This method is stable and usable, but it should be unsafe. To fix
/// that, it got deprecated in favor of the unsafe [`pre_exec`].
/// `before_exec` used to be a safe method, but it needs to be unsafe since the closure may only
/// perform operations that are *async-signal-safe*. Hence it got deprecated in favor of the
/// unsafe [`pre_exec`]. Meanwhile, Rust gained the ability to make an existing safe method
/// fully unsafe in a new edition, which is how `before_exec` became `unsafe`. It still also
/// remains deprecated; `pre_exec` should be used instead.
///
/// [`pre_exec`]: CommandExt::pre_exec
#[stable(feature = "process_exec", since = "1.15.0")]
#[deprecated(since = "1.37.0", note = "should be unsafe, use `pre_exec` instead")]
fn before_exec<F>(&mut self, f: F) -> &mut process::Command
#[cfg_attr(bootstrap, rustc_deprecated_safe_2024)]
#[cfg_attr(
not(bootstrap),
rustc_deprecated_safe_2024(audit_that = "the closure is async-signal-safe")
)]
unsafe fn before_exec<F>(&mut self, f: F) -> &mut process::Command
where
F: FnMut() -> io::Result<()> + Send + Sync + 'static,
{
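
// Hedged migration sketch: `pre_exec` is the documented replacement and makes
// the async-signal-safety obligation explicit at the call site, e.g.
//
//     use std::os::unix::process::CommandExt;
//     let mut cmd = std::process::Command::new("ls");
//     unsafe {
//         cmd.pre_exec(|| Ok(())); // closure must be async-signal-safe
//     }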
|
@ -412,7 +412,6 @@ impl Builder {
/// # Examples
///
/// ```
/// #![feature(thread_spawn_unchecked)]
/// use std::thread;
///
/// let builder = thread::Builder::new();
@ -433,7 +432,7 @@ impl Builder {
/// ```
///
/// [`io::Result`]: crate::io::Result
#[unstable(feature = "thread_spawn_unchecked", issue = "55132")]
#[stable(feature = "thread_spawn_unchecked", since = "CURRENT_RUSTC_VERSION")]
pub unsafe fn spawn_unchecked<F, T>(self, f: F) -> io::Result<JoinHandle<T>>
where
F: FnOnce() -> T,
|
@ -36,6 +36,14 @@ macro_rules! check_ci_llvm {
};
}

/// This file is embedded in the overlay directory of the tarball sources. It is
/// useful in scenarios where developers want to see how the tarball sources were
/// generated.
///
/// We also use this file to compare the host's config.toml against the CI rustc builder
/// configuration to detect any incompatible options.
pub(crate) const BUILDER_CONFIG_FILENAME: &str = "builder-config";

#[derive(Clone, Default)]
pub enum DryRun {
/// This isn't a dry run.
@ -47,7 +55,7 @@ pub enum DryRun {
UserSelected,
}

#[derive(Copy, Clone, Default, PartialEq, Eq)]
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)]
pub enum DebuginfoLevel {
#[default]
None,
@ -117,7 +125,7 @@ impl Display for DebuginfoLevel {
/// 2) MSVC
/// - Self-contained: `-Clinker=<path to rust-lld>`
/// - External: `-Clinker=lld`
#[derive(Default, Copy, Clone)]
#[derive(Copy, Clone, Default, Debug, PartialEq)]
pub enum LldMode {
/// Do not use LLD
#[default]
@ -1203,40 +1211,42 @@ impl Config {
}
}

pub fn parse(flags: Flags) -> Config {
#[cfg(test)]
fn get_toml(_: &Path) -> TomlConfig {
TomlConfig::default()
}

#[cfg(not(test))]
fn get_toml(file: &Path) -> TomlConfig {
let contents =
t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
// Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
// TomlConfig and sub types to be monomorphized 5x by toml.
toml::from_str(&contents)
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
.unwrap_or_else(|err| {
if let Ok(Some(changes)) = toml::from_str(&contents)
.and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table)).map(|change_id| change_id.inner.map(crate::find_recent_config_change_ids))
{
if !changes.is_empty() {
println!(
"WARNING: There have been changes to x.py since you last updated:\n{}",
crate::human_readable_changes(&changes)
);
}
}

eprintln!("failed to parse TOML configuration '{}': {err}", file.display());
exit!(2);
})
}
Self::parse_inner(flags, get_toml)
#[cfg(test)]
fn get_toml(_: &Path) -> Result<TomlConfig, toml::de::Error> {
Ok(TomlConfig::default())
}

pub(crate) fn parse_inner(mut flags: Flags, get_toml: impl Fn(&Path) -> TomlConfig) -> Config {
#[cfg(not(test))]
fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
let contents =
t!(fs::read_to_string(file), format!("config file {} not found", file.display()));
// Deserialize to Value and then TomlConfig to prevent the Deserialize impl of
// TomlConfig and sub types to be monomorphized 5x by toml.
toml::from_str(&contents)
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
.inspect_err(|_| {
if let Ok(Some(changes)) = toml::from_str(&contents)
.and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table))
.map(|change_id| change_id.inner.map(crate::find_recent_config_change_ids))
{
if !changes.is_empty() {
println!(
"WARNING: There have been changes to x.py since you last updated:\n{}",
crate::human_readable_changes(&changes)
);
}
}
})
}

pub fn parse(flags: Flags) -> Config {
Self::parse_inner(flags, Self::get_toml)
}
|
||||
|
||||
pub(crate) fn parse_inner(
|
||||
mut flags: Flags,
|
||||
get_toml: impl Fn(&Path) -> Result<TomlConfig, toml::de::Error>,
|
||||
) -> Config {
|
||||
let mut config = Config::default_opts();
|
||||
|
||||
// Set flags.
|
||||
@ -1344,7 +1354,10 @@ impl Config {
|
||||
} else {
|
||||
toml_path.clone()
|
||||
});
|
||||
get_toml(&toml_path)
|
||||
get_toml(&toml_path).unwrap_or_else(|e| {
|
||||
eprintln!("ERROR: Failed to parse '{}': {e}", toml_path.display());
|
||||
exit!(2);
|
||||
})
|
||||
} else {
|
||||
config.config = None;
|
||||
TomlConfig::default()
|
||||
@ -1375,7 +1388,13 @@ impl Config {
|
||||
include_path.push("bootstrap");
|
||||
include_path.push("defaults");
|
||||
include_path.push(format!("config.{include}.toml"));
|
||||
let included_toml = get_toml(&include_path);
|
||||
let included_toml = get_toml(&include_path).unwrap_or_else(|e| {
|
||||
eprintln!(
|
||||
"ERROR: Failed to parse default config profile at '{}': {e}",
|
||||
include_path.display()
|
||||
);
|
||||
exit!(2);
|
||||
});
|
||||
toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate);
|
||||
}
|
||||
|
||||
@ -1591,24 +1610,6 @@ impl Config {
|
||||
let mut is_user_configured_rust_channel = false;
|
||||
|
||||
if let Some(rust) = toml.rust {
|
||||
if let Some(commit) = config.download_ci_rustc_commit(rust.download_rustc.clone()) {
|
||||
// Primarily used by CI runners to avoid handling download-rustc incompatible
|
||||
// options one by one on shell scripts.
|
||||
let disable_ci_rustc_if_incompatible =
|
||||
env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE")
|
||||
.is_some_and(|s| s == "1" || s == "true");
|
||||
|
||||
if let Err(e) = check_incompatible_options_for_ci_rustc(&rust) {
|
||||
if disable_ci_rustc_if_incompatible {
|
||||
config.download_rustc_commit = None;
|
||||
} else {
|
||||
panic!("{}", e);
|
||||
}
|
||||
} else {
|
||||
config.download_rustc_commit = Some(commit);
|
||||
}
|
||||
}
|
||||
|
||||
let Rust {
|
||||
optimize: optimize_toml,
|
||||
debug: debug_toml,
|
||||
@ -1656,7 +1657,7 @@ impl Config {
|
||||
new_symbol_mangling,
|
||||
profile_generate,
|
||||
profile_use,
|
||||
download_rustc: _,
|
||||
download_rustc,
|
||||
lto,
|
||||
validate_mir_opts,
|
||||
frame_pointers,
|
||||
@ -1668,6 +1669,8 @@ impl Config {
|
||||
is_user_configured_rust_channel = channel.is_some();
|
||||
set(&mut config.channel, channel.clone());
|
||||
|
||||
config.download_rustc_commit = config.download_ci_rustc_commit(download_rustc);
|
||||
|
||||
debug = debug_toml;
|
||||
debug_assertions = debug_assertions_toml;
|
||||
debug_assertions_std = debug_assertions_std_toml;
|
||||
@ -2345,6 +2348,45 @@ impl Config {
|
||||
None => None,
|
||||
Some(commit) => {
|
||||
self.download_ci_rustc(commit);
|
||||
|
||||
if let Some(config_path) = &self.config {
|
||||
let builder_config_path =
|
||||
self.out.join(self.build.triple).join("ci-rustc").join(BUILDER_CONFIG_FILENAME);
|
||||
|
||||
let ci_config_toml = match Self::get_toml(&builder_config_path) {
|
||||
Ok(ci_config_toml) => ci_config_toml,
|
||||
Err(e) if e.to_string().contains("unknown field") => {
|
||||
println!("WARNING: CI rustc has some fields that are no longer supported in bootstrap; download-rustc will be disabled.");
|
||||
println!("HELP: Consider rebasing to a newer commit if available.");
|
||||
return None;
|
||||
},
|
||||
Err(e) => {
|
||||
eprintln!("ERROR: Failed to parse CI rustc config at '{}': {e}", builder_config_path.display());
|
||||
exit!(2);
|
||||
},
|
||||
};
|
||||
|
||||
let current_config_toml = Self::get_toml(config_path).unwrap();
|
||||
|
||||
// Check the config compatibility
|
||||
// FIXME: this doesn't cover `--set` flags yet.
|
||||
let res = check_incompatible_options_for_ci_rustc(
|
||||
current_config_toml,
|
||||
ci_config_toml,
|
||||
);
|
||||
|
||||
let disable_ci_rustc_if_incompatible =
|
||||
env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE")
|
||||
.is_some_and(|s| s == "1" || s == "true");
|
||||
|
||||
if disable_ci_rustc_if_incompatible && res.is_err() {
|
||||
println!("WARNING: download-rustc is disabled with `DISABLE_CI_RUSTC_IF_INCOMPATIBLE` env.");
|
||||
return None;
|
||||
}
|
||||
|
||||
res.unwrap();
|
||||
}
|
||||
|
||||
Some(commit.clone())
|
||||
}
|
||||
})
|
||||
@ -2662,31 +2704,52 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks the CI rustc incompatible options by destructuring the `Rust` instance
|
||||
/// and makes sure that no rust options from config.toml are missed.
|
||||
fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> {
|
||||
/// Compares the current Rust options against those in the CI rustc builder and detects any incompatible options.
|
||||
/// It does this by destructuring the `Rust` instance to make sure every `Rust` field is covered and not missing.
|
||||
fn check_incompatible_options_for_ci_rustc(
|
||||
current_config_toml: TomlConfig,
|
||||
ci_config_toml: TomlConfig,
|
||||
) -> Result<(), String> {
|
||||
macro_rules! err {
|
||||
($name:expr) => {
|
||||
if $name.is_some() {
|
||||
return Err(format!(
|
||||
"ERROR: Setting `rust.{}` is incompatible with `rust.download-rustc`.",
|
||||
stringify!($name).replace("_", "-")
|
||||
));
|
||||
}
|
||||
($current:expr, $expected:expr) => {
|
||||
if let Some(current) = &$current {
|
||||
if Some(current) != $expected.as_ref() {
|
||||
return Err(format!(
|
||||
"ERROR: Setting `rust.{}` is incompatible with `rust.download-rustc`. \
|
||||
Current value: {:?}, Expected value(s): {}{:?}",
|
||||
stringify!($expected).replace("_", "-"),
|
||||
$current,
|
||||
if $expected.is_some() { "None/" } else { "" },
|
||||
$expected,
|
||||
));
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! warn {
|
||||
($name:expr) => {
|
||||
if $name.is_some() {
|
||||
println!(
|
||||
"WARNING: `rust.{}` has no effect with `rust.download-rustc`.",
|
||||
stringify!($name).replace("_", "-")
|
||||
);
|
||||
}
|
||||
($current:expr, $expected:expr) => {
|
||||
if let Some(current) = &$current {
|
||||
if Some(current) != $expected.as_ref() {
|
||||
println!(
|
||||
"WARNING: `rust.{}` has no effect with `rust.download-rustc`. \
|
||||
Current value: {:?}, Expected value(s): {}{:?}",
|
||||
stringify!($expected).replace("_", "-"),
|
||||
$current,
|
||||
if $expected.is_some() { "None/" } else { "" },
|
||||
$expected,
|
||||
);
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
let (Some(current_rust_config), Some(ci_rust_config)) =
|
||||
(current_config_toml.rust, ci_config_toml.rust)
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let Rust {
|
||||
// Following options are the CI rustc incompatible ones.
|
||||
optimize,
|
||||
@ -2744,7 +2807,7 @@ fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> {
|
||||
download_rustc: _,
|
||||
validate_mir_opts: _,
|
||||
frame_pointers: _,
|
||||
} = rust;
|
||||
} = ci_rust_config;
|
||||
|
||||
// There are two kinds of checks for CI rustc incompatible options:
|
||||
// 1. Checking an option that may change the compiler behaviour/output.
|
||||
@ -2752,22 +2815,23 @@ fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> {
|
||||
//
|
||||
// If the option belongs to the first category, we call `err` macro for a hard error;
|
||||
// otherwise, we just print a warning with `warn` macro.
|
||||
err!(optimize);
|
||||
err!(debug_logging);
|
||||
err!(debuginfo_level_rustc);
|
||||
err!(default_linker);
|
||||
err!(rpath);
|
||||
err!(strip);
|
||||
err!(stack_protector);
|
||||
err!(lld_mode);
|
||||
err!(llvm_tools);
|
||||
err!(llvm_bitcode_linker);
|
||||
err!(jemalloc);
|
||||
err!(lto);
|
||||
|
||||
warn!(channel);
|
||||
warn!(description);
|
||||
warn!(incremental);
|
||||
err!(current_rust_config.optimize, optimize);
|
||||
err!(current_rust_config.debug_logging, debug_logging);
|
||||
err!(current_rust_config.debuginfo_level_rustc, debuginfo_level_rustc);
|
||||
err!(current_rust_config.rpath, rpath);
|
||||
err!(current_rust_config.strip, strip);
|
||||
err!(current_rust_config.lld_mode, lld_mode);
|
||||
err!(current_rust_config.llvm_tools, llvm_tools);
|
||||
err!(current_rust_config.llvm_bitcode_linker, llvm_bitcode_linker);
|
||||
err!(current_rust_config.jemalloc, jemalloc);
|
||||
err!(current_rust_config.default_linker, default_linker);
|
||||
err!(current_rust_config.stack_protector, stack_protector);
|
||||
err!(current_rust_config.lto, lto);
|
||||
|
||||
warn!(current_rust_config.channel, channel);
|
||||
warn!(current_rust_config.description, description);
|
||||
warn!(current_rust_config.incremental, incremental);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
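To make the two-argument macro arms above concrete, here is a standalone sketch (function name hypothetical, not part of bootstrap) of the comparison they perform: a locally configured option must be unset or equal to the CI builder's value.

```rust
use std::fmt::Debug;

// Hypothetical free-function version of the two-argument `err!` arm.
fn check_option<T: Debug + PartialEq>(
    name: &str,
    current: &Option<T>,
    expected: &Option<T>,
) -> Result<(), String> {
    if let Some(current) = current {
        if Some(current) != expected.as_ref() {
            return Err(format!(
                "ERROR: Setting `rust.{name}` is incompatible with `rust.download-rustc`. \
                 Current value: {current:?}, Expected value(s): {expected:?}"
            ));
        }
    }
    Ok(())
}

fn main() {
    assert!(check_option("lto", &Some("thin"), &Some("thin")).is_ok());
    assert!(check_option("lto", &None::<&str>, &Some("thin")).is_ok());
    assert!(check_option("lto", &Some("fat"), &Some("thin")).is_err());
}
```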
@@ -14,7 +14,7 @@ use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig};
fn parse(config: &str) -> Config {
    Config::parse_inner(
        Flags::parse(&["check".to_string(), "--config=/does/not/exist".to_string()]),
        |&_| toml::from_str(&config).unwrap(),
        |&_| toml::from_str(&config),
    )
}

@@ -151,7 +151,6 @@ runner = "x86_64-runner"

"#,
            )
            .unwrap()
        },
    );
    assert_eq!(config.change_id, Some(1), "setting top-level value");
@@ -208,13 +207,13 @@ fn override_toml_duplicate() {
            "--set=change-id=1".to_owned(),
            "--set=change-id=2".to_owned(),
        ]),
        |&_| toml::from_str("change-id = 0").unwrap(),
        |&_| toml::from_str("change-id = 0"),
    );
}

#[test]
fn profile_user_dist() {
    fn get_toml(file: &Path) -> TomlConfig {
    fn get_toml(file: &Path) -> Result<TomlConfig, toml::de::Error> {
        let contents =
            if file.ends_with("config.toml") || env::var_os("RUST_BOOTSTRAP_CONFIG").is_some() {
                "profile = \"user\"".to_owned()
@@ -223,9 +222,7 @@ fn profile_user_dist() {
                std::fs::read_to_string(file).unwrap()
            };

        toml::from_str(&contents)
            .and_then(|table: toml::Value| TomlConfig::deserialize(table))
            .unwrap()
        toml::from_str(&contents).and_then(|table: toml::Value| TomlConfig::deserialize(table))
    }
    Config::parse_inner(Flags::parse(&["check".to_owned()]), get_toml);
}
@@ -9,6 +9,7 @@ use std::sync::OnceLock;
use build_helper::ci::CiEnv;
use xz2::bufread::XzDecoder;

use crate::core::config::BUILDER_CONFIG_FILENAME;
use crate::utils::exec::{command, BootstrapCommand};
use crate::utils::helpers::{check_run, exe, hex_encode, move_file, program_out_of_date};
use crate::{t, Config};
@@ -273,11 +274,12 @@ impl Config {

let mut tar = tar::Archive::new(decompressor);

let is_ci_rustc = dst.ends_with("ci-rustc");

// `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding
// it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow.
// Cache the entries when we extract it so we only have to read it once.
let mut recorded_entries =
    if dst.ends_with("ci-rustc") { recorded_entries(dst, pattern) } else { None };
let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None };

for member in t!(tar.entries()) {
    let mut member = t!(member);
@@ -287,10 +289,12 @@ impl Config {
        continue;
    }
    let mut short_path = t!(original_path.strip_prefix(directory_prefix));
    if !short_path.starts_with(pattern) {
    let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME);

    if !(short_path.starts_with(pattern) || (is_ci_rustc && is_builder_config)) {
        continue;
    }
    short_path = t!(short_path.strip_prefix(pattern));
    short_path = short_path.strip_prefix(pattern).unwrap_or(short_path);
    let dst_path = dst.join(short_path);
    self.verbose(|| {
        println!("extracting {} to {}", original_path.display(), dst.display())
@@ -703,9 +707,7 @@ download-rustc = false
let file_times = fs::FileTimes::new().set_accessed(now).set_modified(now);

let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build));
let llvm_config_file = t!(File::options().write(true).open(llvm_config));

t!(llvm_config_file.set_times(file_times));
t!(crate::utils::helpers::set_file_times(llvm_config, file_times));

if self.should_fix_bins_and_dylibs() {
    let llvm_lib = llvm_root.join("lib");
@@ -37,7 +37,9 @@ use crate::core::builder;
use crate::core::builder::{Builder, Kind};
use crate::core::config::{flags, DryRun, LldMode, LlvmLibunwind, Target, TargetSelection};
use crate::utils::exec::{command, BehaviorOnFailure, BootstrapCommand, CommandOutput, OutputMode};
use crate::utils::helpers::{self, dir_is_empty, exe, libdir, mtime, output, symlink_dir};
use crate::utils::helpers::{
    self, dir_is_empty, exe, libdir, mtime, output, set_file_times, symlink_dir,
};

mod core;
mod utils;
@@ -1056,11 +1058,29 @@ Executed at: {executed_at}"#,
    }
};

let fail = |message: &str| {
let fail = |message: &str, output: CommandOutput| -> ! {
    if self.is_verbose() {
        println!("{message}");
    } else {
        println!("Command has failed. Rerun with -v to see more details.");
        let (stdout, stderr) = (output.stdout_if_present(), output.stderr_if_present());
        // If the command captures output, the user would not see any indication that
        // it has failed. In this case, print a more verbose error to provide more
        // context.
        if stdout.is_some() || stderr.is_some() {
            if let Some(stdout) =
                output.stdout_if_present().take_if(|s| !s.trim().is_empty())
            {
                println!("STDOUT:\n{stdout}\n");
            }
            if let Some(stderr) =
                output.stderr_if_present().take_if(|s| !s.trim().is_empty())
            {
                println!("STDERR:\n{stderr}\n");
            }
            println!("Command {command:?} has failed. Rerun with -v to see more details.");
        } else {
            println!("Command has failed. Rerun with -v to see more details.");
        }
    }
    exit!(1);
};
@@ -1069,14 +1089,14 @@ Executed at: {executed_at}"#,
match command.failure_behavior {
    BehaviorOnFailure::DelayFail => {
        if self.fail_fast {
            fail(&message);
            fail(&message, output);
        }

        let mut failures = self.delayed_failures.borrow_mut();
        failures.push(message);
    }
    BehaviorOnFailure::Exit => {
        fail(&message);
        fail(&message, output);
    }
    BehaviorOnFailure::Ignore => {
        // If failures are allowed, either the error has been printed already
@@ -1774,21 +1794,20 @@ Executed at: {executed_at}"#,
    }
}
if let Ok(()) = fs::hard_link(&src, dst) {
    // Attempt to "easy copy" by creating a hard link
    // (symlinks don't work on windows), but if that fails
    // just fall back to a slow `copy` operation.
    // Attempt to "easy copy" by creating a hard link (symlinks are privileged on windows),
    // but if that fails just fall back to a slow `copy` operation.
} else {
    if let Err(e) = fs::copy(&src, dst) {
        panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e)
    }
    t!(fs::set_permissions(dst, metadata.permissions()));

    // Restore file times because changing permissions on e.g. Linux using `chmod` can cause
    // file access time to change.
    let file_times = fs::FileTimes::new()
        .set_accessed(t!(metadata.accessed()))
        .set_modified(t!(metadata.modified()));

    let dst_file = t!(fs::File::open(dst));
    t!(dst_file.set_times(file_times));
    t!(set_file_times(dst, file_times));
}
}
@@ -291,6 +291,11 @@ impl CommandOutput {
    .expect("Cannot parse process stdout as UTF-8")
}

#[must_use]
pub fn stdout_if_present(&self) -> Option<String> {
    self.stdout.as_ref().and_then(|s| String::from_utf8(s.clone()).ok())
}

#[must_use]
pub fn stdout_if_ok(&self) -> Option<String> {
    if self.is_success() { Some(self.stdout()) } else { None }
@@ -303,6 +308,11 @@ impl CommandOutput {
    )
    .expect("Cannot parse process stderr as UTF-8")
}

#[must_use]
pub fn stderr_if_present(&self) -> Option<String> {
    self.stderr.as_ref().and_then(|s| String::from_utf8(s.clone()).ok())
}
}

impl Default for CommandOutput {
@@ -544,3 +544,15 @@ pub fn get_closest_merge_base_commit(

    Ok(output_result(git.as_command_mut())?.trim().to_owned())
}

/// Sets the file times for a given file at `path`.
pub fn set_file_times<P: AsRef<Path>>(path: P, times: fs::FileTimes) -> io::Result<()> {
    // Windows requires file to be writable to modify file times. But on Linux CI the file does not
    // need to be writable to modify file times and might be read-only.
    let f = if cfg!(windows) {
        fs::File::options().write(true).open(path)?
    } else {
        fs::File::open(path)?
    };
    f.set_times(times)
}
@@ -3,7 +3,8 @@ use std::io::Write;
use std::path::PathBuf;

use crate::utils::helpers::{
    check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, symlink_dir,
    check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, set_file_times,
    symlink_dir,
};
use crate::{Config, Flags};

@@ -92,3 +93,25 @@ fn test_symlink_dir() {
    #[cfg(not(windows))]
    fs::remove_file(link_path).unwrap();
}

#[test]
fn test_set_file_times_sanity_check() {
    let config =
        Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]));
    let tempfile = config.tempdir().join(".tmp-file");

    {
        File::create(&tempfile).unwrap().write_all(b"dummy value").unwrap();
        assert!(tempfile.exists());
    }

    // This might only fail on Windows (if the file is read-only by default and we then try to
    // modify its times).
    let unix_epoch = std::time::SystemTime::UNIX_EPOCH;
    let target_time = fs::FileTimes::new().set_accessed(unix_epoch).set_modified(unix_epoch);
    set_file_times(&tempfile, target_time).unwrap();

    let found_metadata = fs::metadata(tempfile).unwrap();
    assert_eq!(found_metadata.accessed().unwrap(), unix_epoch);
    assert_eq!(found_metadata.modified().unwrap(), unix_epoch)
}
@@ -9,6 +9,7 @@ use std::path::{Path, PathBuf};

use crate::core::build_steps::dist::distdir;
use crate::core::builder::{Builder, Kind};
use crate::core::config::BUILDER_CONFIG_FILENAME;
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::{move_file, t};
use crate::utils::{channel, helpers};
@@ -320,7 +321,7 @@ impl<'a> Tarball<'a> {

// Add config file if present.
if let Some(config) = &self.builder.config.config {
    self.add_renamed_file(config, &self.overlay_dir, "builder-config");
    self.add_renamed_file(config, &self.overlay_dir, BUILDER_CONFIG_FILENAME);
}

for file in self.overlay.legal_and_readme() {
@@ -1 +1 @@
Subproject commit 67fa536768013d9d5a13f3a06790521d511ef711
Subproject commit 04bc1396bb857f35b5dda1d773c9571e1f253304

@@ -1 +1 @@
Subproject commit 5454de3d12b9ccc6375b629cf7ccda8264640aac
Subproject commit aeeb287d41a0332c210da122bea8e0e91844ab3e

@@ -1 +1 @@
Subproject commit 0ebdacadbda8ce2cd8fbf93985e15af61a7ab895
Subproject commit 6ecf95c5f2bfa0e6314dfe282bf775fd1405f7e9

@@ -1 +1 @@
Subproject commit 2e191814f163ee1e77e2d6094eee4dd78a289c5b
Subproject commit 62cd0df95061ba0ac886333f5cd7f3012f149da1

@@ -1 +1 @@
Subproject commit 89aecb6951b77bc746da73df8c9f2b2ceaad494a
Subproject commit 8f94061936e492159f4f6c09c0f917a7521893ff

@@ -1 +1 @@
Subproject commit 0c4d55cb59fe440d1a630e4e5774d043968edb3f
Subproject commit 43d83780db545a1ed6d45773312fc578987e3968
@@ -61,6 +61,7 @@
- [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md)
- [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md)
- [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md)
- [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md)
- [powerpc64-ibm-aix](platform-support/aix.md)
- [riscv32im-risc0-zkvm-elf](platform-support/riscv32im-risc0-zkvm-elf.md)
- [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md)
@@ -332,6 +332,7 @@ target | std | host | notes
`msp430-none-elf` | * | | 16-bit MSP430 microcontrollers
`powerpc-unknown-linux-gnuspe` | ✓ | | PowerPC SPE Linux
`powerpc-unknown-linux-musl` | ? | | PowerPC Linux with musl 1.2.3
[`powerpc-unknown-linux-muslspe`](platform-support/powerpc-unknown-linux-muslspe.md) | ? | | PowerPC SPE Linux
[`powerpc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD 32-bit powerpc systems
[`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | |
[`powerpc-wrs-vxworks-spe`](platform-support/vxworks.md) | ✓ | |
@@ -61,3 +61,8 @@ Currently the `riscv64-linux-android` target requires the following architecture
* `Zba` (address calculation instructions)
* `Zbb` (base instructions)
* `Zbs` (single-bit instructions)

### aarch64-linux-android on Nightly compilers

Once the `-Zfixed-x18` compiler flag is supplied, the [`ShadowCallStack` sanitizer](https://releases.llvm.org/7.0.1/tools/clang/docs/ShadowCallStack.html)
instrumentation is also made available by additionally supplying the compiler flag `-Zsanitizer=shadow-call-stack`.
@@ -0,0 +1,32 @@
# powerpc-unknown-linux-muslspe

**Tier: 3**

This target is very similar to the existing `powerpc-unknown-linux-musl` and `powerpc-unknown-linux-gnuspe` targets.
It adds PowerPC SPE support for musl. Note that the last GCC version with PowerPC SPE support is 8.4.0.

## Target maintainers

- [@BKPepe](https://github.com/BKPepe)

## Requirements

This target is cross-compiled. There is no support for `std`. There is no
default allocator, but it's possible to use `alloc` by supplying an allocator.

This target generates binaries in the ELF format.

## Building the target

This target was tested and used within the `OpenWrt` build system for CZ.NIC Turris 1.x routers using Freescale P2020.

## Building Rust programs

Rust does not yet ship pre-compiled artifacts for this target. To compile for
this target, you will either need to build Rust with the target enabled (see
"Building the target" above), or build your own copy of `core` by using
`build-std` or similar.

## Testing

This is a cross-compiled target and there is no support for running the rustc test suite.
@@ -515,6 +515,9 @@ pub fn no_documentation() {}

Note that the third item is the crate root, which in this case is undocumented.

If you want the JSON output to be displayed on `stdout` instead of having a file generated, you can
use `-o -`.

### `-w`/`--output-format`: output format

`--output-format json` emits documentation in the experimental
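A hedged sketch of exercising the new `-o -` behavior through the `run-make-support` wrapper that appears later in this diff (crate and method names assumed from that context): with `-` as the output path, the JSON blob is streamed to stdout instead of a file.

```rust
use run_make_support::rustdoc;

fn main() {
    // Roughly `rustdoc --output-format json -Zunstable-options -o - lib.rs`:
    // the serialized crate goes to stdout rather than `doc/<crate>.json`.
    rustdoc()
        .input("lib.rs")
        .arg("--output-format=json")
        .arg("-Zunstable-options")
        .output("-")
        .run();
}
```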
@@ -1,7 +1,7 @@
# `fixed-x18`

This option prevents the compiler from using the x18 register. It is only
supported on aarch64.
supported on `aarch64`.

From the [ABI spec][arm-abi]:

@@ -23,6 +23,11 @@ Currently, the `-Zsanitizer=shadow-call-stack` flag is only supported on
platforms that always treat x18 as a reserved register, and the `-Zfixed-x18`
flag is not required to use the sanitizer on such platforms. However, the
sanitizer may be supported on targets where this is not the case in the future.
On nightly compilers, one way to use the sanitizer on such targets today is to supply the
`-Zfixed-x18` flag explicitly for `aarch64` targets, so that the sanitizer becomes available
for instrumentation on targets like `aarch64-unknown-none`. However, discretion is still
required to make sure that the runtime support is in place for this sanitizer
to be effective.

It is undefined behavior for `-Zsanitizer=shadow-call-stack` code to call into
code where x18 is a temporary register. On the other hand, when you are *not*
@@ -787,6 +787,10 @@ A runtime must be provided by the application or operating system.

See the [Clang ShadowCallStack documentation][clang-scs] for more details.

* `aarch64-unknown-none`

In addition to support from a runtime by the application or operating system, the `-Zfixed-x18` flag is also mandatory.

# ThreadSanitizer

ThreadSanitizer is a data race detection tool. It is supported on the following
@@ -286,6 +286,9 @@ pub(crate) struct RenderOptions {
    pub(crate) no_emit_shared: bool,
    /// If `true`, HTML source code pages won't be generated.
    pub(crate) html_no_source: bool,
    /// This field is only used for the JSON output. If it's set to true, no file will be created
    /// and the content will be written directly to stdout.
    pub(crate) output_to_stdout: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -548,16 +551,17 @@ impl Options {
    dcx.fatal("the `--test` flag must be passed to enable `--no-run`");
}

let mut output_to_stdout = false;
let test_builder_wrappers =
    matches.opt_strs("test-builder-wrapper").iter().map(PathBuf::from).collect();
let out_dir = matches.opt_str("out-dir").map(|s| PathBuf::from(&s));
let output = matches.opt_str("output").map(|s| PathBuf::from(&s));
let output = match (out_dir, output) {
let output = match (matches.opt_str("out-dir"), matches.opt_str("output")) {
    (Some(_), Some(_)) => {
        dcx.fatal("cannot use both 'out-dir' and 'output' at once");
    }
    (Some(out_dir), None) => out_dir,
    (None, Some(output)) => output,
    (Some(out_dir), None) | (None, Some(out_dir)) => {
        output_to_stdout = out_dir == "-";
        PathBuf::from(out_dir)
    }
    (None, None) => PathBuf::from("doc"),
};

@@ -818,6 +822,7 @@ impl Options {
    call_locations,
    no_emit_shared: false,
    html_no_source,
    output_to_stdout,
};
Some((options, render_options))
}
@@ -9,16 +9,19 @@ mod import_finder;

use std::cell::RefCell;
use std::fs::{create_dir_all, File};
use std::io::{BufWriter, Write};
use std::io::{stdout, BufWriter, Write};
use std::path::PathBuf;
use std::rc::Rc;

use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::{DefId, DefIdSet};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_span::def_id::LOCAL_CRATE;
use rustdoc_json_types as types;
// It's important to use the FxHashMap from rustdoc_json_types here, instead of
// the one from rustc_data_structures, as they're different types due to sysroots.
// See #110051 and #127456 for details
use rustdoc_json_types::FxHashMap;

use crate::clean::types::{ExternalCrate, ExternalLocation};
use crate::clean::ItemKind;
@@ -37,7 +40,7 @@ pub(crate) struct JsonRenderer<'tcx> {
    /// level field of the JSON blob.
    index: Rc<RefCell<FxHashMap<types::Id, types::Item>>>,
    /// The directory where the blob will be written to.
    out_path: PathBuf,
    out_path: Option<PathBuf>,
    cache: Rc<Cache>,
    imported_items: DefIdSet,
}
@@ -97,6 +100,20 @@ impl<'tcx> JsonRenderer<'tcx> {
        })
        .unwrap_or_default()
}

fn write<T: Write>(
    &self,
    output: types::Crate,
    mut writer: BufWriter<T>,
    path: &str,
) -> Result<(), Error> {
    self.tcx
        .sess
        .time("rustdoc_json_serialization", || serde_json::ser::to_writer(&mut writer, &output))
        .unwrap();
    try_err!(writer.flush(), path);
    Ok(())
}
}

impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
@@ -120,7 +137,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
    JsonRenderer {
        tcx,
        index: Rc::new(RefCell::new(FxHashMap::default())),
        out_path: options.output,
        out_path: if options.output_to_stdout { None } else { Some(options.output) },
        cache: Rc::new(cache),
        imported_items,
    },
@@ -220,14 +237,11 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
    let index = (*self.index).clone().into_inner();

    debug!("Constructing Output");
    // This needs to be the default HashMap for compatibility with the public interface for
    // rustdoc-json-types
    #[allow(rustc::default_hash_types)]
    let output = types::Crate {
        root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())),
        crate_version: self.cache.crate_version.clone(),
        includes_private: self.cache.document_private,
        index: index.into_iter().collect(),
        index,
        paths: self
            .cache
            .paths
@@ -264,20 +278,21 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
            .collect(),
        format_version: types::FORMAT_VERSION,
    };
    let out_dir = self.out_path.clone();
    try_err!(create_dir_all(&out_dir), out_dir);
    if let Some(ref out_path) = self.out_path {
        let out_dir = out_path.clone();
        try_err!(create_dir_all(&out_dir), out_dir);

    let mut p = out_dir;
    p.push(output.index.get(&output.root).unwrap().name.clone().unwrap());
    p.set_extension("json");
    let mut file = BufWriter::new(try_err!(File::create(&p), p));
    self.tcx
        .sess
        .time("rustdoc_json_serialization", || serde_json::ser::to_writer(&mut file, &output))
        .unwrap();
    try_err!(file.flush(), p);

    Ok(())
        let mut p = out_dir;
        p.push(output.index.get(&output.root).unwrap().name.clone().unwrap());
        p.set_extension("json");
        self.write(
            output,
            BufWriter::new(try_err!(File::create(&p), p)),
            &p.display().to_string(),
        )
    } else {
        self.write(output, BufWriter::new(stdout()), "<stdout>")
    }
}

fn cache(&self) -> &Cache {
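The refactor above hinges on `BufWriter` being generic over any `Write` sink; a self-contained sketch (names hypothetical, not rustdoc's actual code) of the same file-or-stdout dispatch:

```rust
use std::fs::File;
use std::io::{stdout, BufWriter, Write};

// Serialize through one code path regardless of the sink, mirroring how
// `JsonRenderer::write` accepts a `BufWriter<T: Write>`.
fn emit<T: Write>(mut writer: BufWriter<T>, payload: &str) -> std::io::Result<()> {
    writer.write_all(payload.as_bytes())?;
    writer.flush()
}

fn main() -> std::io::Result<()> {
    let to_stdout = true; // e.g. rustdoc's `-o -` case
    if to_stdout {
        emit(BufWriter::new(stdout()), "{}")
    } else {
        emit(BufWriter::new(File::create("doc.json")?), "{}")
    }
}
```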
@@ -1 +1 @@
Subproject commit 57ae1a3474057fead2c438928ed368b3740bf0ec
Subproject commit ccf4c38bdd73f1a37ec266c73bdaef80e39f8cf6

@@ -5,7 +5,7 @@

use std::path::PathBuf;

use rustc_hash::FxHashMap;
pub use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};

/// The version of JSON output that this crate represents.

@@ -1 +1 @@
Subproject commit 0d8d22f83b066503f6b2b755925197e959e58b4f
Subproject commit 2f738d617c6ead388f899802dd1a7fd66858a691
@@ -470,7 +470,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
});

// Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl
while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token_kind == TokenKind::Ident) {
while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token == TokenKind::Ident) {
    let mut iter = iter
        .by_ref()
        .filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. }));
@@ -480,7 +480,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str>
    // matches `!{`
    match_tokens!(iter, Bang OpenBrace);
    if let Some(LintDeclSearchResult { range, .. }) =
        iter.find(|result| result.token_kind == TokenKind::CloseBrace)
        iter.find(|result| result.token == TokenKind::CloseBrace)
    {
        last_decl_curly_offset = Some(range.end);
    }
@@ -12,7 +12,6 @@
#![feature(let_chains)]
#![feature(trait_upcasting)]
#![feature(strict_overflow_ops)]
#![feature(is_none_or)]
// Configure clippy and other lints
#![allow(
    clippy::collapsible_else_if,
@@ -11,3 +11,4 @@ wasmparser = { version = "0.214", default-features = false, features = ["std"] }
regex = "1.8" # 1.8 to avoid memchr 2.6.0, as 2.5.0 is pinned in the workspace
gimli = "0.31.0"
build_helper = { path = "../build_helper" }
serde_json = "1.0"
@@ -285,12 +285,24 @@ impl LlvmAr {
    self
}

/// Like `obj_to_ar` except creating a thin archive.
pub fn obj_to_thin_ar(&mut self) -> &mut Self {
    self.cmd.arg("rcus").arg("--thin");
    self
}

/// Extract archive members back to files.
pub fn extract(&mut self) -> &mut Self {
    self.cmd.arg("x");
    self
}

/// Print the table of contents.
pub fn table_of_contents(&mut self) -> &mut Self {
    self.cmd.arg("t");
    self
}

/// Provide an output, then an input file. Bundled in one function, as llvm-ar has
/// no "--output"-style flag.
pub fn output_input(&mut self, out: impl AsRef<Path>, input: impl AsRef<Path>) -> &mut Self {
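A hedged usage sketch for the new archive helpers, assuming the usual `run_make_support::llvm_ar()` entry point and the `run()`/`arg()` helpers these command wrappers commonly provide:

```rust
use run_make_support::llvm_ar;

fn main() {
    // Create a thin archive from an object file, then list its members.
    llvm_ar().obj_to_thin_ar().output_input("libfoo.a", "foo.o").run();
    llvm_ar().table_of_contents().arg("libfoo.a").run();
}
```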
@@ -71,14 +71,8 @@ impl Rustdoc {
    self
}

/// Specify path to the output folder.
pub fn output<P: AsRef<Path>>(&mut self, path: P) -> &mut Self {
    self.cmd.arg("-o");
    self.cmd.arg(path.as_ref());
    self
}

/// Specify output directory.
#[doc(alias = "output")]
pub fn out_dir<P: AsRef<Path>>(&mut self, path: P) -> &mut Self {
    self.cmd.arg("--out-dir").arg(path.as_ref());
    self
@@ -38,6 +38,7 @@ pub use bstr;
pub use gimli;
pub use object;
pub use regex;
pub use serde_json;
pub use wasmparser;

// Re-exports of external dependencies.
@@ -84,3 +84,18 @@ pub fn has_suffix<P: AsRef<Path>>(path: P, suffix: &str) -> bool {
pub fn filename_contains<P: AsRef<Path>>(path: P, needle: &str) -> bool {
    path.as_ref().file_name().is_some_and(|name| name.to_str().unwrap().contains(needle))
}

/// Helper for reading entries in a given directory and its children.
pub fn read_dir_entries_recursive<P: AsRef<Path>, F: FnMut(&Path)>(dir: P, mut callback: F) {
    fn read_dir_entries_recursive_inner<P: AsRef<Path>, F: FnMut(&Path)>(dir: P, callback: &mut F) {
        for entry in rfs::read_dir(dir) {
            let path = entry.unwrap().path();
            callback(&path);
            if path.is_dir() {
                read_dir_entries_recursive_inner(path, callback);
            }
        }
    }

    read_dir_entries_recursive_inner(dir, &mut callback);
}
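A short usage sketch for the recursive directory walker above (the `src` path and the crate-root re-export are assumptions for illustration): collect every `.rs` file under a directory.

```rust
use std::path::PathBuf;

use run_make_support::read_dir_entries_recursive;

fn main() {
    let mut sources: Vec<PathBuf> = Vec::new();
    // The callback is invoked once per entry, including nested directories.
    read_dir_entries_recursive("src", |path| {
        if path.extension().is_some_and(|ext| ext == "rs") {
            sources.push(path.to_path_buf());
        }
    });
    println!("found {} Rust sources", sources.len());
}
```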
@@ -1462,7 +1462,7 @@ fn generic_args_sans_defaults<'ga>(
// otherwise, if the arg is equal to the param default, hide it (unless the
// default is an error which can happen for the trait Self type)
#[allow(unstable_name_collisions)]
default_parameters.get(i).is_none_or(|default_parameter| {
IsNoneOr::is_none_or(default_parameters.get(i), |default_parameter| {
    // !is_err(default_parameter.skip_binders())
    // &&
    arg != &default_parameter.clone().substitute(Interner, &parameters)
@@ -15,8 +15,10 @@ extern crate rustc_abi;
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;

// Use the crates.io version unconditionally until the API settles enough that we can switch to
// using the in-tree one.
#[cfg(feature = "in-rust-tree")]
extern crate rustc_pattern_analysis;

#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;

mod builder;
@@ -303,6 +303,12 @@ dependencies = [
 "crypto-common",
]

[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"

[[package]]
name = "elasticlunr-rs"
version = "3.0.2"
@@ -465,6 +471,21 @@ dependencies = [
 "syn",
]

[[package]]
name = "html_parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f56db07b6612644f6f7719f8ef944f75fff9d6378fdf3d316fd32194184abd"
dependencies = [
 "doc-comment",
 "pest",
 "pest_derive",
 "serde",
 "serde_derive",
 "serde_json",
 "thiserror",
]

[[package]]
name = "humantime"
version = "2.1.0"
@@ -680,13 +701,13 @@ name = "mdbook-trpl-listing"
version = "0.1.0"
dependencies = [
 "clap",
 "html_parser",
 "mdbook",
 "pulldown-cmark",
 "pulldown-cmark-to-cmark",
 "serde_json",
 "thiserror",
 "toml 0.8.14",
 "xmlparser",
]

[[package]]
@@ -1767,12 +1788,6 @@ dependencies = [
 "memchr",
]

[[package]]
name = "xmlparser"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4"

[[package]]
name = "yaml-rust"
version = "0.4.5"
@@ -84,9 +84,7 @@ pub(crate) struct ParsedMacroArgs {
fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
    for &keyword in RUST_KW.iter() {
        if parser.token.is_keyword(keyword)
            && parser.look_ahead(1, |t| {
                t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
            })
            && parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
        {
            parser.bump();
            return Some(MacroArg::Keyword(
@@ -131,7 +129,7 @@ pub(crate) fn parse_macro_args(
Some(arg) => {
    args.push(arg);
    parser.bump();
    if parser.token.kind == TokenKind::Eof && args.len() == 2 {
    if parser.token == TokenKind::Eof && args.len() == 2 {
        vec_with_semi = true;
        break;
    }
@@ -150,7 +148,7 @@ pub(crate) fn parse_macro_args(

parser.bump();

if parser.token.kind == TokenKind::Eof {
if parser.token == TokenKind::Eof {
    trailing_comma = true;
    break;
}
@@ -1,21 +1,13 @@
run-make/branch-protection-check-IBT/Makefile
run-make/cat-and-grep-sanity-check/Makefile
run-make/dep-info-doesnt-run-much/Makefile
run-make/dep-info-spaces/Makefile
run-make/dep-info/Makefile
run-make/emit-to-stdout/Makefile
run-make/extern-fn-reachable/Makefile
run-make/incr-add-rust-src-component/Makefile
run-make/issue-84395-lto-embed-bitcode/Makefile
run-make/jobserver-error/Makefile
run-make/libs-through-symlinks/Makefile
run-make/libtest-json/Makefile
run-make/libtest-junit/Makefile
run-make/libtest-thread-limit/Makefile
run-make/macos-deployment-target/Makefile
run-make/native-link-modifier-bundle/Makefile
run-make/reproducible-build/Makefile
run-make/rlib-format-packed-bundled-libs/Makefile
run-make/split-debuginfo/Makefile
run-make/symbol-mangling-hashed/Makefile
run-make/translation/Makefile
@@ -345,6 +345,9 @@
//@ revisions: powerpc_unknown_linux_musl
//@ [powerpc_unknown_linux_musl] compile-flags: --target powerpc-unknown-linux-musl
//@ [powerpc_unknown_linux_musl] needs-llvm-components: powerpc
//@ revisions: powerpc_unknown_linux_muslspe
//@ [powerpc_unknown_linux_muslspe] compile-flags: --target powerpc-unknown-linux-muslspe
//@ [powerpc_unknown_linux_muslspe] needs-llvm-components: powerpc
//@ revisions: powerpc_unknown_netbsd
//@ [powerpc_unknown_netbsd] compile-flags: --target powerpc-unknown-netbsd
//@ [powerpc_unknown_netbsd] needs-llvm-components: powerpc