diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 1522d12cbf9..85d9687c600 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -50,12 +50,11 @@ pub enum Delimiter { Brace, /// `[ ... ]` Bracket, - /// `/*«*/ ... /*»*/` + /// `Ø ... Ø` /// An invisible delimiter, that may, for example, appear around tokens coming from a /// "macro variable" `$var`. It is important to preserve operator priorities in cases like /// `$var * 3` where `$var` is `1 + 2`. - /// Invisible delimiters are not directly writable in normal Rust code except as comments. - /// Therefore, they might not survive a roundtrip of a token stream through a string. + /// Invisible delimiters might not survive roundtrip of a token stream through a string. Invisible, } diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 7357ddf2134..b80a553b418 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -590,29 +590,15 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere self.nbsp(); } self.word("{"); - let empty = tts.is_empty(); - if !empty { + if !tts.is_empty() { self.space(); } self.ibox(0); self.print_tts(tts, convert_dollar_crate); self.end(); + let empty = tts.is_empty(); self.bclose(span, empty); } - Some(Delimiter::Invisible) => { - self.word("/*«*/"); - let empty = tts.is_empty(); - if !empty { - self.space(); - } - self.ibox(0); - self.print_tts(tts, convert_dollar_crate); - self.end(); - if !empty { - self.space(); - } - self.word("/*»*/"); - } Some(delim) => { let token_str = self.token_kind_to_string(&token::OpenDelim(delim)); self.word(token_str); @@ -786,8 +772,9 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere token::CloseDelim(Delimiter::Bracket) => "]".into(), token::OpenDelim(Delimiter::Brace) => "{".into(), token::CloseDelim(Delimiter::Brace) => "}".into(), - token::OpenDelim(Delimiter::Invisible) => "/*«*/".into(), - token::CloseDelim(Delimiter::Invisible) => "/*»*/".into(), + token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible) => { + "".into() + } token::Pound => "#".into(), token::Dollar => "$".into(), token::Question => "?".into(), diff --git a/compiler/rustc_builtin_macros/src/assert.rs b/compiler/rustc_builtin_macros/src/assert.rs index a984980dea9..0c75187193c 100644 --- a/compiler/rustc_builtin_macros/src/assert.rs +++ b/compiler/rustc_builtin_macros/src/assert.rs @@ -1,11 +1,13 @@ +mod context; + use crate::edition_panic::use_panic_2021; use rustc_ast::ptr::P; use rustc_ast::token; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; -use rustc_ast::{self as ast, *}; +use rustc_ast::{Expr, ExprKind, MacArgs, MacCall, MacDelimiter, Path, PathSegment, UnOp}; use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, PResult}; -use rustc_expand::base::*; +use rustc_expand::base::{DummyResult, ExtCtxt, MacEager, MacResult}; use rustc_parse::parser::Parser; use rustc_span::symbol::{sym, Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; @@ -25,13 +27,13 @@ pub fn expand_assert<'cx>( // `core::panic` and `std::panic` are different macros, so we use call-site // context to pick up whichever is currently in scope. 
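As an aside on the `Delimiter::Invisible` doc comment above: the precedence preservation it describes can be observed with an ordinary `macro_rules!` macro. The following stand-alone sketch is not part of this patch; it only illustrates why the invisible grouping around an `expr` capture matters.

// A captured `$e:expr` is substituted as a single, invisibly delimited
// expression, so `1 + 2` stays grouped and operator precedence is preserved.
macro_rules! triple {
    ($e:expr) => {
        $e * 3
    };
}

fn main() {
    // Evaluates as `(1 + 2) * 3 == 9`, not `1 + 2 * 3 == 7`, because the
    // captured tokens are wrapped in invisible delimiters.
    assert_eq!(triple!(1 + 2), 9);
}
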
- let sp = cx.with_call_site_ctxt(span); + let call_site_span = cx.with_call_site_ctxt(span); - let panic_call = if let Some(tokens) = custom_message { - let path = if use_panic_2021(span) { + let panic_path = || { + if use_panic_2021(span) { // On edition 2021, we always call `$crate::panic::panic_2021!()`. Path { - span: sp, + span: call_site_span, segments: cx .std_path(&[sym::panic, sym::panic_2021]) .into_iter() @@ -42,27 +44,40 @@ pub fn expand_assert<'cx>( } else { // Before edition 2021, we call `panic!()` unqualified, // such that it calls either `std::panic!()` or `core::panic!()`. - Path::from_ident(Ident::new(sym::panic, sp)) - }; - // Pass the custom message to panic!(). - cx.expr( - sp, + Path::from_ident(Ident::new(sym::panic, call_site_span)) + } + }; + + // Simply uses the user provided message instead of generating custom outputs + let expr = if let Some(tokens) = custom_message { + let then = cx.expr( + call_site_span, ExprKind::MacCall(MacCall { - path, + path: panic_path(), args: P(MacArgs::Delimited( - DelimSpan::from_single(sp), + DelimSpan::from_single(call_site_span), MacDelimiter::Parenthesis, tokens, )), prior_type_ascription: None, }), - ) - } else { + ); + expr_if_not(cx, call_site_span, cond_expr, then, None) + } + // If `generic_assert` is enabled, generates rich captured outputs + // + // FIXME(c410-f3r) See https://github.com/rust-lang/rust/issues/96949 + else if let Some(features) = cx.ecfg.features && features.generic_assert { + context::Context::new(cx, call_site_span).build(cond_expr, panic_path()) + } + // If `generic_assert` is not enabled, only outputs a literal "assertion failed: ..." + // string + else { // Pass our own message directly to $crate::panicking::panic(), // because it might contain `{` and `}` that should always be // passed literally. - cx.expr_call_global( - sp, + let then = cx.expr_call_global( + call_site_span, cx.std_path(&[sym::panicking, sym::panic]), vec![cx.expr_str( DUMMY_SP, @@ -71,18 +86,29 @@ pub fn expand_assert<'cx>( pprust::expr_to_string(&cond_expr).escape_debug() )), )], - ) + ); + expr_if_not(cx, call_site_span, cond_expr, then, None) }; - let if_expr = - cx.expr_if(sp, cx.expr(sp, ExprKind::Unary(UnOp::Not, cond_expr)), panic_call, None); - MacEager::expr(if_expr) + + MacEager::expr(expr) } struct Assert { - cond_expr: P<ast::Expr>, + cond_expr: P<Expr>, custom_message: Option<TokenStream>, } +// if !{ ... } { ... } else { ... } +fn expr_if_not( + cx: &ExtCtxt<'_>, + span: Span, + cond: P<Expr>, + then: P<Expr>, + els: Option<P<Expr>>, +) -> P<Expr> { + cx.expr_if(span, cx.expr(span, ExprKind::Unary(UnOp::Not, cond)), then, els) +} + fn parse_assert<'a>(cx: &mut ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult<'a, Assert> { let mut parser = cx.new_parser_from_tts(stream); diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs new file mode 100644 index 00000000000..8d187a4be8a --- /dev/null +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -0,0 +1,44 @@ +use rustc_ast::{ptr::P, Expr, Path}; +use rustc_expand::base::ExtCtxt; +use rustc_span::Span; + +pub(super) struct Context<'cx, 'a> { + cx: &'cx ExtCtxt<'a>, + span: Span, +} + +impl<'cx, 'a> Context<'cx, 'a> { + pub(super) fn new(cx: &'cx ExtCtxt<'a>, span: Span) -> Self { + Self { cx, span } + } + + /// Builds the whole `assert!` expression. + /// + /// { + /// use ::core::asserting::{ ... }; + /// + /// let mut __capture0 = Capture::new(); + /// ... + /// ... + /// ... + /// + /// if !{ + /// ... + /// ... + /// ...
+ /// } { + /// panic!( + /// "Assertion failed: ... \n With expansion: ...", + /// __capture0, + /// ... + /// ... + /// ... + /// ); + /// } + /// } + pub(super) fn build(self, _cond_expr: P<Expr>, _panic_path: Path) -> P<Expr> { + let Self { cx, span, .. } = self; + let stmts = Vec::new(); + cx.expr_block(cx.block(span, stmts)) + } +} diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs index 0c9e3c22bcf..48b1470ced5 100644 --- a/compiler/rustc_builtin_macros/src/lib.rs +++ b/compiler/rustc_builtin_macros/src/lib.rs @@ -1,17 +1,18 @@ //! This crate contains implementations of built-in macros and other code generating facilities //! injecting code into the crate before it is lowered to HIR. +#![allow(rustc::potential_query_instability)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![feature(array_windows)] #![feature(box_patterns)] #![feature(decl_macro)] #![feature(is_sorted)] -#![feature(nll)] +#![feature(let_chains)] #![feature(let_else)] +#![feature(nll)] #![feature(proc_macro_internals)] #![feature(proc_macro_quote)] #![recursion_limit = "256"] -#![allow(rustc::potential_query_instability)] extern crate proc_macro; diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 56d0263269b..e73c31c98fe 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -160,7 +160,7 @@ impl<'a> ExtCtxt<'a> { attrs: AttrVec::new(), tokens: None, }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } + self.stmt_local(local, sp) } // Generates `let _: Type;`, which is usually used for type assertions. @@ -174,6 +174,10 @@ impl<'a> ExtCtxt<'a> { attrs: AttrVec::new(), tokens: None, }); + self.stmt_local(local, span) + } + + pub fn stmt_local(&self, local: P<ast::Local>, span: Span) -> ast::Stmt { ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span } } diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs index 5a02661513c..1466e8dfc92 100644 --- a/compiler/rustc_feature/src/active.rs +++ b/compiler/rustc_feature/src/active.rs @@ -150,6 +150,8 @@ declare_features! ( (active, allow_internal_unstable, "1.0.0", None, None), /// Allows identifying the `compiler_builtins` crate. (active, compiler_builtins, "1.13.0", None, None), + /// Outputs useful `assert!` messages + (active, generic_assert, "1.63.0", None, None), /// Allows using the `rust-intrinsic`'s "ABI". (active, intrinsics, "1.0.0", None, None), /// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic. diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 1954cdc0bff..5f301962061 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -733,6 +733,7 @@ symbols!
{ generator_state, generators, generic_arg_infer, + generic_assert, generic_associated_types, generic_associated_types_extended, generic_const_exprs, diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index dde9bc383d2..63655ae8a24 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -2233,37 +2233,43 @@ impl Display for char { #[stable(feature = "rust1", since = "1.0.0")] impl Pointer for *const T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { - /// Since the formatting will be identical for all pointer types, use a non-monomorphized - /// implementation for the actual formatting to reduce the amount of codegen work needed - fn inner(ptr: *const (), f: &mut Formatter<'_>) -> Result { - let old_width = f.width; - let old_flags = f.flags; - - // The alternate flag is already treated by LowerHex as being special- - // it denotes whether to prefix with 0x. We use it to work out whether - // or not to zero extend, and then unconditionally set it to get the - // prefix. - if f.alternate() { - f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32); - - if f.width.is_none() { - f.width = Some((usize::BITS / 4) as usize + 2); - } - } - f.flags |= 1 << (FlagV1::Alternate as u32); - - let ret = LowerHex::fmt(&(ptr.addr()), f); - - f.width = old_width; - f.flags = old_flags; - - ret - } - - inner(*self as *const (), f) + // Cast is needed here because `.addr()` requires `T: Sized`. + pointer_fmt_inner((*self as *const ()).addr(), f) } } +/// Since the formatting will be identical for all pointer types, use a non-monomorphized +/// implementation for the actual formatting to reduce the amount of codegen work needed. +/// +/// This uses `ptr_addr: usize` and not `ptr: *const ()` to be able to use this for +/// `fn(...) -> ...` without using [problematic] "Oxford Casts". +/// +/// [problematic]: https://github.com/rust-lang/rust/issues/95489 +pub(crate) fn pointer_fmt_inner(ptr_addr: usize, f: &mut Formatter<'_>) -> Result { + let old_width = f.width; + let old_flags = f.flags; + + // The alternate flag is already treated by LowerHex as being special- + // it denotes whether to prefix with 0x. We use it to work out whether + // or not to zero extend, and then unconditionally set it to get the + // prefix. + if f.alternate() { + f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32); + + if f.width.is_none() { + f.width = Some((usize::BITS / 4) as usize + 2); + } + } + f.flags |= 1 << (FlagV1::Alternate as u32); + + let ret = LowerHex::fmt(&ptr_addr, f); + + f.width = old_width; + f.flags = old_flags; + + ret +} + #[stable(feature = "rust1", since = "1.0.0")] impl Pointer for *mut T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 6b1e63e0cfa..5b04ae7b07e 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -1878,24 +1878,14 @@ macro_rules! fnptr_impls_safety_abi { #[stable(feature = "fnptr_impls", since = "1.4.0")] impl fmt::Pointer for $FnTy { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // HACK: The intermediate cast as usize is required for AVR - // so that the address space of the source function pointer - // is preserved in the final function pointer. 
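A note on the refactor above: `pointer_fmt_inner` takes a plain `usize` so that a single non-generic body can serve every `Pointer` impl (raw pointers here, function pointers below) without monomorphizing the formatting logic and without casting function pointers through `*const ()`. The stand-alone sketch below shows the same pattern; the names `fmt_addr` and `PtrDisplay` are hypothetical and not part of the patch.

use std::fmt;

// One non-generic helper: the formatting logic is compiled once rather than
// once per pointee type. Only the address is passed in.
fn fmt_addr(addr: usize, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // `{:#x}` gives the `0x` prefix, roughly matching how `{:p}` output looks.
    write!(f, "{:#x}", addr)
}

struct PtrDisplay<T: ?Sized>(*const T);

impl<T: ?Sized> fmt::Pointer for PtrDisplay<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Cast to a thin pointer first, then forward only the address.
        fmt_addr(self.0 as *const () as usize, f)
    }
}

fn main() {
    let x = 7u32;
    println!("{:p}", PtrDisplay(&x as *const u32)); // e.g. 0x7ffd7c9a1c4c
}
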
- // - // https://github.com/avr-rust/rust/issues/143 - fmt::Pointer::fmt(&(*self as usize as *const ()), f) + fmt::pointer_fmt_inner(*self as usize, f) } } #[stable(feature = "fnptr_impls", since = "1.4.0")] impl fmt::Debug for $FnTy { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // HACK: The intermediate cast as usize is required for AVR - // so that the address space of the source function pointer - // is preserved in the final function pointer. - // - // https://github.com/avr-rust/rust/issues/143 - fmt::Pointer::fmt(&(*self as usize as *const ()), f) + fmt::pointer_fmt_inner(*self as usize, f) } } } diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 6f7c6305afc..f1c5eaad868 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -703,12 +703,11 @@ pub enum Delimiter { /// `[ ... ]` #[stable(feature = "proc_macro_lib2", since = "1.29.0")] Bracket, - /// `/*«*/ ... /*»*/` + /// `Ø ... Ø` /// An invisible delimiter, that may, for example, appear around tokens coming from a /// "macro variable" `$var`. It is important to preserve operator priorities in cases like /// `$var * 3` where `$var` is `1 + 2`. - /// Invisible delimiters are not directly writable in normal Rust code except as comments. - /// Therefore, they might not survive a roundtrip of a token stream through a string. + /// Invisible delimiters might not survive roundtrip of a token stream through a string. #[stable(feature = "proc_macro_lib2", since = "1.29.0")] None, } diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index 6a8e4787676..51a2abc50bc 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -10,7 +10,7 @@ use std::fmt; use rustc_ast::ast; use rustc_hir::{def::CtorKind, def_id::DefId}; use rustc_middle::ty::{self, TyCtxt}; -use rustc_span::Pos; +use rustc_span::{Pos, Symbol}; use rustc_target::spec::abi::Abi as RustcAbi; use rustdoc_json_types::*; @@ -29,7 +29,9 @@ impl JsonRenderer<'_> { .get(&item.item_id) .into_iter() .flatten() - .map(|clean::ItemLink { link, did, .. }| (link.clone(), from_item_id((*did).into()))) + .map(|clean::ItemLink { link, did, .. }| { + (link.clone(), from_item_id((*did).into(), self.tcx)) + }) .collect(); let docs = item.attrs.collapsed_doc_value(); let attrs = item @@ -45,7 +47,7 @@ impl JsonRenderer<'_> { _ => from_clean_item(item, self.tcx), }; Some(Item { - id: from_item_id(item_id), + id: from_item_id_with_name(item_id, self.tcx, name), crate_id: item_id.krate().as_u32(), name: name.map(|sym| sym.to_string()), span: self.convert_span(span), @@ -84,7 +86,7 @@ impl JsonRenderer<'_> { Inherited => Visibility::Default, Restricted(did) if did.is_crate_root() => Visibility::Crate, Restricted(did) => Visibility::Restricted { - parent: from_item_id(did.into()), + parent: from_item_id(did.into(), self.tcx), path: self.tcx.def_path(did).to_string_no_crate_verbose(), }, } @@ -173,22 +175,39 @@ impl FromWithTcx for TypeBindingKind { } } -pub(crate) fn from_item_id(item_id: ItemId) -> Id { - struct DisplayDefId(DefId); +/// It generates an ID as follows: +/// +/// `CRATE_ID:ITEM_ID[:NAME_ID]` (if there is no name, NAME_ID is not generated). +pub(crate) fn from_item_id(item_id: ItemId, tcx: TyCtxt<'_>) -> Id { + from_item_id_with_name(item_id, tcx, None) +} - impl fmt::Display for DisplayDefId { +// FIXME: this function (and appending the name at the end of the ID) should be removed when +// reexports are not inlined anymore for json format. 
It should be done in #93518. +pub(crate) fn from_item_id_with_name(item_id: ItemId, tcx: TyCtxt<'_>, name: Option<Symbol>) -> Id { + struct DisplayDefId<'a>(DefId, TyCtxt<'a>, Option<Symbol>); + + impl<'a> fmt::Display for DisplayDefId<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}:{}", self.0.krate.as_u32(), u32::from(self.0.index)) + let name = match self.2 { + Some(name) => format!(":{}", name.as_u32()), + None => self + .1 + .opt_item_name(self.0) + .map(|n| format!(":{}", n.as_u32())) + .unwrap_or_default(), + }; + write!(f, "{}:{}{}", self.0.krate.as_u32(), u32::from(self.0.index), name) } } match item_id { - ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did))), + ItemId::DefId(did) => Id(format!("{}", DisplayDefId(did, tcx, name))), ItemId::Blanket { for_, impl_id } => { - Id(format!("b:{}-{}", DisplayDefId(impl_id), DisplayDefId(for_))) + Id(format!("b:{}-{}", DisplayDefId(impl_id, tcx, None), DisplayDefId(for_, tcx, name))) } ItemId::Auto { for_, trait_ } => { - Id(format!("a:{}-{}", DisplayDefId(trait_), DisplayDefId(for_))) + Id(format!("a:{}-{}", DisplayDefId(trait_, tcx, None), DisplayDefId(for_, tcx, name))) } ItemId::Primitive(ty, krate) => Id(format!("p:{}:{}", krate.as_u32(), ty.as_sym())), } @@ -201,7 +220,7 @@ fn from_clean_item(item: clean::Item, tcx: TyCtxt<'_>) -> ItemEnum { let header = item.fn_header(tcx); match *item.kind { - ModuleItem(m) => ItemEnum::Module(Module { is_crate, items: ids(m.items) }), + ModuleItem(m) => ItemEnum::Module(Module { is_crate, items: ids(m.items, tcx) }), ImportItem(i) => ItemEnum::Import(i.into_tcx(tcx)), StructItem(s) => ItemEnum::Struct(s.into_tcx(tcx)), UnionItem(u) => ItemEnum::Union(u.into_tcx(tcx)), @@ -255,7 +274,7 @@ impl FromWithTcx<clean::Struct> for Struct { struct_type: from_ctor_kind(struct_type), generics: generics.into_tcx(tcx), fields_stripped, - fields: ids(fields), + fields: ids(fields, tcx), impls: Vec::new(), // Added in JsonRenderer::item } } @@ -268,7 +287,7 @@ impl FromWithTcx<clean::Union> for Union { Union { generics: generics.into_tcx(tcx), fields_stripped, - fields: ids(fields), + fields: ids(fields, tcx), impls: Vec::new(), // Added in JsonRenderer::item } } @@ -413,7 +432,7 @@ impl FromWithTcx<clean::Type> for Type { match ty { clean::Type::Path { path } => Type::ResolvedPath { name: path.whole_name(), - id: from_item_id(path.def_id().into()), + id: from_item_id(path.def_id().into(), tcx), args: path.segments.last().map(|args| Box::new(args.clone().args.into_tcx(tcx))), param_names: Vec::new(), }, @@ -422,7 +441,7 @@ impl FromWithTcx<clean::Type> for Type { Type::ResolvedPath { name: first_trait.whole_name(), - id: from_item_id(first_trait.def_id().into()), + id: from_item_id(first_trait.def_id().into(), tcx), args: first_trait .segments .last() @@ -517,7 +536,7 @@ impl FromWithTcx<clean::Trait> for Trait { Trait { is_auto, is_unsafe: unsafety == rustc_hir::Unsafety::Unsafe, - items: ids(items), + items: ids(items, tcx), generics: generics.into_tcx(tcx), bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(), implementations: Vec::new(), // Added in JsonRenderer::item @@ -550,7 +569,7 @@ impl FromWithTcx<clean::Impl> for Impl { .collect(), trait_, for_: for_.into_tcx(tcx), - items: ids(items), + items: ids(items, tcx), negative: negative_polarity, synthetic, blanket_impl: blanket_impl.map(|x| x.into_tcx(tcx)), @@ -593,21 +612,21 @@ impl FromWithTcx<clean::Enum> for Enum { Enum { generics: generics.into_tcx(tcx), variants_stripped, - variants: ids(variants), + variants: ids(variants, tcx), impls: Vec::new(), // Added in JsonRenderer::item } } } impl FromWithTcx<clean::VariantStruct> for 
Struct { - fn from_tcx(struct_: clean::VariantStruct, _tcx: TyCtxt<'_>) -> Self { + fn from_tcx(struct_: clean::VariantStruct, tcx: TyCtxt<'_>) -> Self { let fields_stripped = struct_.has_stripped_entries(); let clean::VariantStruct { struct_type, fields } = struct_; Struct { struct_type: from_ctor_kind(struct_type), generics: Default::default(), fields_stripped, - fields: ids(fields), + fields: ids(fields, tcx), impls: Vec::new(), } } @@ -630,25 +649,25 @@ impl FromWithTcx<clean::Variant> for Variant { }) .collect(), ), - Struct(s) => Variant::Struct(ids(s.fields)), + Struct(s) => Variant::Struct(ids(s.fields, tcx)), } } } impl FromWithTcx<clean::Import> for Import { - fn from_tcx(import: clean::Import, _tcx: TyCtxt<'_>) -> Self { + fn from_tcx(import: clean::Import, tcx: TyCtxt<'_>) -> Self { use clean::ImportKind::*; match import.kind { Simple(s) => Import { source: import.source.path.whole_name(), name: s.to_string(), - id: import.source.did.map(ItemId::from).map(from_item_id), + id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)), glob: false, }, Glob => Import { source: import.source.path.whole_name(), name: import.source.path.last().to_string(), - id: import.source.did.map(ItemId::from).map(from_item_id), + id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)), glob: true, }, } @@ -742,6 +761,10 @@ impl FromWithTcx<clean::ItemKind> for ItemKind { } } -fn ids(items: impl IntoIterator<Item = clean::Item>) -> Vec<Id> { - items.into_iter().filter(|x| !x.is_stripped()).map(|i| from_item_id(i.item_id)).collect() +fn ids(items: impl IntoIterator<Item = clean::Item>, tcx: TyCtxt<'_>) -> Vec<Id> { + items + .into_iter() + .filter(|x| !x.is_stripped()) + .map(|i| from_item_id_with_name(i.item_id, tcx, i.name)) + .collect() } diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs index 08f61056d85..f338050bee0 100644 --- a/src/librustdoc/json/mod.rs +++ b/src/librustdoc/json/mod.rs @@ -16,6 +16,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; use rustc_middle::ty::TyCtxt; use rustc_session::Session; +use rustc_span::def_id::LOCAL_CRATE; use rustdoc_json_types as types; @@ -25,7 +26,7 @@ use crate::docfs::PathError; use crate::error::Error; use crate::formats::cache::Cache; use crate::formats::FormatRenderer; -use crate::json::conversions::{from_item_id, IntoWithTcx}; +use crate::json::conversions::{from_item_id, from_item_id_with_name, IntoWithTcx}; use crate::{clean, try_err}; #[derive(Clone)] @@ -54,7 +55,7 @@ impl<'tcx> JsonRenderer<'tcx> { .map(|i| { let item = &i.impl_item; self.item(item.clone()).unwrap(); - from_item_id(item.item_id) + from_item_id_with_name(item.item_id, self.tcx, item.name) }) .collect() }) @@ -86,7 +87,7 @@ impl<'tcx> JsonRenderer<'tcx> { if item.item_id.is_local() || is_primitive_impl { self.item(item.clone()).unwrap(); - Some(from_item_id(item.item_id)) + Some(from_item_id_with_name(item.item_id, self.tcx, item.name)) } else { None } @@ -105,10 +106,11 @@ impl<'tcx> JsonRenderer<'tcx> { if !id.is_local() { let trait_item = &trait_item.trait_; trait_item.items.clone().into_iter().for_each(|i| self.item(i).unwrap()); + let item_id = from_item_id(id.into(), self.tcx); Some(( - from_item_id(id.into()), + item_id.clone(), types::Item { - id: from_item_id(id.into()), + id: item_id, crate_id: id.krate.as_u32(), name: self .cache @@ -176,6 +178,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { // Flatten items that recursively store other items item.kind.inner_items().for_each(|i| self.item(i.clone()).unwrap()); + let name = item.name; let item_id = item.item_id; if let Some(mut 
new_item) = self.convert_item(item) { if let types::ItemEnum::Trait(ref mut t) = new_item.inner { @@ -187,7 +190,10 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { } else if let types::ItemEnum::Union(ref mut u) = new_item.inner { u.impls = self.get_impls(item_id.expect_def_id()) } - let removed = self.index.borrow_mut().insert(from_item_id(item_id), new_item.clone()); + let removed = self + .index + .borrow_mut() + .insert(from_item_id_with_name(item_id, self.tcx, name), new_item.clone()); // FIXME(adotinthevoid): Currently, the index is duplicated. This is a sanity check // to make sure the items are unique. The main place this happens is when an item, is @@ -211,13 +217,15 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { self.get_impls(*primitive); } + let e = ExternalCrate { crate_num: LOCAL_CRATE }; + let mut index = (*self.index).clone().into_inner(); index.extend(self.get_trait_items()); // This needs to be the default HashMap for compatibility with the public interface for // rustdoc-json-types #[allow(rustc::default_hash_types)] let output = types::Crate { - root: types::Id(String::from("0:0")), + root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())), crate_version: self.cache.crate_version.clone(), includes_private: self.cache.document_private, index: index.into_iter().collect(), @@ -229,7 +237,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { .chain(self.cache.external_paths.clone().into_iter()) .map(|(k, (path, kind))| { ( - from_item_id(k.into()), + from_item_id(k.into(), self.tcx), types::ItemSummary { crate_id: k.krate.as_u32(), path: path.iter().map(|s| s.to_string()).collect(), diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index d57868caf7a..00553d3f007 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -286,8 +286,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { self.visit_foreign_item(item, None, om); } } - // If we're inlining, skip private items. - _ if self.inlining && !is_pub => {} + // If we're inlining, skip private items or item reexported as "_". + _ if self.inlining && (!is_pub || renamed == Some(kw::Underscore)) => {} hir::ItemKind::GlobalAsm(..) => {} hir::ItemKind::Use(_, hir::UseKind::ListStem) => {} hir::ItemKind::Use(path, kind) => { diff --git a/src/test/rustdoc-json/reexport/same_type_reexported_more_than_once.rs b/src/test/rustdoc-json/reexport/same_type_reexported_more_than_once.rs new file mode 100644 index 00000000000..fd6ac8372d9 --- /dev/null +++ b/src/test/rustdoc-json/reexport/same_type_reexported_more_than_once.rs @@ -0,0 +1,17 @@ +// Regression test for https://github.com/rust-lang/rust/issues/97432. + +#![feature(no_core)] +#![no_std] +#![no_core] + +// @has same_type_reexported_more_than_once.json +// @set trait_id = - "$.index[*][?(@.name=='Trait')].id" +// @has - "$.index[*][?(@.name=='same_type_reexported_more_than_once')].inner.items[*]" $trait_id +pub use inner::Trait; +// @set reexport_id = - "$.index[*][?(@.name=='Reexport')].id" +// @has - "$.index[*][?(@.name=='same_type_reexported_more_than_once')].inner.items[*]" $reexport_id +pub use inner::Trait as Reexport; + +mod inner { + pub trait Trait {} +} diff --git a/src/test/rustdoc/anonymous-reexport.rs b/src/test/rustdoc/anonymous-reexport.rs new file mode 100644 index 00000000000..6b884ff14df --- /dev/null +++ b/src/test/rustdoc/anonymous-reexport.rs @@ -0,0 +1,22 @@ +#![crate_name = "foo"] + +// This test ensures we don't display anonymous (non-inline) re-exports of public items. 
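For reference, the `CRATE_ID:ITEM_ID[:NAME_ID]` scheme documented on `from_item_id` above (and applied to the crate root as `0:0:<name id>` in this hunk) amounts to simple string concatenation of interned numbers. A minimal illustration with made-up values, not the real rustdoc code (rustdoc obtains the name id via `Symbol::as_u32`):

// Hypothetical sketch of the rustdoc-json ID scheme; the numbers are invented.
fn make_id(crate_id: u32, item_index: u32, name_id: Option<u32>) -> String {
    match name_id {
        Some(n) => format!("{}:{}:{}", crate_id, item_index, n),
        None => format!("{}:{}", crate_id, item_index),
    }
}

fn main() {
    assert_eq!(make_id(0, 0, Some(4182)), "0:0:4182"); // e.g. the local crate root
    assert_eq!(make_id(2, 3059, None), "2:3059");      // an item with no name
}
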
+ + // @has 'foo/index.html' + // @has - '//*[@id="main-content"]' '' + // We check that the only "h2" present is for "Bla". + // @count - '//*[@id="main-content"]/h2' 1 + // @has - '//*[@id="main-content"]/h2' 'Structs' + // @count - '//*[@id="main-content"]//a[@class="struct"]' 1 + +mod ext { + pub trait Foo {} + pub trait Bar {} + pub struct S; +} + +pub use crate::ext::Foo as _; +pub use crate::ext::Bar as _; +pub use crate::ext::S as _; + +pub struct Bla; diff --git a/src/test/ui/proc-macro/auxiliary/expand-expr.rs b/src/test/ui/proc-macro/auxiliary/expand-expr.rs index a2e30e2e93b..2bc34f3c6bf 100644 --- a/src/test/ui/proc-macro/auxiliary/expand-expr.rs +++ b/src/test/ui/proc-macro/auxiliary/expand-expr.rs @@ -12,15 +12,6 @@ use std::str::FromStr; #[proc_macro] pub fn expand_expr_is(input: TokenStream) -> TokenStream { - expand_expr_is_inner(input, false) -} - -#[proc_macro] -pub fn expand_expr_is_trim(input: TokenStream) -> TokenStream { - expand_expr_is_inner(input, true) -} - -fn expand_expr_is_inner(input: TokenStream, trim_invisible: bool) -> TokenStream { let mut iter = input.into_iter(); let mut expected_tts = Vec::new(); loop { @@ -31,18 +22,14 @@ fn expand_expr_is_inner(input: TokenStream, trim_invisible: bool) -> TokenStream } } - // If requested, trim the "invisible" delimiters at the start and end. - let expected = expected_tts.into_iter().collect::<TokenStream>().to_string(); - let expected = if trim_invisible { - let len1 = "/*«*/ ".len(); - let len2 = " /*»*/".len(); - &expected[len1..expected.len() - len2] - } else { - &expected[..] - }; - let expanded = iter.collect::<TokenStream>().expand_expr().unwrap().to_string(); - - assert_eq!(expected, expanded); + let expected = expected_tts.into_iter().collect::<TokenStream>(); + let expanded = iter.collect::<TokenStream>().expand_expr().expect("expand_expr failed"); + assert!( + expected.to_string() == expanded.to_string(), + "assert failed\nexpected: `{}`\nexpanded: `{}`", + expected.to_string(), + expanded.to_string() + ); TokenStream::new() } diff --git a/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout b/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout index 3d0e7eaff00..4de8746a1b4 100644 --- a/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout +++ b/src/test/ui/proc-macro/capture-macro-rules-invoke.stdout @@ -1,5 +1,4 @@ PRINT-BANG INPUT (DISPLAY): self -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ self /*»*/ PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, @@ -14,10 +13,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ ] PRINT-BANG INPUT (DISPLAY): 1 + 1, { "a" }, let a = 1;, String, my_name, 'a, my_val = 30, std::option::Option, pub(in some::path) , [a b c], -30 -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ 1 + 1 /*»*/, /*«*/ { "a" } /*»*/, /*«*/ let a = 1 /*»*/, /*«*/ -String /*»*/, my_name, /*«*/ 'a /*»*/, /*«*/ my_val = 30 /*»*/, /*«*/ -std :: option :: Option /*»*/, /*«*/ pub(in some :: path) /*»*/, [a b c], -/*«*/ - 30 /*»*/ +PRINT-BANG RE-COLLECTED (DISPLAY): 1 + 1, { "a" }, let a = 1, String, my_name, 'a, my_val = 30, +std :: option :: Option, pub(in some :: path), [a b c], - 30 PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, @@ -298,7 +295,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ }, ] PRINT-BANG INPUT (DISPLAY): (a, b) -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ (a, b) /*»*/ PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/capture-unglued-token.stdout b/src/test/ui/proc-macro/capture-unglued-token.stdout index 5fe6ff72b45..7e6b540332c 100644 --- 
a/src/test/ui/proc-macro/capture-unglued-token.stdout +++ b/src/test/ui/proc-macro/capture-unglued-token.stdout @@ -1,5 +1,5 @@ PRINT-BANG INPUT (DISPLAY): Vec -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ Vec < u8 > /*»*/ +PRINT-BANG RE-COLLECTED (DISPLAY): Vec < u8 > PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/expand-expr.rs b/src/test/ui/proc-macro/expand-expr.rs index edcb30f892c..d1146d97030 100644 --- a/src/test/ui/proc-macro/expand-expr.rs +++ b/src/test/ui/proc-macro/expand-expr.rs @@ -2,9 +2,9 @@ extern crate expand_expr; -use expand_expr::{check_expand_expr_file, echo_pm, expand_expr_fail, expand_expr_is}; -use expand_expr::{expand_expr_is_trim, recursive_expand}; - +use expand_expr::{ + check_expand_expr_file, echo_pm, expand_expr_fail, expand_expr_is, recursive_expand, +}; // Check builtin macros can be expanded. @@ -47,21 +47,21 @@ macro_rules! echo_expr { macro_rules! simple_lit { ($l:literal) => { - expand_expr_is_trim!($l, $l); - expand_expr_is_trim!($l, echo_lit!($l)); - expand_expr_is_trim!($l, echo_expr!($l)); - expand_expr_is_trim!($l, echo_tts!($l)); - expand_expr_is_trim!($l, echo_pm!($l)); + expand_expr_is!($l, $l); + expand_expr_is!($l, echo_lit!($l)); + expand_expr_is!($l, echo_expr!($l)); + expand_expr_is!($l, echo_tts!($l)); + expand_expr_is!($l, echo_pm!($l)); const _: () = { macro_rules! mac { () => { $l }; } - expand_expr_is_trim!($l, mac!()); - expand_expr_is_trim!($l, echo_expr!(mac!())); - expand_expr_is_trim!($l, echo_tts!(mac!())); - expand_expr_is_trim!($l, echo_pm!(mac!())); + expand_expr_is!($l, mac!()); + expand_expr_is!($l, echo_expr!(mac!())); + expand_expr_is!($l, echo_tts!(mac!())); + expand_expr_is!($l, echo_pm!(mac!())); }; }; } diff --git a/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout b/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout index 04b516fd254..686d53e8876 100644 --- a/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout +++ b/src/test/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout @@ -1,6 +1,5 @@ PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = #[allow(warnings)] 0 ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E -{ V = { let _ = /*«*/ #[allow(warnings)] #[allow(warnings)] 0 /*»*/ ; 0 }, } +PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = #[allow(warnings)] #[allow(warnings)] 0 ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -124,7 +123,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { /*«*/ 0 /*»*/ } ; 0 }, } +PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -204,7 +203,6 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { /*«*/ {} /*»*/ } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -283,7 +281,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { /*«*/ PATH /*»*/ } ; 0 }, } +PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -361,7 +359,7 @@ PRINT-DERIVE 
INPUT (DEBUG): TokenStream [ }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { /*«*/ 0 + 1 /*»*/ } ; 0 }, } +PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", @@ -452,7 +450,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; } ; 0 }, } -PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { /*«*/ PATH + 1 /*»*/ } ; 0 }, } +PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", diff --git a/src/test/ui/proc-macro/issue-75734-pp-paren.stdout b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout index 55818969c71..0fda6654ff3 100644 --- a/src/test/ui/proc-macro/issue-75734-pp-paren.stdout +++ b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout @@ -96,7 +96,6 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ }, ] PRINT-BANG INPUT (DISPLAY): 1 + 1 * 2 -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ 1 + 1 /*»*/ * 2 PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout index 6cf8043c34f..60a400a5dea 100644 --- a/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout +++ b/src/test/ui/proc-macro/issue-78675-captured-inner-attrs.stdout @@ -1,7 +1,7 @@ PRINT-BANG INPUT (DISPLAY): foo! { #[fake_attr] mod bar { #![doc = r" Foo"] } } -PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): foo! { #[fake_attr] /*«*/ mod bar { #! [doc = r" Foo"] } /*»*/ } +PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): foo! { #[fake_attr] mod bar { #! 
[doc = r" Foo"] } } PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "foo", diff --git a/src/test/ui/proc-macro/issue-80760-empty-stmt.stdout b/src/test/ui/proc-macro/issue-80760-empty-stmt.stdout index adbd653ead4..4b7ed874307 100644 --- a/src/test/ui/proc-macro/issue-80760-empty-stmt.stdout +++ b/src/test/ui/proc-macro/issue-80760-empty-stmt.stdout @@ -1,5 +1,4 @@ PRINT-BANG INPUT (DISPLAY): ; -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ ; /*»*/ PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/nested-nonterminal-tokens.stdout b/src/test/ui/proc-macro/nested-nonterminal-tokens.stdout index b912e426d5d..a3d24dd26fe 100644 --- a/src/test/ui/proc-macro/nested-nonterminal-tokens.stdout +++ b/src/test/ui/proc-macro/nested-nonterminal-tokens.stdout @@ -1,6 +1,4 @@ PRINT-BANG INPUT (DISPLAY): 0 + 1 + 2 + 3 -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ 0 + 1 + 2 /*»*/ + 3 -PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): /*«*/ /*«*/ /*«*/ 0 /*»*/ + 1 /*»*/ + 2 /*»*/ + 3 PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/nodelim-groups.stdout b/src/test/ui/proc-macro/nodelim-groups.stdout index 0d2f33b4175..6b410f0bfb7 100644 --- a/src/test/ui/proc-macro/nodelim-groups.stdout +++ b/src/test/ui/proc-macro/nodelim-groups.stdout @@ -1,5 +1,4 @@ PRINT-BANG INPUT (DISPLAY): "hi" 1 + (25) + 1 (1 + 1) -PRINT-BANG RE-COLLECTED (DISPLAY): "hi" /*«*/ 1 + (25) + 1 /*»*/ (1 + 1) PRINT-BANG INPUT (DEBUG): TokenStream [ Literal { kind: Str, @@ -72,9 +71,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ }, ] PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1) -PRINT-BANG RE-COLLECTED (DISPLAY): "hi" /*«*/ "hello".len() + "world".len() /*»*/ (1 + 1) -PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): "hi" /*«*/ /*«*/ "hello".len() /*»*/ + /*«*/ "world".len() /*»*/ /*»*/ -(1 + 1) PRINT-BANG INPUT (DEBUG): TokenStream [ Literal { kind: Str, diff --git a/src/test/ui/proc-macro/nonterminal-expansion.stdout b/src/test/ui/proc-macro/nonterminal-expansion.stdout index 32981e7011d..4d884348f2c 100644 --- a/src/test/ui/proc-macro/nonterminal-expansion.stdout +++ b/src/test/ui/proc-macro/nonterminal-expansion.stdout @@ -1,5 +1,5 @@ PRINT-ATTR_ARGS INPUT (DISPLAY): a, line!(), b -PRINT-ATTR_ARGS RE-COLLECTED (DISPLAY): a, /*«*/ line! () /*»*/, b +PRINT-ATTR_ARGS RE-COLLECTED (DISPLAY): a, line! 
(), b PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "a", diff --git a/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout b/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout index ba18ca75d7f..c08e5308138 100644 --- a/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout +++ b/src/test/ui/proc-macro/nonterminal-token-hygiene.stdout @@ -1,5 +1,5 @@ PRINT-BANG INPUT (DISPLAY): struct S; -PRINT-BANG RE-COLLECTED (DISPLAY): /*«*/ struct S ; /*»*/ +PRINT-BANG RE-COLLECTED (DISPLAY): struct S ; PRINT-BANG INPUT (DEBUG): TokenStream [ Group { delimiter: None, diff --git a/src/test/ui/proc-macro/parent-source-spans.rs b/src/test/ui/proc-macro/parent-source-spans.rs index 71e5065a87a..354657db4db 100644 --- a/src/test/ui/proc-macro/parent-source-spans.rs +++ b/src/test/ui/proc-macro/parent-source-spans.rs @@ -8,16 +8,16 @@ use parent_source_spans::parent_source_spans; macro one($a:expr, $b:expr) { two!($a, $b); - //~^ ERROR first parent: /*«*/ "hello" /*»*/ - //~| ERROR second parent: /*«*/ "world" /*»*/ + //~^ ERROR first parent: "hello" + //~| ERROR second parent: "world" } macro two($a:expr, $b:expr) { three!($a, $b); - //~^ ERROR first final: /*«*/ "hello" /*»*/ - //~| ERROR second final: /*«*/ "world" /*»*/ - //~| ERROR first final: /*«*/ "yay" /*»*/ - //~| ERROR second final: /*«*/ "rust" /*»*/ + //~^ ERROR first final: "hello" + //~| ERROR second final: "world" + //~| ERROR first final: "yay" + //~| ERROR second final: "rust" } // forwarding tokens directly doesn't create a new source chain @@ -34,16 +34,16 @@ macro four($($tokens:tt)*) { fn main() { one!("hello", "world"); - //~^ ERROR first grandparent: /*«*/ "hello" /*»*/ - //~| ERROR second grandparent: /*«*/ "world" /*»*/ - //~| ERROR first source: /*«*/ "hello" /*»*/ - //~| ERROR second source: /*«*/ "world" /*»*/ + //~^ ERROR first grandparent: "hello" + //~| ERROR second grandparent: "world" + //~| ERROR first source: "hello" + //~| ERROR second source: "world" two!("yay", "rust"); - //~^ ERROR first parent: /*«*/ "yay" /*»*/ - //~| ERROR second parent: /*«*/ "rust" /*»*/ - //~| ERROR first source: /*«*/ "yay" /*»*/ - //~| ERROR second source: /*«*/ "rust" /*»*/ + //~^ ERROR first parent: "yay" + //~| ERROR second parent: "rust" + //~| ERROR first source: "yay" + //~| ERROR second source: "rust" three!("hip", "hop"); //~^ ERROR first final: "hip" diff --git a/src/test/ui/proc-macro/parent-source-spans.stderr b/src/test/ui/proc-macro/parent-source-spans.stderr index e42218ea701..4548269b507 100644 --- a/src/test/ui/proc-macro/parent-source-spans.stderr +++ b/src/test/ui/proc-macro/parent-source-spans.stderr @@ -1,4 +1,4 @@ -error: first final: /*«*/ "hello" /*»*/ +error: first final: "hello" --> $DIR/parent-source-spans.rs:16:12 | LL | three!($a, $b); @@ -9,7 +9,7 @@ LL | one!("hello", "world"); | = note: this error originates in the macro `two` (in Nightly builds, run with -Z macro-backtrace for more info) -error: second final: /*«*/ "world" /*»*/ +error: second final: "world" --> $DIR/parent-source-spans.rs:16:16 | LL | three!($a, $b); @@ -20,7 +20,7 @@ LL | one!("hello", "world"); | = note: this error originates in the macro `two` (in Nightly builds, run with -Z macro-backtrace for more info) -error: first parent: /*«*/ "hello" /*»*/ +error: first parent: "hello" --> $DIR/parent-source-spans.rs:10:5 | LL | two!($a, $b); @@ -31,7 +31,7 @@ LL | one!("hello", "world"); | = note: this error originates in the macro `one` (in Nightly builds, run with -Z macro-backtrace for more info) -error: second parent: /*«*/ 
"world" /*»*/ +error: second parent: "world" --> $DIR/parent-source-spans.rs:10:5 | LL | two!($a, $b); @@ -42,31 +42,31 @@ LL | one!("hello", "world"); | = note: this error originates in the macro `one` (in Nightly builds, run with -Z macro-backtrace for more info) -error: first grandparent: /*«*/ "hello" /*»*/ +error: first grandparent: "hello" --> $DIR/parent-source-spans.rs:36:5 | LL | one!("hello", "world"); | ^^^^^^^^^^^^^^^^^^^^^^ -error: second grandparent: /*«*/ "world" /*»*/ +error: second grandparent: "world" --> $DIR/parent-source-spans.rs:36:5 | LL | one!("hello", "world"); | ^^^^^^^^^^^^^^^^^^^^^^ -error: first source: /*«*/ "hello" /*»*/ +error: first source: "hello" --> $DIR/parent-source-spans.rs:36:5 | LL | one!("hello", "world"); | ^^^^^^^^^^^^^^^^^^^^^^ -error: second source: /*«*/ "world" /*»*/ +error: second source: "world" --> $DIR/parent-source-spans.rs:36:5 | LL | one!("hello", "world"); | ^^^^^^^^^^^^^^^^^^^^^^ -error: first final: /*«*/ "yay" /*»*/ +error: first final: "yay" --> $DIR/parent-source-spans.rs:16:12 | LL | three!($a, $b); @@ -77,7 +77,7 @@ LL | two!("yay", "rust"); | = note: this error originates in the macro `two` (in Nightly builds, run with -Z macro-backtrace for more info) -error: second final: /*«*/ "rust" /*»*/ +error: second final: "rust" --> $DIR/parent-source-spans.rs:16:16 | LL | three!($a, $b); @@ -88,25 +88,25 @@ LL | two!("yay", "rust"); | = note: this error originates in the macro `two` (in Nightly builds, run with -Z macro-backtrace for more info) -error: first parent: /*«*/ "yay" /*»*/ +error: first parent: "yay" --> $DIR/parent-source-spans.rs:42:5 | LL | two!("yay", "rust"); | ^^^^^^^^^^^^^^^^^^^ -error: second parent: /*«*/ "rust" /*»*/ +error: second parent: "rust" --> $DIR/parent-source-spans.rs:42:5 | LL | two!("yay", "rust"); | ^^^^^^^^^^^^^^^^^^^ -error: first source: /*«*/ "yay" /*»*/ +error: first source: "yay" --> $DIR/parent-source-spans.rs:42:5 | LL | two!("yay", "rust"); | ^^^^^^^^^^^^^^^^^^^ -error: second source: /*«*/ "rust" /*»*/ +error: second source: "rust" --> $DIR/parent-source-spans.rs:42:5 | LL | two!("yay", "rust"); diff --git a/src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs b/src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs new file mode 100644 index 00000000000..f70ca87e304 --- /dev/null +++ b/src/test/ui/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs @@ -0,0 +1,26 @@ +// compile-flags: --test +// run-pass + +// `generic_assert` is completely unimplemented and doesn't generate any logic, thus the +// reason why this test currently passes +#![feature(core_intrinsics, generic_assert, generic_assert_internals)] + +use std::fmt::{Debug, Formatter}; + +#[derive(Clone, Copy, PartialEq)] +struct CopyDebug(i32); + +impl Debug for CopyDebug { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { + f.write_str("With great power comes great electricity bills") + } +} + +#[test] +fn test() { + let _copy_debug = CopyDebug(1); + assert!(_copy_debug == CopyDebug(3)); +} + +fn main() { +}