Auto merge of #2712 - RalfJung:rustup, r=RalfJung

Rustup
bors 2022-12-03 12:32:28 +00:00
commit 840f227b00
728 changed files with 10521 additions and 7131 deletions


@ -73,6 +73,8 @@ Björn Steinbrink <bsteinbr@gmail.com> <B.Steinbrink@gmx.de>
blake2-ppc <ulrik.sverdrup@gmail.com> <blake2-ppc>
boolean_coercion <booleancoercion@gmail.com>
Boris Egorov <jightuse@gmail.com> <egorov@linux.com>
bors <bors@rust-lang.org> bors[bot] <26634292+bors[bot]@users.noreply.github.com>
bors <bors@rust-lang.org> bors[bot] <bors[bot]@users.noreply.github.com>
Braden Nelson <moonheart08@users.noreply.github.com>
Brandon Sanderson <singingboyo@gmail.com> Brandon Sanderson <singingboyo@hotmail.com>
Brett Cannon <brett@python.org> Brett Cannon <brettcannon@users.noreply.github.com>
@ -139,6 +141,10 @@ David Ross <daboross@daboross.net>
David Wood <david@davidtw.co> <david.wood@huawei.com>
Deadbeef <ent3rm4n@gmail.com>
Deadbeef <ent3rm4n@gmail.com> <fee1-dead-beef@protonmail.com>
dependabot[bot] <dependabot[bot]@users.noreply.github.com> <27856297+dependabot-preview[bot]@users.noreply.github.com>
dependabot[bot] <dependabot[bot]@users.noreply.github.com> <49699333+dependabot[bot]@users.noreply.github.com>
dependabot[bot] <dependabot[bot]@users.noreply.github.com> <dependabot-preview[bot]@users.noreply.github.com>
dependabot[bot] <dependabot[bot]@users.noreply.github.com> <support@dependabot.com>
Derek Chiang <derekchiang93@gmail.com> Derek Chiang (Enchi Jiang) <derekchiang93@gmail.com>
DeveloperC <DeveloperC@protonmail.com>
Devin Ragotzy <devin.ragotzy@gmail.com>
@ -229,7 +235,7 @@ Jacob <jacob.macritchie@gmail.com>
Jacob Greenfield <xales@naveria.com>
Jacob Pratt <jacob@jhpratt.dev> <the.z.cuber@gmail.com>
Jake Vossen <jake@vossen.dev>
Jakob Degen <jakob@degen.com>
Jakob Degen <jakob.e.degen@gmail.com> <jakob@degen.com>
Jakob Lautrup Nysom <jako3047@gmail.com>
Jakub Adam Wieczorek <jakub.adam.wieczorek@gmail.com>
Jakub Adam Wieczorek <jakub.adam.wieczorek@gmail.com> <jakub.bukaj@yahoo.com>
@ -396,6 +402,10 @@ Nathaniel Herman <nherman@post.harvard.edu> Nathaniel Herman <nherman@college.ha
Neil Pankey <npankey@gmail.com> <neil@wire.im>
Ngo Iok Ui (Wu Yu Wei) <wusyong9104@gmail.com>
Nicholas Baron <nicholas.baron.ten@gmail.com>
Nicholas Bishop <nbishop@nbishop.net> <nicholasbishop@gmail.com>
Nicholas Bishop <nbishop@nbishop.net> <nicholasbishop@google.com>
Nicholas Nethercote <n.nethercote@gmail.com> <nnethercote@apple.com>
Nicholas Nethercote <n.nethercote@gmail.com> <nnethercote@mozilla.com>
Nick Platt <platt.nicholas@gmail.com>
Niclas Schwarzlose <15schnic@gmail.com>
Nicolas Abram <abramlujan@gmail.com>
@ -522,6 +532,7 @@ Tomas Koutsky <tomas@stepnivlk.net>
Torsten Weber <TorstenWeber12@gmail.com>
Torsten Weber <TorstenWeber12@gmail.com> <torstenweber12@gmail.com>
Trevor Spiteri <tspiteri@ieee.org> <trevor.spiteri@um.edu.mt>
Tshepang Mbambo <tshepang@gmail.com>
Ty Overby <ty@pre-alpha.com>
Tyler Mandry <tmandry@gmail.com> <tmandry@google.com>
Tyler Ruckinger <t.ruckinger@gmail.com>


@ -288,7 +288,6 @@ name = "cargo"
version = "0.68.0"
dependencies = [
"anyhow",
"atty",
"bytesize",
"cargo-platform 0.1.2",
"cargo-test-macro",
@ -298,7 +297,7 @@ dependencies = [
"crates-io",
"curl",
"curl-sys",
"env_logger 0.9.0",
"env_logger 0.10.0",
"filetime",
"flate2",
"fwdansi",
@ -312,6 +311,7 @@ dependencies = [
"ignore",
"im-rc",
"indexmap",
"is-terminal",
"itertools",
"jobserver",
"lazy_static",
@ -870,6 +870,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "core"
version = "0.0.0"
@ -1060,6 +1066,19 @@ dependencies = [
"syn",
]
[[package]]
name = "derive_more"
version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn",
]
[[package]]
name = "diff"
version = "0.1.13"
@ -1213,6 +1232,40 @@ dependencies = [
"termcolor",
]
[[package]]
name = "env_logger"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
dependencies = [
"humantime 2.0.1",
"is-terminal",
"log",
"regex",
"termcolor",
]
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "error_index_generator"
version = "0.0.0"
@ -1907,6 +1960,28 @@ dependencies = [
"unic-langid",
]
[[package]]
name = "io-lifetimes"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e394faa0efb47f9f227f1cd89978f854542b318a6f64fa695489c9c993056656"
dependencies = [
"libc",
"windows-sys",
]
[[package]]
name = "is-terminal"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aae5bc6e2eb41c9def29a3e0f1306382807764b9b53112030eff57435667352d"
dependencies = [
"hermit-abi 0.2.6",
"io-lifetimes",
"rustix",
"windows-sys",
]
[[package]]
name = "itertools"
version = "0.10.5"
@ -2116,6 +2191,12 @@ dependencies = [
"walkdir",
]
[[package]]
name = "linux-raw-sys"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
[[package]]
name = "litemap"
version = "0.6.0"
@ -3193,6 +3274,7 @@ version = "1.0.0"
dependencies = [
"bstr 0.2.17",
"clap 3.2.20",
"libc",
"libz-sys",
"rand 0.8.5",
"regex",
@ -3916,6 +3998,7 @@ version = "0.0.0"
dependencies = [
"bitflags",
"chalk-ir",
"derive_more",
"either",
"gsgdt",
"polonius-engine",
@ -4508,6 +4591,20 @@ dependencies = [
"unicode_categories",
]
[[package]]
name = "rustix"
version = "0.36.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b1fbb4dfc4eb1d390c02df47760bb19a84bb80b301ecc947ab5406394d8223e"
dependencies = [
"bitflags",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "rustversion"
version = "1.0.5"


@ -211,6 +211,102 @@ pub enum TargetDataLayoutErrors<'a> {
}
impl TargetDataLayout {
/// Parse data layout from an [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout)
///
/// This function doesn't fill `c_enum_min_size`; it will always be `I32`, since it cannot be
/// determined from the LLVM string.
pub fn parse_from_llvm_datalayout_string<'a>(
input: &'a str,
) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
// Parse an address space index from a string.
let parse_address_space = |s: &'a str, cause: &'a str| {
s.parse::<u32>().map(AddressSpace).map_err(|err| {
TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
})
};
// Parse a bit count from a string.
let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
kind,
bit: s,
cause,
err,
})
};
// Parse a size string.
let size = |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
// Parse an alignment string.
let align = |s: &[&'a str], cause: &'a str| {
if s.is_empty() {
return Err(TargetDataLayoutErrors::MissingAlignment { cause });
}
let align_from_bits = |bits| {
Align::from_bits(bits)
.map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
};
let abi = parse_bits(s[0], "alignment", cause)?;
let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
};
let mut dl = TargetDataLayout::default();
let mut i128_align_src = 64;
for spec in input.split('-') {
let spec_parts = spec.split(':').collect::<Vec<_>>();
match &*spec_parts {
["e"] => dl.endian = Endian::Little,
["E"] => dl.endian = Endian::Big,
[p] if p.starts_with('P') => {
dl.instruction_address_space = parse_address_space(&p[1..], "P")?
}
["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
[p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
dl.pointer_size = size(s, p)?;
dl.pointer_align = align(a, p)?;
}
[s, ref a @ ..] if s.starts_with('i') => {
let Ok(bits) = s[1..].parse::<u64>() else {
size(&s[1..], "i")?; // For the user error.
continue;
};
let a = align(a, s)?;
match bits {
1 => dl.i1_align = a,
8 => dl.i8_align = a,
16 => dl.i16_align = a,
32 => dl.i32_align = a,
64 => dl.i64_align = a,
_ => {}
}
if bits >= i128_align_src && bits <= 128 {
// Default alignment for i128 is decided by taking the alignment of
// largest-sized i{64..=128}.
i128_align_src = bits;
dl.i128_align = a;
}
}
[s, ref a @ ..] if s.starts_with('v') => {
let v_size = size(&s[1..], "v")?;
let a = align(a, s)?;
if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
v.1 = a;
continue;
}
// No existing entry, add a new one.
dl.vector_align.push((v_size, a));
}
_ => {} // Ignore everything else.
}
}
Ok(dl)
}
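For orientation, a small self-contained sketch of the spec-splitting approach the new parser uses: split the layout string on `-`, split each spec on `:`, and dispatch on the first component. The simplified dispatch and the sample layout string below are illustrative assumptions, not part of this commit.

fn summarize_datalayout(input: &str) {
    // Split into specs on '-', then split each spec into components on ':'.
    for spec in input.split('-') {
        let parts = spec.split(':').collect::<Vec<_>>();
        match &*parts {
            ["e"] => println!("little-endian"),
            ["E"] => println!("big-endian"),
            [p, s, ..] if *p == "p" || *p == "p0" => println!("pointer size: {s} bits"),
            [s, ..] if s.starts_with('i') => println!("integer spec: {} bits", &s[1..]),
            _ => {} // ignore everything else, as the real parser does for unknown specs
        }
    }
}

fn main() {
    // Illustrative data layout string for a 64-bit little-endian target.
    summarize_datalayout("e-p:64:64-i64:64-f80:128-n8:16:32:64-S128");
}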
/// Returns exclusive upper bound on object size.
///
/// The theoretical maximum object size is defined as the maximum positive `isize` value.


@ -13,7 +13,7 @@
//! - [`FnDecl`], [`FnHeader`] and [`Param`]: Metadata associated with a function declaration.
//! - [`Generics`], [`GenericParam`], [`WhereClause`]: Metadata associated with generic parameters.
//! - [`EnumDef`] and [`Variant`]: Enum declaration.
//! - [`Lit`] and [`LitKind`]: Literal expressions.
//! - [`MetaItemLit`] and [`LitKind`]: Literal expressions.
//! - [`MacroDef`], [`MacStmtStyle`], [`MacCall`], [`MacDelimiter`]: Macro definition and invocation.
//! - [`Attribute`]: Metadata associated with item.
//! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
@ -111,8 +111,8 @@ impl<CTX: rustc_span::HashStableContext> HashStable<CTX> for Path {
}
impl Path {
// Convert a span and an identifier to the corresponding
// one-segment path.
/// Convert a span and an identifier to the corresponding
/// one-segment path.
pub fn from_ident(ident: Ident) -> Path {
Path { segments: thin_vec![PathSegment::from_ident(ident)], span: ident.span, tokens: None }
}
@ -479,20 +479,10 @@ pub struct Crate {
pub is_placeholder: bool,
}
/// Possible values inside of compile-time attribute lists.
///
/// E.g., the '..' in `#[name(..)]`.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum NestedMetaItem {
/// A full MetaItem, for recursive meta items.
MetaItem(MetaItem),
/// A literal.
///
/// E.g., `"foo"`, `64`, `true`.
Literal(Lit),
}
/// A spanned compile-time attribute item.
/// A semantic representation of a meta item. A meta item is a slightly
/// restricted form of an attribute -- it can only contain expressions in
/// certain leaf positions, rather than arbitrary token streams -- that is used
/// for most built-in attributes.
///
/// E.g., `#[test]`, `#[derive(..)]`, `#[rustfmt::skip]` or `#[feature = "foo"]`.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
@ -502,23 +492,37 @@ pub struct MetaItem {
pub span: Span,
}
/// A compile-time attribute item.
///
/// E.g., `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`.
/// The meta item kind, containing the data after the initial path.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum MetaItemKind {
/// Word meta item.
///
/// E.g., `test` as in `#[test]`.
/// E.g., `#[test]`, which lacks any arguments after `test`.
Word,
/// List meta item.
///
/// E.g., `derive(..)` as in `#[derive(..)]`.
/// E.g., `#[derive(..)]`, where the field represents the `..`.
List(Vec<NestedMetaItem>),
/// Name value meta item.
///
/// E.g., `feature = "foo"` as in `#[feature = "foo"]`.
NameValue(Lit),
/// E.g., `#[feature = "foo"]`, where the field represents the `"foo"`.
NameValue(MetaItemLit),
}
/// Values inside meta item lists.
///
/// E.g., each of `Clone`, `Copy` in `#[derive(Clone, Copy)]`.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum NestedMetaItem {
/// A full MetaItem, for recursive meta items.
MetaItem(MetaItem),
/// A literal.
///
/// E.g., `"foo"`, `64`, `true`.
Lit(MetaItemLit),
}
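As a plain-Rust illustration of the three kinds above (this only annotates surface syntax; it does not construct the AST types), each attribute below corresponds to one `MetaItemKind` variant:

#[derive(Clone, Copy)] // MetaItemKind::List: `Clone` and `Copy` are NestedMetaItem::MetaItem entries
#[doc = "a 2D point"]  // MetaItemKind::NameValue: `"a 2D point"` is the MetaItemLit
#[non_exhaustive]      // MetaItemKind::Word: a bare path with no arguments
pub struct Point {
    pub x: i32,
    pub y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("({}, {})", p.x, p.y);
}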
/// A block (`{ .. }`).
@ -1283,7 +1287,7 @@ impl Expr {
)
}
// To a first-order approximation, is this a pattern
/// To a first-order approximation, is this a pattern?
pub fn is_approximately_pattern(&self) -> bool {
match &self.peel_parens().kind {
ExprKind::Box(_)
@ -1599,12 +1603,12 @@ pub enum AttrArgs {
}
// The RHS of an `AttrArgs::Eq` starts out as an expression. Once macro
// expansion is completed, all cases end up either as a literal, which is the
// form used after lowering to HIR, or as an error.
// expansion is completed, all cases end up either as a meta item literal,
// which is the form used after lowering to HIR, or as an error.
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum AttrArgsEq {
Ast(P<Expr>),
Hir(Lit),
Hir(MetaItemLit),
}
impl AttrArgs {
@ -1726,19 +1730,18 @@ pub enum StrStyle {
Raw(u8),
}
/// An AST literal.
/// A literal in a meta item.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
pub struct MetaItemLit {
/// The original literal token as written in source code.
pub token_lit: token::Lit,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub kind: LitKind,
pub span: Span,
}
/// Same as `Lit`, but restricted to string literals.
/// Similar to `MetaItemLit`, but restricted to string literals.
#[derive(Clone, Copy, Encodable, Decodable, Debug)]
pub struct StrLit {
/// The original literal token as written in source code.
@ -1747,7 +1750,6 @@ pub struct StrLit {
pub suffix: Option<Symbol>,
pub span: Span,
/// The unescaped "semantic" representation of the literal lowered from the original token.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub symbol_unescaped: Symbol,
}
@ -1783,6 +1785,8 @@ pub enum LitFloatType {
Unsuffixed,
}
/// This type is used within both `ast::MetaItemLit` and `hir::Lit`.
///
/// Note that the entire literal (including the suffix) is considered when
/// deciding the `LitKind`. This means that float literals like `1f32` are
/// classified by this type as `Float`. This is different to `token::LitKind`
@ -2513,10 +2517,7 @@ pub struct Variant {
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum UseTreeKind {
/// `use prefix` or `use prefix as rename`
///
/// The extra `NodeId`s are for HIR lowering, when additional statements are created for each
/// namespace.
Simple(Option<Ident>, NodeId, NodeId),
Simple(Option<Ident>),
/// `use prefix::{...}`
Nested(Vec<(UseTree, NodeId)>),
/// `use prefix::*`
@ -2535,8 +2536,8 @@ pub struct UseTree {
impl UseTree {
pub fn ident(&self) -> Ident {
match self.kind {
UseTreeKind::Simple(Some(rename), ..) => rename,
UseTreeKind::Simple(None, ..) => {
UseTreeKind::Simple(Some(rename)) => rename,
UseTreeKind::Simple(None) => {
self.prefix.segments.last().expect("empty prefix in a simple import").ident
}
_ => panic!("`UseTree::ident` can only be used on a simple import"),
@ -2570,17 +2571,10 @@ impl<D: Decoder> Decodable<D> for AttrId {
}
}
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct AttrItem {
pub path: Path,
pub args: AttrArgs,
pub tokens: Option<LazyAttrTokenStream>,
}
/// A list of attributes.
pub type AttrVec = ThinVec<Attribute>;
/// Metadata associated with an item.
/// A syntax-level representation of an attribute.
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct Attribute {
pub kind: AttrKind,
@ -2591,12 +2585,6 @@ pub struct Attribute {
pub span: Span,
}
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct NormalAttr {
pub item: AttrItem,
pub tokens: Option<LazyAttrTokenStream>,
}
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum AttrKind {
/// A normal attribute.
@ -2608,6 +2596,19 @@ pub enum AttrKind {
DocComment(CommentKind, Symbol),
}
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct NormalAttr {
pub item: AttrItem,
pub tokens: Option<LazyAttrTokenStream>,
}
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct AttrItem {
pub path: Path,
pub args: AttrArgs,
pub tokens: Option<LazyAttrTokenStream>,
}
/// `TraitRef`s appear in impls.
///
/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all
@ -3096,9 +3097,9 @@ mod size_asserts {
static_assert_size!(Impl, 184);
static_assert_size!(Item, 184);
static_assert_size!(ItemKind, 112);
static_assert_size!(Lit, 48);
static_assert_size!(LitKind, 24);
static_assert_size!(Local, 72);
static_assert_size!(MetaItemLit, 48);
static_assert_size!(Param, 40);
static_assert_size!(Pat, 88);
static_assert_size!(Path, 24);


@ -1,10 +1,10 @@
//! Functions dealing with attributes and meta items.
use crate::ast;
use crate::ast::{AttrArgs, AttrArgsEq, AttrId, AttrItem, AttrKind, AttrStyle, Attribute};
use crate::ast::{DelimArgs, Lit, LitKind};
use crate::ast::{MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::ast::{AttrArgs, AttrArgsEq, AttrId, AttrItem, AttrKind, AttrStyle, AttrVec, Attribute};
use crate::ast::{DelimArgs, Expr, ExprKind, LitKind, MetaItemLit};
use crate::ast::{MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem, NormalAttr};
use crate::ast::{Path, PathSegment, StrStyle, DUMMY_NODE_ID};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
@ -12,7 +12,6 @@ use crate::tokenstream::{LazyAttrTokenStream, TokenStream};
use crate::util::comments;
use rustc_data_structures::sync::WorkerLocal;
use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::BytePos;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
use std::cell::Cell;
@ -26,9 +25,9 @@ use thin_vec::thin_vec;
pub struct MarkedAttrs(GrowableBitSet<AttrId>);
impl MarkedAttrs {
// We have no idea how many attributes there will be, so just
// initiate the vectors with 0 bits. We'll grow them as necessary.
pub fn new() -> Self {
// We have no idea how many attributes there will be, so just
// initiate the vectors with 0 bits. We'll grow them as necessary.
MarkedAttrs(GrowableBitSet::new_empty())
}
@ -50,10 +49,10 @@ impl NestedMetaItem {
}
}
/// Returns the `Lit` if `self` is a `NestedMetaItem::Literal`s.
pub fn literal(&self) -> Option<&Lit> {
/// Returns the `MetaItemLit` if `self` is a `NestedMetaItem::Lit`.
pub fn lit(&self) -> Option<&MetaItemLit> {
match self {
NestedMetaItem::Literal(lit) => Some(lit),
NestedMetaItem::Lit(lit) => Some(lit),
_ => None,
}
}
@ -78,12 +77,12 @@ impl NestedMetaItem {
}
/// Returns a name and single literal value tuple of the `MetaItem`.
pub fn name_value_literal(&self) -> Option<(Symbol, &Lit)> {
pub fn name_value_literal(&self) -> Option<(Symbol, &MetaItemLit)> {
self.meta_item().and_then(|meta_item| {
meta_item.meta_item_list().and_then(|meta_item_list| {
if meta_item_list.len() == 1
&& let Some(ident) = meta_item.ident()
&& let Some(lit) = meta_item_list[0].literal()
&& let Some(lit) = meta_item_list[0].lit()
{
return Some((ident.name, lit));
}
@ -174,10 +173,12 @@ impl MetaItem {
self.ident().unwrap_or_else(Ident::empty).name
}
// Example:
// #[attribute(name = "value")]
// ^^^^^^^^^^^^^^
pub fn name_value_literal(&self) -> Option<&Lit> {
/// ```text
/// Example:
/// #[attribute(name = "value")]
/// ^^^^^^^^^^^^^^
/// ```
pub fn name_value_literal(&self) -> Option<&MetaItemLit> {
match &self.kind {
MetaItemKind::NameValue(v) => Some(v),
_ => None,
@ -221,11 +222,7 @@ impl AttrItem {
}
pub fn meta(&self, span: Span) -> Option<MetaItem> {
Some(MetaItem {
path: self.path.clone(),
kind: MetaItemKind::from_attr_args(&self.args)?,
span,
})
Some(MetaItem { path: self.path.clone(), kind: self.meta_kind()?, span })
}
pub fn meta_kind(&self) -> Option<MetaItemKind> {
@ -327,26 +324,13 @@ impl Attribute {
/* Constructors */
pub fn mk_name_value_item_str(ident: Ident, str: Symbol, str_span: Span) -> MetaItem {
let lit_kind = LitKind::Str(str, ast::StrStyle::Cooked);
mk_name_value_item(ident, lit_kind, str_span)
mk_name_value_item(ident, LitKind::Str(str, ast::StrStyle::Cooked), str_span)
}
pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
let lit = Lit::from_lit_kind(lit_kind, lit_span);
pub fn mk_name_value_item(ident: Ident, kind: LitKind, lit_span: Span) -> MetaItem {
let lit = MetaItemLit { token_lit: kind.to_token_lit(), kind, span: lit_span };
let span = ident.span.to(lit_span);
MetaItem { path: Path::from_ident(ident), span, kind: MetaItemKind::NameValue(lit) }
}
pub fn mk_list_item(ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::List(items) }
}
pub fn mk_word_item(ident: Ident) -> MetaItem {
MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::Word }
}
pub fn mk_nested_word_item(ident: Ident) -> NestedMetaItem {
NestedMetaItem::MetaItem(mk_word_item(ident))
MetaItem { path: Path::from_ident(ident), kind: MetaItemKind::NameValue(lit), span }
}
pub struct AttrIdGenerator(WorkerLocal<Cell<u32>>);
@ -404,21 +388,58 @@ pub fn mk_attr_from_item(
span: Span,
) -> Attribute {
Attribute {
kind: AttrKind::Normal(P(ast::NormalAttr { item, tokens })),
kind: AttrKind::Normal(P(NormalAttr { item, tokens })),
id: g.mk_attr_id(),
style,
span,
}
}
/// Returns an inner attribute with the given value and span.
pub fn mk_attr_inner(g: &AttrIdGenerator, item: MetaItem) -> Attribute {
mk_attr(g, AttrStyle::Inner, item.path, item.kind.attr_args(item.span), item.span)
pub fn mk_attr_word(g: &AttrIdGenerator, style: AttrStyle, name: Symbol, span: Span) -> Attribute {
let path = Path::from_ident(Ident::new(name, span));
let args = AttrArgs::Empty;
mk_attr(g, style, path, args, span)
}
/// Returns an outer attribute with the given value and span.
pub fn mk_attr_outer(g: &AttrIdGenerator, item: MetaItem) -> Attribute {
mk_attr(g, AttrStyle::Outer, item.path, item.kind.attr_args(item.span), item.span)
pub fn mk_attr_name_value_str(
g: &AttrIdGenerator,
style: AttrStyle,
name: Symbol,
val: Symbol,
span: Span,
) -> Attribute {
let lit = LitKind::Str(val, StrStyle::Cooked).to_token_lit();
let expr = P(Expr {
id: DUMMY_NODE_ID,
kind: ExprKind::Lit(lit),
span,
attrs: AttrVec::new(),
tokens: None,
});
let path = Path::from_ident(Ident::new(name, span));
let args = AttrArgs::Eq(span, AttrArgsEq::Ast(expr));
mk_attr(g, style, path, args, span)
}
pub fn mk_attr_nested_word(
g: &AttrIdGenerator,
style: AttrStyle,
outer: Symbol,
inner: Symbol,
span: Span,
) -> Attribute {
let inner_tokens = TokenStream::new(vec![TokenTree::Token(
Token::from_ast_ident(Ident::new(inner, span)),
Spacing::Alone,
)]);
let outer_ident = Ident::new(outer, span);
let path = Path::from_ident(outer_ident);
let attr_args = AttrArgs::Delimited(DelimArgs {
dspan: DelimSpan::from_single(span),
delim: MacDelimiter::Parenthesis,
tokens: inner_tokens,
});
mk_attr(g, style, path, attr_args, span)
}
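For orientation, the surface syntax that the helper above assembles for `(sym::allow, sym::unreachable_code)` (the combination this commit uses later during HIR lowering) looks like the attribute below; the function it is attached to is only a placeholder:

// Surface form of the attribute built by
// `mk_attr_nested_word(g, AttrStyle::Outer, sym::allow, sym::unreachable_code, span)`.
#[allow(unreachable_code)]
fn lowered_arm_placeholder() -> i32 {
    return 1;
    42 // unreachable; the attribute above suppresses the lint
}

fn main() {
    println!("{}", lowered_arm_placeholder());
}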
pub fn mk_doc_comment(
@ -436,23 +457,6 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
}
impl MetaItem {
fn token_trees(&self) -> Vec<TokenTree> {
let mut idents = vec![];
let mut last_pos = BytePos(0_u32);
for (i, segment) in self.path.segments.iter().enumerate() {
let is_first = i == 0;
if !is_first {
let mod_sep_span =
Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt(), None);
idents.push(TokenTree::token_alone(token::ModSep, mod_sep_span));
}
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident), Spacing::Alone));
last_pos = segment.ident.span.hi();
}
idents.extend(self.kind.token_trees(self.span));
idents
}
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where
I: Iterator<Item = TokenTree>,
@ -524,62 +528,6 @@ impl MetaItemKind {
}
}
pub fn attr_args(&self, span: Span) -> AttrArgs {
match self {
MetaItemKind::Word => AttrArgs::Empty,
MetaItemKind::NameValue(lit) => {
let expr = P(ast::Expr {
id: ast::DUMMY_NODE_ID,
kind: ast::ExprKind::Lit(lit.token_lit.clone()),
span: lit.span,
attrs: ast::AttrVec::new(),
tokens: None,
});
AttrArgs::Eq(span, AttrArgsEq::Ast(expr))
}
MetaItemKind::List(list) => {
let mut tts = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tts.push(TokenTree::token_alone(token::Comma, span));
}
tts.extend(item.token_trees())
}
AttrArgs::Delimited(DelimArgs {
dspan: DelimSpan::from_single(span),
delim: MacDelimiter::Parenthesis,
tokens: TokenStream::new(tts),
})
}
}
}
fn token_trees(&self, span: Span) -> Vec<TokenTree> {
match self {
MetaItemKind::Word => vec![],
MetaItemKind::NameValue(lit) => {
vec![
TokenTree::token_alone(token::Eq, span),
TokenTree::Token(lit.to_token(), Spacing::Alone),
]
}
MetaItemKind::List(list) => {
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tokens.push(TokenTree::token_alone(token::Comma, span));
}
tokens.extend(item.token_trees())
}
vec![TokenTree::Delimited(
DelimSpan::from_single(span),
Delimiter::Parenthesis,
TokenStream::new(tokens),
)]
}
}
}
fn list_from_tokens(tokens: TokenStream) -> Option<MetaItemKind> {
let mut tokens = tokens.into_trees().peekable();
let mut result = Vec::new();
@ -602,7 +550,7 @@ impl MetaItemKind {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
}
Some(TokenTree::Token(token, _)) => {
Lit::from_token(&token).map(MetaItemKind::NameValue)
MetaItemLit::from_token(&token).map(MetaItemKind::NameValue)
}
_ => None,
}
@ -618,9 +566,9 @@ impl MetaItemKind {
}) => MetaItemKind::list_from_tokens(tokens.clone()),
AttrArgs::Delimited(..) => None,
AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => match expr.kind {
ast::ExprKind::Lit(token_lit) => {
ExprKind::Lit(token_lit) => {
// Turn failures to `None`, we'll get parse errors elsewhere.
Lit::from_token_lit(token_lit, expr.span)
MetaItemLit::from_token_lit(token_lit, expr.span)
.ok()
.map(|lit| MetaItemKind::NameValue(lit))
}
@ -653,16 +601,7 @@ impl NestedMetaItem {
pub fn span(&self) -> Span {
match self {
NestedMetaItem::MetaItem(item) => item.span,
NestedMetaItem::Literal(lit) => lit.span,
}
}
fn token_trees(&self) -> Vec<TokenTree> {
match self {
NestedMetaItem::MetaItem(item) => item.token_trees(),
NestedMetaItem::Literal(lit) => {
vec![TokenTree::Token(lit.to_token(), Spacing::Alone)]
}
NestedMetaItem::Lit(lit) => lit.span,
}
}
@ -672,10 +611,10 @@ impl NestedMetaItem {
{
match tokens.peek() {
Some(TokenTree::Token(token, _))
if let Some(lit) = Lit::from_token(token) =>
if let Some(lit) = MetaItemLit::from_token(token) =>
{
tokens.next();
return Some(NestedMetaItem::Literal(lit));
return Some(NestedMetaItem::Lit(lit));
}
Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();


@ -410,11 +410,7 @@ pub fn noop_visit_use_tree<T: MutVisitor>(use_tree: &mut UseTree, vis: &mut T) {
let UseTree { prefix, kind, span } = use_tree;
vis.visit_path(prefix);
match kind {
UseTreeKind::Simple(rename, id1, id2) => {
visit_opt(rename, |rename| vis.visit_ident(rename));
vis.visit_id(id1);
vis.visit_id(id2);
}
UseTreeKind::Simple(rename) => visit_opt(rename, |rename| vis.visit_ident(rename)),
UseTreeKind::Nested(items) => {
for (tree, id) in items {
vis.visit_use_tree(tree);
@ -628,7 +624,7 @@ pub fn noop_visit_macro_def<T: MutVisitor>(macro_def: &mut MacroDef, vis: &mut T
pub fn noop_visit_meta_list_item<T: MutVisitor>(li: &mut NestedMetaItem, vis: &mut T) {
match li {
NestedMetaItem::MetaItem(mi) => vis.visit_meta_item(mi),
NestedMetaItem::Literal(_lit) => {}
NestedMetaItem::Lit(_lit) => {}
}
}
@ -725,10 +721,10 @@ pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyAttrTokenStream>,
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}
/// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
/// In practice the ident part is not actually used by specific visitors right now,
/// but there's a test below checking that it works.
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,
// but there's a test below checking that it works.
pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
let Token { kind, span } = t;
match kind {


@ -302,9 +302,9 @@ impl TokenKind {
Literal(Lit::new(kind, symbol, suffix))
}
// An approximation to proc-macro-style single-character operators used by rustc parser.
// If the operator token can be broken into two tokens, the first of which is single-character,
// then this function performs that operation, otherwise it returns `None`.
/// An approximation to proc-macro-style single-character operators used by rustc parser.
/// If the operator token can be broken into two tokens, the first of which is single-character,
/// then this function performs that operation, otherwise it returns `None`.
pub fn break_two_token_op(&self) -> Option<(TokenKind, TokenKind)> {
Some(match *self {
Le => (Lt, Eq),
@ -538,10 +538,10 @@ impl Token {
}
}
// A convenience function for matching on identifiers during parsing.
// Turns interpolated identifier (`$i: ident`) or lifetime (`$l: lifetime`) token
// into the regular identifier or lifetime token it refers to,
// otherwise returns the original token.
/// A convenience function for matching on identifiers during parsing.
/// Turns interpolated identifier (`$i: ident`) or lifetime (`$l: lifetime`) token
/// into the regular identifier or lifetime token it refers to,
/// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind {
Interpolated(nt) => match **nt {
@ -621,7 +621,7 @@ impl Token {
false
}
// Is the token an interpolated block (`$b:block`)?
/// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool {
if let Interpolated(nt) = &self.kind && let NtBlock(..) = **nt {
return true;
@ -665,8 +665,8 @@ impl Token {
self.is_non_raw_ident_where(Ident::is_path_segment_keyword)
}
// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
/// Returns true for reserved identifiers used internally for elided lifetimes,
/// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
self.is_non_raw_ident_where(Ident::is_special)
}
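To show where the interpolated tokens mentioned in `uninterpolate` above come from, here is a standalone macro-by-example sketch; inside the expansion, `$name` is carried as an interpolated identifier token until the parser turns it back into a plain identifier:

macro_rules! make_getter {
    // `$name:ident` is captured and later substituted as an interpolated identifier token.
    ($name:ident) => {
        fn $name() -> i32 {
            1
        }
    };
}

make_getter!(answer);

fn main() {
    println!("{}", answer());
}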


@ -86,12 +86,12 @@ impl TokenTree {
}
}
// Create a `TokenTree::Token` with alone spacing.
/// Create a `TokenTree::Token` with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Alone)
}
// Create a `TokenTree::Token` with joint spacing.
/// Create a `TokenTree::Token` with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Joint)
}
@ -413,17 +413,17 @@ impl TokenStream {
TokenStream(Lrc::new(self.0.iter().enumerate().map(|(i, tree)| f(i, tree)).collect()))
}
// Create a token stream containing a single token with alone spacing.
/// Create a token stream containing a single token with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}
// Create a token stream containing a single token with joint spacing.
/// Create a token stream containing a single token with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_joint(kind, span)])
}
// Create a token stream containing a single `Delimited`.
/// Create a token stream containing a single `Delimited`.
pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
}
@ -522,8 +522,8 @@ impl TokenStream {
}
}
// Push `tt` onto the end of the stream, possibly gluing it to the last
// token. Uses `make_mut` to maximize efficiency.
/// Push `tt` onto the end of the stream, possibly gluing it to the last
/// token. Uses `make_mut` to maximize efficiency.
pub fn push_tree(&mut self, tt: TokenTree) {
let vec_mut = Lrc::make_mut(&mut self.0);
@ -534,9 +534,9 @@ impl TokenStream {
}
}
// Push `stream` onto the end of the stream, possibly gluing the first
// token tree to the last token. (No other token trees will be glued.)
// Uses `make_mut` to maximize efficiency.
/// Push `stream` onto the end of the stream, possibly gluing the first
/// token tree to the last token. (No other token trees will be glued.)
/// Uses `make_mut` to maximize efficiency.
pub fn push_stream(&mut self, stream: TokenStream) {
let vec_mut = Lrc::make_mut(&mut self.0);


@ -1,8 +1,7 @@
//! Code related to parsing literals.
use crate::ast::{self, Lit, LitKind};
use crate::ast::{self, LitKind, MetaItemLit};
use crate::token::{self, Token};
use rustc_data_structures::sync::Lrc;
use rustc_lexer::unescape::{byte_from_char, unescape_byte, unescape_char, unescape_literal, Mode};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
@ -196,39 +195,16 @@ impl LitKind {
}
}
impl Lit {
/// Converts literal token into an AST literal.
pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
impl MetaItemLit {
/// Converts a token literal into a meta item literal.
pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<MetaItemLit, LitError> {
Ok(MetaItemLit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
}
/// Converts an arbitrary token into an AST literal.
pub fn from_token(token: &Token) -> Option<Lit> {
/// Converts an arbitrary token into a meta item literal.
pub fn from_token(token: &Token) -> Option<MetaItemLit> {
token::Lit::from_token(token)
.and_then(|token_lit| Lit::from_token_lit(token_lit, token.span).ok())
}
/// Attempts to recover an AST literal from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
Lit { token_lit: kind.to_token_lit(), kind, span }
}
/// Recovers an AST literal from a string of bytes produced by `include_bytes!`.
/// This requires ASCII-escaping the string, which can result in poor performance
/// for very large strings of bytes.
pub fn from_included_bytes(bytes: &Lrc<[u8]>, span: Span) -> Lit {
Self::from_lit_kind(LitKind::ByteStr(bytes.clone()), span)
}
/// Losslessly convert an AST literal into a token.
pub fn to_token(&self) -> Token {
let kind = match self.token_lit.kind {
token::Bool => token::Ident(self.token_lit.symbol, false),
_ => token::Literal(self.token_lit),
};
Token::new(kind, self.span)
.and_then(|token_lit| MetaItemLit::from_token_lit(token_lit, token.span).ok())
}
}
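A standalone illustration of the two forms the struct above keeps for one literal: the `token_lit` text as written in source versus the unescaped value stored in `LitKind`. The strings are illustrative only:

fn main() {
    // Roughly what the token form preserves: the literal as written, escape still spelled out.
    let as_written = r#""hello\nworld""#;
    // Roughly what the semantic form (`LitKind::Str`) stores: the unescaped value.
    let unescaped = "hello\nworld";
    println!("as written: {as_written}");
    println!("unescaped:  {unescaped}");
}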


@ -439,7 +439,7 @@ pub fn walk_path<'a, V: Visitor<'a>>(visitor: &mut V, path: &'a Path) {
pub fn walk_use_tree<'a, V: Visitor<'a>>(visitor: &mut V, use_tree: &'a UseTree, id: NodeId) {
visitor.visit_path(&use_tree.prefix, id);
match &use_tree.kind {
UseTreeKind::Simple(rename, ..) => {
UseTreeKind::Simple(rename) => {
// The extra IDs are handled during HIR lowering.
if let &Some(rename) = rename {
visitor.visit_ident(rename);


@ -1606,16 +1606,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
};
// `#[allow(unreachable_code)]`
let attr = {
// `allow(unreachable_code)`
let allow = {
let allow_ident = Ident::new(sym::allow, self.lower_span(span));
let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
let uc_nested = attr::mk_nested_word_item(uc_ident);
attr::mk_list_item(allow_ident, vec![uc_nested])
};
attr::mk_attr_outer(&self.tcx.sess.parse_sess.attr_id_generator, allow)
};
let attr = attr::mk_attr_nested_word(
&self.tcx.sess.parse_sess.attr_id_generator,
AttrStyle::Outer,
sym::allow,
sym::unreachable_code,
self.lower_span(span),
);
let attrs: AttrVec = thin_vec![attr];
// `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`


@ -77,7 +77,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
if hir_id.owner != self.owner {
span_bug!(
span,
"inconsistent DepNode at `{:?}` for `{:?}`: \
"inconsistent HirId at `{:?}` for `{:?}`: \
current_dep_node_owner={} ({:?}), hir_id.owner={} ({:?})",
self.source_map.span_to_diagnostic_string(span),
node,


@ -19,7 +19,6 @@ use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, Symbol};
use rustc_target::spec::abi;
use smallvec::{smallvec, SmallVec};
use std::iter;
use thin_vec::ThinVec;
pub(super) struct ItemLowerer<'a, 'hir> {
@ -179,36 +178,22 @@ impl<'hir> LoweringContext<'_, 'hir> {
let mut node_ids =
smallvec![hir::ItemId { owner_id: hir::OwnerId { def_id: self.local_def_id(i.id) } }];
if let ItemKind::Use(use_tree) = &i.kind {
self.lower_item_id_use_tree(use_tree, i.id, &mut node_ids);
self.lower_item_id_use_tree(use_tree, &mut node_ids);
}
node_ids
}
fn lower_item_id_use_tree(
&mut self,
tree: &UseTree,
base_id: NodeId,
vec: &mut SmallVec<[hir::ItemId; 1]>,
) {
fn lower_item_id_use_tree(&mut self, tree: &UseTree, vec: &mut SmallVec<[hir::ItemId; 1]>) {
match &tree.kind {
UseTreeKind::Nested(nested_vec) => {
for &(ref nested, id) in nested_vec {
vec.push(hir::ItemId {
owner_id: hir::OwnerId { def_id: self.local_def_id(id) },
});
self.lower_item_id_use_tree(nested, id, vec);
}
}
UseTreeKind::Glob => {}
UseTreeKind::Simple(_, id1, id2) => {
for (_, id) in
iter::zip(self.expect_full_res_from_use(base_id).skip(1), [*id1, *id2])
{
vec.push(hir::ItemId {
owner_id: hir::OwnerId { def_id: self.local_def_id(id) },
});
self.lower_item_id_use_tree(nested, vec);
}
}
UseTreeKind::Simple(..) | UseTreeKind::Glob => {}
}
}
@ -489,7 +474,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let segments = prefix.segments.iter().chain(path.segments.iter()).cloned().collect();
match tree.kind {
UseTreeKind::Simple(rename, id1, id2) => {
UseTreeKind::Simple(rename) => {
*ident = tree.ident();
// First, apply the prefix to the path.
@ -505,66 +490,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
let mut resolutions = self.expect_full_res_from_use(id).fuse();
// We want to return *something* from this function, so hold onto the first item
// for later.
let ret_res = self.lower_res(resolutions.next().unwrap_or(Res::Err));
// Here, we are looping over namespaces, if they exist for the definition
// being imported. We only handle type and value namespaces because we
// won't be dealing with macros in the rest of the compiler.
// Essentially a single `use` which imports two names is desugared into
// two imports.
for new_node_id in [id1, id2] {
let new_id = self.local_def_id(new_node_id);
let Some(res) = resolutions.next() else {
debug_assert!(self.children.iter().find(|(id, _)| id == &new_id).is_none());
// Associate an HirId to both ids even if there is no resolution.
self.children.push((
new_id,
hir::MaybeOwner::NonOwner(hir::HirId::make_owner(new_id))),
);
continue;
};
let ident = *ident;
let mut path = path.clone();
for seg in &mut path.segments {
// Give the cloned segment the same resolution information
// as the old one (this is needed for stability checking).
let new_id = self.next_node_id();
self.resolver.clone_res(seg.id, new_id);
seg.id = new_id;
}
let span = path.span;
self.with_hir_id_owner(new_node_id, |this| {
let res = this.lower_res(res);
let path = this.lower_path_extra(res, &path, ParamMode::Explicit);
let kind = hir::ItemKind::Use(path, hir::UseKind::Single);
if let Some(attrs) = attrs {
this.attrs.insert(hir::ItemLocalId::new(0), attrs);
}
let item = hir::Item {
owner_id: hir::OwnerId { def_id: new_id },
ident: this.lower_ident(ident),
kind,
vis_span,
span: this.lower_span(span),
};
hir::OwnerNode::Item(this.arena.alloc(item))
});
}
let path = self.lower_path_extra(ret_res, &path, ParamMode::Explicit);
let res =
self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
let path = self.lower_use_path(res, &path, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::Single)
}
UseTreeKind::Glob => {
let path = self.lower_path(
id,
&Path { segments, span: path.span, tokens: None },
ParamMode::Explicit,
);
let res = self.expect_full_res(id);
let res = smallvec![self.lower_res(res)];
let path = Path { segments, span: path.span, tokens: None };
let path = self.lower_use_path(res, &path, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::Glob)
}
UseTreeKind::Nested(ref trees) => {
@ -634,9 +569,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
});
}
let res = self.expect_full_res_from_use(id).next().unwrap_or(Res::Err);
let res = self.lower_res(res);
let path = self.lower_path_extra(res, &prefix, ParamMode::Explicit);
let res =
self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
let path = self.lower_use_path(res, &prefix, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::ListStem)
}
}
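A standalone example of why a single `use` can carry more than one resolution (the situation the removed per-namespace loop handled and the collected `Res` list now represents): `Foo` below lives in both the type and the value namespace, so `use inner::Foo;` imports two names at once:

mod inner {
    // A tuple struct defines the type `Foo` and the value `Foo` (its constructor).
    pub struct Foo(pub i32);
}

use inner::Foo;

fn main() {
    // Uses both imports: `Foo` as a type annotation and as a constructor value.
    let p: Foo = Foo(42);
    println!("{}", p.0);
}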


@ -497,7 +497,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.tcx.hir().def_key(self.local_def_id(node_id)),
);
let def_id = self.tcx.create_def(parent, data);
let def_id = self.tcx.create_def(parent, data).def_id();
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
self.resolver.node_id_to_def_id.insert(node_id, def_id);
@ -948,17 +948,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => {
// In valid code the value always ends up as a single literal. Otherwise, a dummy
// literal suffices because the error is handled elsewhere.
let lit = if let ExprKind::Lit(token_lit) = expr.kind {
match Lit::from_token_lit(token_lit, expr.span) {
Ok(lit) => lit,
Err(_err) => Lit {
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err,
span: DUMMY_SP,
},
}
let lit = if let ExprKind::Lit(token_lit) = expr.kind
&& let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span)
{
lit
} else {
Lit {
MetaItemLit {
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err,
span: DUMMY_SP,


@ -12,7 +12,7 @@ use rustc_hir::GenericArg;
use rustc_span::symbol::{kw, Ident};
use rustc_span::{BytePos, Span, DUMMY_SP};
use smallvec::smallvec;
use smallvec::{smallvec, SmallVec};
impl<'a, 'hir> LoweringContext<'a, 'hir> {
#[instrument(level = "trace", skip(self))]
@ -144,13 +144,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
);
}
pub(crate) fn lower_path_extra(
pub(crate) fn lower_use_path(
&mut self,
res: Res,
res: SmallVec<[Res; 3]>,
p: &Path,
param_mode: ParamMode,
) -> &'hir hir::Path<'hir> {
self.arena.alloc(hir::Path {
) -> &'hir hir::UsePath<'hir> {
self.arena.alloc(hir::UsePath {
res,
segments: self.arena.alloc_from_iter(p.segments.iter().map(|segment| {
self.lower_path_segment(
@ -165,17 +165,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
})
}
pub(crate) fn lower_path(
&mut self,
id: NodeId,
p: &Path,
param_mode: ParamMode,
) -> &'hir hir::Path<'hir> {
let res = self.expect_full_res(id);
let res = self.lower_res(res);
self.lower_path_extra(res, p, param_mode)
}
pub(crate) fn lower_path_segment(
&mut self,
path_span: Span,


@ -209,7 +209,7 @@ impl<'a> AstValidator<'a> {
// Mirrors `visit::walk_ty`, but tracks relevant state.
fn walk_ty(&mut self, t: &'a Ty) {
match t.kind {
match &t.kind {
TyKind::ImplTrait(..) => {
self.with_impl_trait(Some(t.span), |this| visit::walk_ty(this, t))
}
@ -217,7 +217,7 @@ impl<'a> AstValidator<'a> {
.with_banned_tilde_const(DisallowTildeConstContext::TraitObject, |this| {
visit::walk_ty(this, t)
}),
TyKind::Path(ref qself, ref path) => {
TyKind::Path(qself, path) => {
// We allow these:
// - `Option<impl Trait>`
// - `option::Option<impl Trait>`
@ -231,7 +231,7 @@ impl<'a> AstValidator<'a> {
// (for cases like `<impl Trait>::Foo>`)
// but we allow `impl Trait` in `GenericArgs`
// iff there are no more PathSegments.
if let Some(ref qself) = *qself {
if let Some(qself) = qself {
// `impl Trait` in `qself` is always illegal
self.with_banned_impl_trait(|this| this.visit_ty(&qself.ty));
}
@ -738,8 +738,8 @@ impl<'a> AstValidator<'a> {
}
fn visit_ty_common(&mut self, ty: &'a Ty) {
match ty.kind {
TyKind::BareFn(ref bfty) => {
match &ty.kind {
TyKind::BareFn(bfty) => {
self.check_fn_decl(&bfty.decl, SelfSemantic::No);
Self::check_decl_no_pat(&bfty.decl, |span, _, _| {
struct_span_err!(
@ -756,10 +756,10 @@ impl<'a> AstValidator<'a> {
self.maybe_lint_missing_abi(sig_span, ty.id);
}
}
TyKind::TraitObject(ref bounds, ..) => {
TyKind::TraitObject(bounds, ..) => {
let mut any_lifetime_bounds = false;
for bound in bounds {
if let GenericBound::Outlives(ref lifetime) = *bound {
if let GenericBound::Outlives(lifetime) = bound {
if any_lifetime_bounds {
struct_span_err!(
self.session,
@ -774,7 +774,7 @@ impl<'a> AstValidator<'a> {
}
}
}
TyKind::ImplTrait(_, ref bounds) => {
TyKind::ImplTrait(_, bounds) => {
if self.is_impl_trait_banned {
struct_span_err!(
self.session,
@ -842,8 +842,8 @@ fn validate_generic_param_order(
let (kind, bounds, span) = (&param.kind, &param.bounds, ident.span);
let (ord_kind, ident) = match &param.kind {
GenericParamKind::Lifetime => (ParamKindOrd::Lifetime, ident.to_string()),
GenericParamKind::Type { default: _ } => (ParamKindOrd::TypeOrConst, ident.to_string()),
GenericParamKind::Const { ref ty, kw_span: _, default: _ } => {
GenericParamKind::Type { .. } => (ParamKindOrd::TypeOrConst, ident.to_string()),
GenericParamKind::Const { ty, .. } => {
let ty = pprust::ty_to_string(ty);
(ParamKindOrd::TypeOrConst, format!("const {}: {}", ident, ty))
}
@ -912,7 +912,7 @@ fn validate_generic_param_order(
impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_attribute(&mut self, attr: &Attribute) {
validate_attr::check_meta(&self.session.parse_sess, attr);
validate_attr::check_attr(&self.session.parse_sess, attr);
}
fn visit_expr(&mut self, expr: &'a Expr) {
@ -948,8 +948,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
ExprKind::Paren(local_expr) => {
fn has_let_expr(expr: &Expr) -> bool {
match expr.kind {
ExprKind::Binary(_, ref lhs, ref rhs) => has_let_expr(lhs) || has_let_expr(rhs),
match &expr.kind {
ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
ExprKind::Let(..) => true,
_ => false,
}
@ -1005,18 +1005,18 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_nomangle_item_asciionly(item.ident, item.span);
}
match item.kind {
match &item.kind {
ItemKind::Impl(box Impl {
unsafety,
polarity,
defaultness: _,
constness,
ref generics,
of_trait: Some(ref t),
ref self_ty,
ref items,
generics,
of_trait: Some(t),
self_ty,
items,
}) => {
self.with_in_trait_impl(true, Some(constness), |this| {
self.with_in_trait_impl(true, Some(*constness), |this| {
this.invalid_visibility(&item.vis, None);
if let TyKind::Err = self_ty.kind {
this.err_handler()
@ -1027,7 +1027,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
.help("use `auto trait Trait {}` instead")
.emit();
}
if let (Unsafe::Yes(span), ImplPolarity::Negative(sp)) = (unsafety, polarity) {
if let (&Unsafe::Yes(span), &ImplPolarity::Negative(sp)) = (unsafety, polarity)
{
struct_span_err!(
this.session,
sp.to(t.path.span),
@ -1061,7 +1062,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
constness,
generics: _,
of_trait: None,
ref self_ty,
self_ty,
items: _,
}) => {
let error = |annotation_span, annotation| {
@ -1078,25 +1079,25 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
&item.vis,
Some(InvalidVisibilityNote::IndividualImplItems),
);
if let Unsafe::Yes(span) = unsafety {
if let &Unsafe::Yes(span) = unsafety {
error(span, "unsafe").code(error_code!(E0197)).emit();
}
if let ImplPolarity::Negative(span) = polarity {
if let &ImplPolarity::Negative(span) = polarity {
error(span, "negative").emit();
}
if let Defaultness::Default(def_span) = defaultness {
if let &Defaultness::Default(def_span) = defaultness {
error(def_span, "`default`")
.note("only trait implementations may be annotated with `default`")
.emit();
}
if let Const::Yes(span) = constness {
if let &Const::Yes(span) = constness {
error(span, "`const`")
.note("only trait implementations may be annotated with `const`")
.emit();
}
}
ItemKind::Fn(box Fn { defaultness, ref sig, ref generics, ref body }) => {
self.check_defaultness(item.span, defaultness);
ItemKind::Fn(box Fn { defaultness, sig, generics, body }) => {
self.check_defaultness(item.span, *defaultness);
if body.is_none() {
self.session.emit_err(FnWithoutBody {
@ -1132,7 +1133,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
&item.vis,
Some(InvalidVisibilityNote::IndividualForeignItems),
);
if let Unsafe::Yes(span) = unsafety {
if let &Unsafe::Yes(span) = unsafety {
self.err_handler().span_err(span, "extern block cannot be declared unsafe");
}
if abi.is_none() {
@ -1142,7 +1143,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.extern_mod = old_item;
return; // Avoid visiting again.
}
ItemKind::Enum(ref def, _) => {
ItemKind::Enum(def, _) => {
for variant in &def.variants {
self.invalid_visibility(&variant.vis, None);
for field in variant.data.fields() {
@ -1150,8 +1151,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
}
}
ItemKind::Trait(box Trait { is_auto, ref generics, ref bounds, ref items, .. }) => {
if is_auto == IsAuto::Yes {
ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. }) => {
if *is_auto == IsAuto::Yes {
// Auto traits cannot have generics, super traits nor contain items.
self.deny_generic_params(generics, item.ident.span);
self.deny_super_traits(bounds, item.ident.span);
@ -1171,8 +1172,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
walk_list!(self, visit_attribute, &item.attrs);
return; // Avoid visiting again
}
ItemKind::Mod(unsafety, ref mod_kind) => {
if let Unsafe::Yes(span) = unsafety {
ItemKind::Mod(unsafety, mod_kind) => {
if let &Unsafe::Yes(span) = unsafety {
self.err_handler().span_err(span, "module cannot be declared unsafe");
}
// Ensure that `path` attributes on modules are recorded as used (cf. issue #35584).
@ -1182,13 +1183,13 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_mod_file_item_asciionly(item.ident);
}
}
ItemKind::Union(ref vdata, ..) => {
ItemKind::Union(vdata, ..) => {
if vdata.fields().is_empty() {
self.err_handler().span_err(item.span, "unions cannot have zero fields");
}
}
ItemKind::Const(def, .., None) => {
self.check_defaultness(item.span, def);
self.check_defaultness(item.span, *def);
self.session.emit_err(ConstWithoutBody {
span: item.span,
replace_span: self.ending_semi_or_hi(item.span),
@ -1200,14 +1201,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
replace_span: self.ending_semi_or_hi(item.span),
});
}
ItemKind::TyAlias(box TyAlias {
defaultness,
where_clauses,
ref bounds,
ref ty,
..
}) => {
self.check_defaultness(item.span, defaultness);
ItemKind::TyAlias(box TyAlias { defaultness, where_clauses, bounds, ty, .. }) => {
self.check_defaultness(item.span, *defaultness);
if ty.is_none() {
self.session.emit_err(TyAliasWithoutBody {
span: item.span,
@ -1266,8 +1261,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
// Mirrors `visit::walk_generic_args`, but tracks relevant state.
fn visit_generic_args(&mut self, generic_args: &'a GenericArgs) {
match *generic_args {
GenericArgs::AngleBracketed(ref data) => {
match generic_args {
GenericArgs::AngleBracketed(data) => {
self.check_generic_args_before_constraints(data);
for arg in &data.args {
@ -1283,7 +1278,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
}
}
GenericArgs::Parenthesized(ref data) => {
GenericArgs::Parenthesized(data) => {
walk_list!(self, visit_ty, &data.inputs);
if let FnRetTy::Ty(ty) = &data.output {
// `-> Foo` syntax is essentially an associated type binding,
@ -1319,7 +1314,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
validate_generic_param_order(self.err_handler(), &generics.params, generics.span);
for predicate in &generics.where_clause.predicates {
if let WherePredicate::EqPredicate(ref predicate) = *predicate {
if let WherePredicate::EqPredicate(predicate) = predicate {
deny_equality_constraints(self, predicate, generics);
}
}
@ -1368,7 +1363,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
fn visit_param_bound(&mut self, bound: &'a GenericBound, ctxt: BoundKind) {
if let GenericBound::Trait(ref poly, modify) = *bound {
if let GenericBound::Trait(poly, modify) = bound {
match (ctxt, modify) {
(BoundKind::SuperTraits, TraitBoundModifier::Maybe) => {
let mut err = self
@ -1573,8 +1568,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_item_named(item.ident, "const");
}
match item.kind {
AssocItemKind::Type(box TyAlias { ref generics, ref bounds, ref ty, .. })
match &item.kind {
AssocItemKind::Type(box TyAlias { generics, bounds, ty, .. })
if ctxt == AssocCtxt::Trait =>
{
self.visit_vis(&item.vis);
@ -1586,7 +1581,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
});
walk_list!(self, visit_ty, ty);
}
AssocItemKind::Fn(box Fn { ref sig, ref generics, ref body, .. })
AssocItemKind::Fn(box Fn { sig, generics, body, .. })
if self.in_const_trait_impl
|| ctxt == AssocCtxt::Trait
|| matches!(sig.header.constness, Const::Yes(_)) =>


@ -198,8 +198,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_item(&mut self, i: &'a ast::Item) {
match i.kind {
ast::ItemKind::ForeignMod(ref foreign_module) => {
match &i.kind {
ast::ItemKind::ForeignMod(foreign_module) => {
if let Some(abi) = foreign_module.abi {
self.check_abi(abi, ast::Const::No);
}
@ -233,8 +233,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
}
ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, ref of_trait, .. }) => {
if let ast::ImplPolarity::Negative(span) = polarity {
ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, of_trait, .. }) => {
if let &ast::ImplPolarity::Negative(span) = polarity {
gate_feature_post!(
&self,
negative_impls,
@ -267,7 +267,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
gate_feature_post!(&self, decl_macro, i.span, msg);
}
ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ref ty), .. }) => {
ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => {
self.check_impl_trait(&ty)
}
@ -302,8 +302,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_ty(&mut self, ty: &'a ast::Ty) {
match ty.kind {
ast::TyKind::BareFn(ref bare_fn_ty) => {
match &ty.kind {
ast::TyKind::BareFn(bare_fn_ty) => {
// Function pointers cannot be `const`
self.check_extern(bare_fn_ty.ext, ast::Const::No);
}
@ -319,7 +319,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FnRetTy) {
if let ast::FnRetTy::Ty(ref output_ty) = *ret_ty {
if let ast::FnRetTy::Ty(output_ty) = ret_ty {
if let ast::TyKind::Never = output_ty.kind {
// Do nothing.
} else {
@ -455,9 +455,9 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) {
let is_fn = match i.kind {
let is_fn = match &i.kind {
ast::AssocItemKind::Fn(_) => true,
ast::AssocItemKind::Type(box ast::TyAlias { ref ty, .. }) => {
ast::AssocItemKind::Type(box ast::TyAlias { ty, .. }) => {
if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) {
gate_feature_post!(
&self,


@ -36,8 +36,8 @@ impl Printer {
self.nbsp()
}
// Synthesizes a comment that was not textually present in the original
// source file.
/// Synthesizes a comment that was not textually present in the original
/// source file.
pub fn synth_comment(&mut self, text: impl Into<Cow<'static, str>>) {
self.word("/*");
self.space();


@ -11,7 +11,7 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::util::classify;
use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
use rustc_ast::util::parser;
use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, BlockCheckMode, Mutability, PatKind};
use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, BlockCheckMode, PatKind};
use rustc_ast::{attr, BindingAnnotation, ByRef, DelimArgs, RangeEnd, RangeSyntax, Term};
use rustc_ast::{GenericArg, GenericBound, SelfKind, TraitBoundModifier};
use rustc_ast::{InlineAsmOperand, InlineAsmRegOrRegClass};
@ -19,7 +19,7 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_span::edition::Edition;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{kw, sym, Ident, IdentPrinter, Symbol};
use rustc_span::{BytePos, FileName, Span};
use rustc_span::{BytePos, FileName, Span, DUMMY_SP};
use rustc_ast::attr::AttrIdGenerator;
use std::borrow::Cow;
@ -64,6 +64,7 @@ impl<'a> Comments<'a> {
Comments { sm, comments, current: 0 }
}
// FIXME: This shouldn't probably clone lmao
pub fn next(&self) -> Option<Comment> {
self.comments.get(self.current).cloned()
}
@ -119,17 +120,20 @@ pub fn print_crate<'a>(
// of the feature gate, so we fake them up here.
// `#![feature(prelude_import)]`
let pi_nested = attr::mk_nested_word_item(Ident::with_dummy_span(sym::prelude_import));
let list = attr::mk_list_item(Ident::with_dummy_span(sym::feature), vec![pi_nested]);
let fake_attr = attr::mk_attr_inner(g, list);
let fake_attr = attr::mk_attr_nested_word(
g,
ast::AttrStyle::Inner,
sym::feature,
sym::prelude_import,
DUMMY_SP,
);
s.print_attribute(&fake_attr);
// Currently, in Rust 2018 we don't have `extern crate std;` at the crate
// root, so this is not needed, and actually breaks things.
if edition == Edition::Edition2015 {
// `#![no_std]`
let no_std_meta = attr::mk_word_item(Ident::with_dummy_span(sym::no_std));
let fake_attr = attr::mk_attr_inner(g, no_std_meta);
let fake_attr = attr::mk_attr_word(g, ast::AttrStyle::Inner, sym::no_std, DUMMY_SP);
s.print_attribute(&fake_attr);
}
}
@ -268,10 +272,10 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn maybe_print_comment(&mut self, pos: BytePos) -> bool {
let mut has_comment = false;
while let Some(ref cmnt) = self.next_comment() {
while let Some(cmnt) = self.next_comment() {
if cmnt.pos < pos {
has_comment = true;
self.print_comment(cmnt);
self.print_comment(&cmnt);
} else {
break;
}
@ -366,12 +370,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
if self.next_comment().is_none() {
self.hardbreak();
}
while let Some(ref cmnt) = self.next_comment() {
self.print_comment(cmnt)
while let Some(cmnt) = self.next_comment() {
self.print_comment(&cmnt)
}
}
fn print_literal(&mut self, lit: &ast::Lit) {
fn print_meta_item_lit(&mut self, lit: &ast::MetaItemLit) {
self.print_token_literal(lit.token_lit, lit.span)
}
@ -446,8 +450,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.hardbreak_if_not_bol();
}
self.maybe_print_comment(attr.span.lo());
match attr.kind {
ast::AttrKind::Normal(ref normal) => {
match &attr.kind {
ast::AttrKind::Normal(normal) => {
match attr.style {
ast::AttrStyle::Inner => self.word("#!["),
ast::AttrStyle::Outer => self.word("#["),
@ -456,7 +460,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.word("]");
}
ast::AttrKind::DocComment(comment_kind, data) => {
self.word(doc_comment_to_string(comment_kind, attr.style, data));
self.word(doc_comment_to_string(*comment_kind, attr.style, *data));
self.hardbreak()
}
}
@ -488,7 +492,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
let token_str = self.literal_to_string(lit);
let token_str = self.meta_item_lit_to_string(lit);
self.word(token_str);
}
}
@ -497,22 +501,22 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) {
match item {
ast::NestedMetaItem::MetaItem(ref mi) => self.print_meta_item(mi),
ast::NestedMetaItem::Literal(ref lit) => self.print_literal(lit),
ast::NestedMetaItem::MetaItem(mi) => self.print_meta_item(mi),
ast::NestedMetaItem::Lit(lit) => self.print_meta_item_lit(lit),
}
}
fn print_meta_item(&mut self, item: &ast::MetaItem) {
self.ibox(INDENT_UNIT);
match item.kind {
match &item.kind {
ast::MetaItemKind::Word => self.print_path(&item.path, false, 0),
ast::MetaItemKind::NameValue(ref value) => {
ast::MetaItemKind::NameValue(value) => {
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
self.print_literal(value);
self.print_meta_item_lit(value);
}
ast::MetaItemKind::List(ref items) => {
ast::MetaItemKind::List(items) => {
self.print_path(&item.path, false, 0);
self.popen();
self.commasep(Consistent, &items, |s, i| s.print_meta_list_item(i));
@ -657,7 +661,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_path_segment(&mut self, segment: &ast::PathSegment, colons_before_params: bool) {
if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident);
if let Some(ref args) = segment.args {
if let Some(args) = &segment.args {
self.print_generic_args(args, colons_before_params);
}
}
@ -712,19 +716,19 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => self.expr_to_string(e),
token::NtMeta(ref e) => self.attr_item_to_string(e),
token::NtTy(ref e) => self.ty_to_string(e),
token::NtPath(ref e) => self.path_to_string(e),
token::NtItem(ref e) => self.item_to_string(e),
token::NtBlock(ref e) => self.block_to_string(e),
token::NtStmt(ref e) => self.stmt_to_string(e),
token::NtPat(ref e) => self.pat_to_string(e),
token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
match nt {
token::NtExpr(e) => self.expr_to_string(e),
token::NtMeta(e) => self.attr_item_to_string(e),
token::NtTy(e) => self.ty_to_string(e),
token::NtPath(e) => self.path_to_string(e),
token::NtItem(e) => self.item_to_string(e),
token::NtBlock(e) => self.block_to_string(e),
token::NtStmt(e) => self.stmt_to_string(e),
token::NtPat(e) => self.pat_to_string(e),
token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(),
token::NtLifetime(e) => e.to_string(),
token::NtLiteral(ref e) => self.expr_to_string(e),
token::NtVis(ref e) => self.vis_to_string(e),
token::NtLiteral(e) => self.expr_to_string(e),
token::NtVis(e) => self.vis_to_string(e),
}
}
@ -825,8 +829,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Self::to_string(|s| s.print_expr(e))
}
fn literal_to_string(&self, lit: &ast::Lit) -> String {
Self::to_string(|s| s.print_literal(lit))
fn meta_item_lit_to_string(&self, lit: &ast::MetaItemLit) -> String {
Self::to_string(|s| s.print_meta_item_lit(lit))
}
fn tt_to_string(&self, tt: &TokenTree) -> String {
@ -917,8 +921,8 @@ impl<'a> PrintState<'a> for State<'a> {
self.word("::")
}
match *args {
ast::GenericArgs::AngleBracketed(ref data) => {
match args {
ast::GenericArgs::AngleBracketed(data) => {
self.word("<");
self.commasep(Inconsistent, &data.args, |s, arg| match arg {
ast::AngleBracketedArg::Arg(a) => s.print_generic_arg(a),
@ -927,7 +931,7 @@ impl<'a> PrintState<'a> for State<'a> {
self.word(">")
}
ast::GenericArgs::Parenthesized(ref data) => {
ast::GenericArgs::Parenthesized(data) => {
self.word("(");
self.commasep(Inconsistent, &data.inputs, |s, ty| s.print_type(ty));
self.word(")");
@ -1011,17 +1015,17 @@ impl<'a> State<'a> {
pub fn print_type(&mut self, ty: &ast::Ty) {
self.maybe_print_comment(ty.span.lo());
self.ibox(0);
match ty.kind {
ast::TyKind::Slice(ref ty) => {
match &ty.kind {
ast::TyKind::Slice(ty) => {
self.word("[");
self.print_type(ty);
self.word("]");
}
ast::TyKind::Ptr(ref mt) => {
ast::TyKind::Ptr(mt) => {
self.word("*");
self.print_mt(mt, true);
}
ast::TyKind::Rptr(ref lifetime, ref mt) => {
ast::TyKind::Rptr(lifetime, mt) => {
self.word("&");
self.print_opt_lifetime(lifetime);
self.print_mt(mt, false);
@ -1029,7 +1033,7 @@ impl<'a> State<'a> {
ast::TyKind::Never => {
self.word("!");
}
ast::TyKind::Tup(ref elts) => {
ast::TyKind::Tup(elts) => {
self.popen();
self.commasep(Inconsistent, &elts, |s, ty| s.print_type(ty));
if elts.len() == 1 {
@ -1037,36 +1041,36 @@ impl<'a> State<'a> {
}
self.pclose();
}
ast::TyKind::Paren(ref typ) => {
ast::TyKind::Paren(typ) => {
self.popen();
self.print_type(typ);
self.pclose();
}
ast::TyKind::BareFn(ref f) => {
ast::TyKind::BareFn(f) => {
self.print_ty_fn(f.ext, f.unsafety, &f.decl, None, &f.generic_params);
}
ast::TyKind::Path(None, ref path) => {
ast::TyKind::Path(None, path) => {
self.print_path(path, false, 0);
}
ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false),
ast::TyKind::TraitObject(ref bounds, syntax) => {
if syntax == ast::TraitObjectSyntax::Dyn {
ast::TyKind::Path(Some(qself), path) => self.print_qpath(path, qself, false),
ast::TyKind::TraitObject(bounds, syntax) => {
if *syntax == ast::TraitObjectSyntax::Dyn {
self.word_nbsp("dyn");
}
self.print_type_bounds(bounds);
}
ast::TyKind::ImplTrait(_, ref bounds) => {
ast::TyKind::ImplTrait(_, bounds) => {
self.word_nbsp("impl");
self.print_type_bounds(bounds);
}
ast::TyKind::Array(ref ty, ref length) => {
ast::TyKind::Array(ty, length) => {
self.word("[");
self.print_type(ty);
self.word("; ");
self.print_expr(&length.value);
self.word("]");
}
ast::TyKind::Typeof(ref e) => {
ast::TyKind::Typeof(e) => {
self.word("typeof(");
self.print_expr(&e.value);
self.word(")");
@ -1082,7 +1086,7 @@ impl<'a> State<'a> {
ast::TyKind::ImplicitSelf => {
self.word("Self");
}
ast::TyKind::MacCall(ref m) => {
ast::TyKind::MacCall(m) => {
self.print_mac(m);
}
ast::TyKind::CVarArgs => {
@ -1111,8 +1115,8 @@ impl<'a> State<'a> {
pub(crate) fn print_stmt(&mut self, st: &ast::Stmt) {
self.maybe_print_comment(st.span.lo());
match st.kind {
ast::StmtKind::Local(ref loc) => {
match &st.kind {
ast::StmtKind::Local(loc) => {
self.print_outer_attributes(&loc.attrs);
self.space_if_not_bol();
self.ibox(INDENT_UNIT);
@ -1135,15 +1139,15 @@ impl<'a> State<'a> {
self.word(";");
self.end(); // `let` ibox
}
ast::StmtKind::Item(ref item) => self.print_item(item),
ast::StmtKind::Expr(ref expr) => {
ast::StmtKind::Item(item) => self.print_item(item),
ast::StmtKind::Expr(expr) => {
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
if classify::expr_requires_semi_to_be_stmt(expr) {
self.word(";");
}
}
ast::StmtKind::Semi(ref expr) => {
ast::StmtKind::Semi(expr) => {
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
self.word(";");
@ -1152,7 +1156,7 @@ impl<'a> State<'a> {
self.space_if_not_bol();
self.word(";");
}
ast::StmtKind::MacCall(ref mac) => {
ast::StmtKind::MacCall(mac) => {
self.space_if_not_bol();
self.print_outer_attributes(&mac.attrs);
self.print_mac(&mac.mac);
@ -1193,8 +1197,8 @@ impl<'a> State<'a> {
let has_attrs = self.print_inner_attributes(attrs);
for (i, st) in blk.stmts.iter().enumerate() {
match st.kind {
ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
match &st.kind {
ast::StmtKind::Expr(expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo());
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
@ -1362,7 +1366,7 @@ impl<'a> State<'a> {
pub(crate) fn print_local_decl(&mut self, loc: &ast::Local) {
self.print_pat(&loc.pat);
if let Some(ref ty) = loc.ty {
if let Some(ty) = &loc.ty {
self.word_space(":");
self.print_type(ty);
}
@ -1386,7 +1390,7 @@ impl<'a> State<'a> {
for item_segment in &path.segments[qself.position..] {
self.word("::");
self.print_ident(item_segment.ident);
if let Some(ref args) = item_segment.args {
if let Some(args) = &item_segment.args {
self.print_generic_args(args, colons_before_params)
}
}
@ -1397,23 +1401,23 @@ impl<'a> State<'a> {
self.ann.pre(self, AnnNode::Pat(pat));
/* Pat isn't normalized, but the beauty of it
is that it doesn't matter */
match pat.kind {
match &pat.kind {
PatKind::Wild => self.word("_"),
PatKind::Ident(BindingAnnotation(by_ref, mutbl), ident, ref sub) => {
if by_ref == ByRef::Yes {
PatKind::Ident(BindingAnnotation(by_ref, mutbl), ident, sub) => {
if *by_ref == ByRef::Yes {
self.word_nbsp("ref");
}
if mutbl == Mutability::Mut {
if mutbl.is_mut() {
self.word_nbsp("mut");
}
self.print_ident(ident);
if let Some(ref p) = *sub {
self.print_ident(*ident);
if let Some(p) = sub {
self.space();
self.word_space("@");
self.print_pat(p);
}
}
PatKind::TupleStruct(ref qself, ref path, ref elts) => {
PatKind::TupleStruct(qself, path, elts) => {
if let Some(qself) = qself {
self.print_qpath(path, qself, true);
} else {
@ -1423,16 +1427,16 @@ impl<'a> State<'a> {
self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
self.pclose();
}
PatKind::Or(ref pats) => {
PatKind::Or(pats) => {
self.strsep("|", true, Inconsistent, &pats, |s, p| s.print_pat(p));
}
PatKind::Path(None, ref path) => {
PatKind::Path(None, path) => {
self.print_path(path, true, 0);
}
PatKind::Path(Some(ref qself), ref path) => {
PatKind::Path(Some(qself), path) => {
self.print_qpath(path, qself, false);
}
PatKind::Struct(ref qself, ref path, ref fields, etc) => {
PatKind::Struct(qself, path, fields, etc) => {
if let Some(qself) = qself {
self.print_qpath(path, qself, true);
} else {
@ -1458,7 +1462,7 @@ impl<'a> State<'a> {
},
|f| f.pat.span,
);
if etc {
if *etc {
if !fields.is_empty() {
self.word_space(",");
}
@ -1469,7 +1473,7 @@ impl<'a> State<'a> {
}
self.word("}");
}
PatKind::Tuple(ref elts) => {
PatKind::Tuple(elts) => {
self.popen();
self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
if elts.len() == 1 {
@ -1477,13 +1481,13 @@ impl<'a> State<'a> {
}
self.pclose();
}
PatKind::Box(ref inner) => {
PatKind::Box(inner) => {
self.word("box ");
self.print_pat(inner);
}
PatKind::Ref(ref inner, mutbl) => {
PatKind::Ref(inner, mutbl) => {
self.word("&");
if mutbl == Mutability::Mut {
if mutbl.is_mut() {
self.word("mut ");
}
if let PatKind::Ident(ast::BindingAnnotation::MUT, ..) = inner.kind {
@ -1494,12 +1498,12 @@ impl<'a> State<'a> {
self.print_pat(inner);
}
}
PatKind::Lit(ref e) => self.print_expr(&**e),
PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
PatKind::Lit(e) => self.print_expr(&**e),
PatKind::Range(begin, end, Spanned { node: end_kind, .. }) => {
if let Some(e) = begin {
self.print_expr(e);
}
match *end_kind {
match end_kind {
RangeEnd::Included(RangeSyntax::DotDotDot) => self.word("..."),
RangeEnd::Included(RangeSyntax::DotDotEq) => self.word("..="),
RangeEnd::Excluded => self.word(".."),
@ -1508,36 +1512,36 @@ impl<'a> State<'a> {
self.print_expr(e);
}
}
PatKind::Slice(ref elts) => {
PatKind::Slice(elts) => {
self.word("[");
self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
self.word("]");
}
PatKind::Rest => self.word(".."),
PatKind::Paren(ref inner) => {
PatKind::Paren(inner) => {
self.popen();
self.print_pat(inner);
self.pclose();
}
PatKind::MacCall(ref m) => self.print_mac(m),
PatKind::MacCall(m) => self.print_mac(m),
}
self.ann.post(self, AnnNode::Pat(pat))
}
fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) {
match explicit_self.node {
match &explicit_self.node {
SelfKind::Value(m) => {
self.print_mutability(m, false);
self.print_mutability(*m, false);
self.word("self")
}
SelfKind::Region(ref lt, m) => {
SelfKind::Region(lt, m) => {
self.word("&");
self.print_opt_lifetime(lt);
self.print_mutability(m, false);
self.print_mutability(*m, false);
self.word("self")
}
SelfKind::Explicit(ref typ, m) => {
self.print_mutability(m, false);
SelfKind::Explicit(typ, m) => {
self.print_mutability(*m, false);
self.word("self");
self.word_space(":");
self.print_type(typ)
@ -1599,7 +1603,7 @@ impl<'a> State<'a> {
self.commasep(Inconsistent, &generic_params, |s, param| {
s.print_outer_attributes_inline(&param.attrs);
match param.kind {
match &param.kind {
ast::GenericParamKind::Lifetime => {
let lt = ast::Lifetime { id: param.id, ident: param.ident };
s.print_lifetime(lt);
@ -1608,19 +1612,19 @@ impl<'a> State<'a> {
s.print_lifetime_bounds(&param.bounds)
}
}
ast::GenericParamKind::Type { ref default } => {
ast::GenericParamKind::Type { default } => {
s.print_ident(param.ident);
if !param.bounds.is_empty() {
s.word_nbsp(":");
s.print_type_bounds(&param.bounds);
}
if let Some(ref default) = default {
if let Some(default) = default {
s.space();
s.word_space("=");
s.print_type(default)
}
}
ast::GenericParamKind::Const { ref ty, kw_span: _, ref default } => {
ast::GenericParamKind::Const { ty, default, .. } => {
s.word_space("const");
s.print_ident(param.ident);
s.space();
@ -1630,7 +1634,7 @@ impl<'a> State<'a> {
s.word_nbsp(":");
s.print_type_bounds(&param.bounds);
}
if let Some(ref default) = default {
if let Some(default) = default {
s.space();
s.word_space("=");
s.print_expr(&default.value);
@ -1712,9 +1716,9 @@ impl<'a> State<'a> {
where_clause: ast::WhereClause {
has_where_token: false,
predicates: Vec::new(),
span: rustc_span::DUMMY_SP,
span: DUMMY_SP,
},
span: rustc_span::DUMMY_SP,
span: DUMMY_SP,
};
let header = ast::FnHeader { unsafety, ext, ..ast::FnHeader::default() };
self.print_fn(decl, header, name, &generics);


@ -8,9 +8,9 @@ use rustc_ast::{self as ast, BlockCheckMode};
impl<'a> State<'a> {
fn print_else(&mut self, els: Option<&ast::Expr>) {
if let Some(_else) = els {
match _else.kind {
match &_else.kind {
// Another `else if` block.
ast::ExprKind::If(ref i, ref then, ref e) => {
ast::ExprKind::If(i, then, e) => {
self.cbox(INDENT_UNIT - 1);
self.ibox(0);
self.word(" else if ");
@ -20,7 +20,7 @@ impl<'a> State<'a> {
self.print_else(e.as_deref())
}
// Final `else` block.
ast::ExprKind::Block(ref b, _) => {
ast::ExprKind::Block(b, _) => {
self.cbox(INDENT_UNIT - 1);
self.ibox(0);
self.word(" else ");
@ -58,10 +58,10 @@ impl<'a> State<'a> {
self.print_expr_cond_paren(expr, Self::cond_needs_par(expr))
}
// Does `expr` need parentheses when printed in a condition position?
//
// These cases need parens due to the parse error observed in #26461: `if return {}`
// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
/// Does `expr` need parentheses when printed in a condition position?
///
/// These cases need parens due to the parse error observed in #26461: `if return {}`
/// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
pub(super) fn cond_needs_par(expr: &ast::Expr) -> bool {
match expr.kind {
ast::ExprKind::Break(..)
@ -202,7 +202,7 @@ impl<'a> State<'a> {
self.print_expr_maybe_paren(receiver, parser::PREC_POSTFIX);
self.word(".");
self.print_ident(segment.ident);
if let Some(ref args) = segment.args {
if let Some(args) = &segment.args {
self.print_generic_args(args, true);
}
self.print_call_post(base_args)
@ -284,73 +284,66 @@ impl<'a> State<'a> {
self.ibox(INDENT_UNIT);
self.ann.pre(self, AnnNode::Expr(expr));
match expr.kind {
ast::ExprKind::Box(ref expr) => {
match &expr.kind {
ast::ExprKind::Box(expr) => {
self.word_space("box");
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX);
}
ast::ExprKind::Array(ref exprs) => {
ast::ExprKind::Array(exprs) => {
self.print_expr_vec(exprs);
}
ast::ExprKind::ConstBlock(ref anon_const) => {
ast::ExprKind::ConstBlock(anon_const) => {
self.print_expr_anon_const(anon_const, attrs);
}
ast::ExprKind::Repeat(ref element, ref count) => {
ast::ExprKind::Repeat(element, count) => {
self.print_expr_repeat(element, count);
}
ast::ExprKind::Struct(ref se) => {
ast::ExprKind::Struct(se) => {
self.print_expr_struct(&se.qself, &se.path, &se.fields, &se.rest);
}
ast::ExprKind::Tup(ref exprs) => {
ast::ExprKind::Tup(exprs) => {
self.print_expr_tup(exprs);
}
ast::ExprKind::Call(ref func, ref args) => {
ast::ExprKind::Call(func, args) => {
self.print_expr_call(func, &args);
}
ast::ExprKind::MethodCall(box ast::MethodCall {
ref seg,
ref receiver,
ref args,
..
}) => {
ast::ExprKind::MethodCall(box ast::MethodCall { seg, receiver, args, .. }) => {
self.print_expr_method_call(seg, &receiver, &args);
}
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
self.print_expr_binary(op, lhs, rhs);
ast::ExprKind::Binary(op, lhs, rhs) => {
self.print_expr_binary(*op, lhs, rhs);
}
ast::ExprKind::Unary(op, ref expr) => {
self.print_expr_unary(op, expr);
ast::ExprKind::Unary(op, expr) => {
self.print_expr_unary(*op, expr);
}
ast::ExprKind::AddrOf(k, m, ref expr) => {
self.print_expr_addr_of(k, m, expr);
ast::ExprKind::AddrOf(k, m, expr) => {
self.print_expr_addr_of(*k, *m, expr);
}
ast::ExprKind::Lit(token_lit) => {
self.print_token_literal(token_lit, expr.span);
self.print_token_literal(*token_lit, expr.span);
}
ast::ExprKind::IncludedBytes(ref bytes) => {
let lit = ast::Lit::from_included_bytes(bytes, expr.span);
self.print_literal(&lit)
ast::ExprKind::IncludedBytes(bytes) => {
let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
self.print_token_literal(lit, expr.span)
}
ast::ExprKind::Cast(ref expr, ref ty) => {
ast::ExprKind::Cast(expr, ty) => {
let prec = AssocOp::As.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.space();
self.word_space("as");
self.print_type(ty);
}
ast::ExprKind::Type(ref expr, ref ty) => {
ast::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.print_type(ty);
}
ast::ExprKind::Let(ref pat, ref scrutinee, _) => {
ast::ExprKind::Let(pat, scrutinee, _) => {
self.print_let(pat, scrutinee);
}
ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
self.print_if(test, blk, elseopt.as_deref())
}
ast::ExprKind::While(ref test, ref blk, opt_label) => {
ast::ExprKind::If(test, blk, elseopt) => self.print_if(test, blk, elseopt.as_deref()),
ast::ExprKind::While(test, blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@ -362,7 +355,7 @@ impl<'a> State<'a> {
self.space();
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
ast::ExprKind::ForLoop(pat, iter, blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@ -377,7 +370,7 @@ impl<'a> State<'a> {
self.space();
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::Loop(ref blk, opt_label, _) => {
ast::ExprKind::Loop(blk, opt_label, _) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@ -387,7 +380,7 @@ impl<'a> State<'a> {
self.word_nbsp("loop");
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::Match(ref expr, ref arms) => {
ast::ExprKind::Match(expr, arms) => {
self.cbox(0);
self.ibox(0);
self.word_nbsp("match");
@ -402,18 +395,18 @@ impl<'a> State<'a> {
self.bclose(expr.span, empty);
}
ast::ExprKind::Closure(box ast::Closure {
ref binder,
binder,
capture_clause,
asyncness,
movability,
ref fn_decl,
ref body,
fn_decl,
body,
fn_decl_span: _,
}) => {
self.print_closure_binder(binder);
self.print_movability(movability);
self.print_asyncness(asyncness);
self.print_capture_clause(capture_clause);
self.print_movability(*movability);
self.print_asyncness(*asyncness);
self.print_capture_clause(*capture_clause);
self.print_fn_params_and_ret(fn_decl, true);
self.space();
@ -425,7 +418,7 @@ impl<'a> State<'a> {
// empty box to satisfy the close.
self.ibox(0);
}
ast::ExprKind::Block(ref blk, opt_label) => {
ast::ExprKind::Block(blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@ -436,26 +429,26 @@ impl<'a> State<'a> {
self.ibox(0);
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::Async(capture_clause, _, ref blk) => {
ast::ExprKind::Async(capture_clause, _, blk) => {
self.word_nbsp("async");
self.print_capture_clause(capture_clause);
self.print_capture_clause(*capture_clause);
// cbox/ibox in analogy to the `ExprKind::Block` arm above
self.cbox(0);
self.ibox(0);
self.print_block_with_attrs(blk, attrs);
}
ast::ExprKind::Await(ref expr) => {
ast::ExprKind::Await(expr) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word(".await");
}
ast::ExprKind::Assign(ref lhs, ref rhs, _) => {
ast::ExprKind::Assign(lhs, rhs, _) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1);
self.space();
self.word_space("=");
self.print_expr_maybe_paren(rhs, prec);
}
ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
ast::ExprKind::AssignOp(op, lhs, rhs) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1);
self.space();
@ -463,45 +456,44 @@ impl<'a> State<'a> {
self.word_space("=");
self.print_expr_maybe_paren(rhs, prec);
}
ast::ExprKind::Field(ref expr, ident) => {
ast::ExprKind::Field(expr, ident) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word(".");
self.print_ident(ident);
self.print_ident(*ident);
}
ast::ExprKind::Index(ref expr, ref index) => {
ast::ExprKind::Index(expr, index) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word("[");
self.print_expr(index);
self.word("]");
}
ast::ExprKind::Range(ref start, ref end, limits) => {
ast::ExprKind::Range(start, end, limits) => {
// Special case for `Range`. `AssocOp` claims that `Range` has higher precedence
// than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`.
// Here we use a fake precedence value so that any child with lower precedence than
// a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.)
let fake_prec = AssocOp::LOr.precedence() as i8;
if let Some(ref e) = *start {
if let Some(e) = start {
self.print_expr_maybe_paren(e, fake_prec);
}
if limits == ast::RangeLimits::HalfOpen {
self.word("..");
} else {
self.word("..=");
match limits {
ast::RangeLimits::HalfOpen => self.word(".."),
ast::RangeLimits::Closed => self.word("..="),
}
if let Some(ref e) = *end {
if let Some(e) = end {
self.print_expr_maybe_paren(e, fake_prec);
}
}
ast::ExprKind::Underscore => self.word("_"),
ast::ExprKind::Path(None, ref path) => self.print_path(path, true, 0),
ast::ExprKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, true),
ast::ExprKind::Break(opt_label, ref opt_expr) => {
ast::ExprKind::Path(None, path) => self.print_path(path, true, 0),
ast::ExprKind::Path(Some(qself), path) => self.print_qpath(path, qself, true),
ast::ExprKind::Break(opt_label, opt_expr) => {
self.word("break");
if let Some(label) = opt_label {
self.space();
self.print_ident(label.ident);
}
if let Some(ref expr) = *opt_expr {
if let Some(expr) = opt_expr {
self.space();
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
@ -513,45 +505,45 @@ impl<'a> State<'a> {
self.print_ident(label.ident);
}
}
ast::ExprKind::Ret(ref result) => {
ast::ExprKind::Ret(result) => {
self.word("return");
if let Some(ref expr) = *result {
if let Some(expr) = result {
self.word(" ");
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
ast::ExprKind::Yeet(ref result) => {
ast::ExprKind::Yeet(result) => {
self.word("do");
self.word(" ");
self.word("yeet");
if let Some(ref expr) = *result {
if let Some(expr) = result {
self.word(" ");
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
ast::ExprKind::InlineAsm(ref a) => {
ast::ExprKind::InlineAsm(a) => {
self.word("asm!");
self.print_inline_asm(a);
}
ast::ExprKind::MacCall(ref m) => self.print_mac(m),
ast::ExprKind::Paren(ref e) => {
ast::ExprKind::MacCall(m) => self.print_mac(m),
ast::ExprKind::Paren(e) => {
self.popen();
self.print_expr(e);
self.pclose();
}
ast::ExprKind::Yield(ref e) => {
ast::ExprKind::Yield(e) => {
self.word("yield");
if let Some(ref expr) = *e {
if let Some(expr) = e {
self.space();
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
ast::ExprKind::Try(ref e) => {
ast::ExprKind::Try(e) => {
self.print_expr_maybe_paren(e, parser::PREC_POSTFIX);
self.word("?")
}
ast::ExprKind::TryBlock(ref blk) => {
ast::ExprKind::TryBlock(blk) => {
self.cbox(0);
self.ibox(0);
self.word_nbsp("try");
@ -578,15 +570,15 @@ impl<'a> State<'a> {
self.print_outer_attributes(&arm.attrs);
self.print_pat(&arm.pat);
self.space();
if let Some(ref e) = arm.guard {
if let Some(e) = &arm.guard {
self.word_space("if");
self.print_expr(e);
self.space();
}
self.word_space("=>");
match arm.body.kind {
ast::ExprKind::Block(ref blk, opt_label) => {
match &arm.body.kind {
ast::ExprKind::Block(blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");


@ -136,10 +136,10 @@ impl<'a> State<'a> {
self.maybe_print_comment(item.span.lo());
self.print_outer_attributes(&item.attrs);
self.ann.pre(self, AnnNode::Item(item));
match item.kind {
match &item.kind {
ast::ItemKind::ExternCrate(orig_name) => {
self.head(visibility_qualified(&item.vis, "extern crate"));
if let Some(orig_name) = orig_name {
if let &Some(orig_name) = orig_name {
self.print_name(orig_name);
self.space();
self.word("as");
@ -150,35 +150,41 @@ impl<'a> State<'a> {
self.end(); // end inner head-block
self.end(); // end outer head-block
}
ast::ItemKind::Use(ref tree) => {
ast::ItemKind::Use(tree) => {
self.print_visibility(&item.vis);
self.word_nbsp("use");
self.print_use_tree(tree);
self.word(";");
}
ast::ItemKind::Static(ref ty, mutbl, ref body) => {
ast::ItemKind::Static(ty, mutbl, body) => {
let def = ast::Defaultness::Final;
self.print_item_const(item.ident, Some(mutbl), ty, body.as_deref(), &item.vis, def);
self.print_item_const(
item.ident,
Some(*mutbl),
ty,
body.as_deref(),
&item.vis,
def,
);
}
ast::ItemKind::Const(def, ref ty, ref body) => {
self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, def);
ast::ItemKind::Const(def, ty, body) => {
self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, *def);
}
ast::ItemKind::Fn(box ast::Fn { defaultness, ref sig, ref generics, ref body }) => {
let body = body.as_deref();
ast::ItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => {
self.print_fn_full(
sig,
item.ident,
generics,
&item.vis,
defaultness,
body,
*defaultness,
body.as_deref(),
&item.attrs,
);
}
ast::ItemKind::Mod(unsafety, ref mod_kind) => {
ast::ItemKind::Mod(unsafety, mod_kind) => {
self.head(Self::to_string(|s| {
s.print_visibility(&item.vis);
s.print_unsafety(unsafety);
s.print_unsafety(*unsafety);
s.word("mod");
}));
self.print_ident(item.ident);
@ -201,7 +207,7 @@ impl<'a> State<'a> {
}
}
}
ast::ItemKind::ForeignMod(ref nmod) => {
ast::ItemKind::ForeignMod(nmod) => {
self.head(Self::to_string(|s| {
s.print_unsafety(nmod.unsafety);
s.word("extern");
@ -215,7 +221,7 @@ impl<'a> State<'a> {
let empty = item.attrs.is_empty() && nmod.items.is_empty();
self.bclose(item.span, empty);
}
ast::ItemKind::GlobalAsm(ref asm) => {
ast::ItemKind::GlobalAsm(asm) => {
self.head(visibility_qualified(&item.vis, "global_asm!"));
self.print_inline_asm(asm);
self.word(";");
@ -224,32 +230,31 @@ impl<'a> State<'a> {
}
ast::ItemKind::TyAlias(box ast::TyAlias {
defaultness,
ref generics,
generics,
where_clauses,
where_predicates_split,
ref bounds,
ref ty,
bounds,
ty,
}) => {
let ty = ty.as_deref();
self.print_associated_type(
item.ident,
generics,
where_clauses,
where_predicates_split,
*where_clauses,
*where_predicates_split,
bounds,
ty,
ty.as_deref(),
&item.vis,
defaultness,
*defaultness,
);
}
ast::ItemKind::Enum(ref enum_definition, ref params) => {
ast::ItemKind::Enum(enum_definition, params) => {
self.print_enum_def(enum_definition, params, item.ident, item.span, &item.vis);
}
ast::ItemKind::Struct(ref struct_def, ref generics) => {
ast::ItemKind::Struct(struct_def, generics) => {
self.head(visibility_qualified(&item.vis, "struct"));
self.print_struct(struct_def, generics, item.ident, item.span, true);
}
ast::ItemKind::Union(ref struct_def, ref generics) => {
ast::ItemKind::Union(struct_def, generics) => {
self.head(visibility_qualified(&item.vis, "union"));
self.print_struct(struct_def, generics, item.ident, item.span, true);
}
@ -258,15 +263,15 @@ impl<'a> State<'a> {
polarity,
defaultness,
constness,
ref generics,
ref of_trait,
ref self_ty,
ref items,
generics,
of_trait,
self_ty,
items,
}) => {
self.head("");
self.print_visibility(&item.vis);
self.print_defaultness(defaultness);
self.print_unsafety(unsafety);
self.print_defaultness(*defaultness);
self.print_unsafety(*unsafety);
self.word("impl");
if generics.params.is_empty() {
@ -276,13 +281,13 @@ impl<'a> State<'a> {
self.space();
}
self.print_constness(constness);
self.print_constness(*constness);
if let ast::ImplPolarity::Negative(_) = polarity {
self.word("!");
}
if let Some(ref t) = *of_trait {
if let Some(t) = of_trait {
self.print_trait_ref(t);
self.space();
self.word_space("for");
@ -303,21 +308,21 @@ impl<'a> State<'a> {
ast::ItemKind::Trait(box ast::Trait {
is_auto,
unsafety,
ref generics,
ref bounds,
ref items,
generics,
bounds,
items,
..
}) => {
self.head("");
self.print_visibility(&item.vis);
self.print_unsafety(unsafety);
self.print_is_auto(is_auto);
self.print_unsafety(*unsafety);
self.print_is_auto(*is_auto);
self.word_nbsp("trait");
self.print_ident(item.ident);
self.print_generic_params(&generics.params);
let mut real_bounds = Vec::with_capacity(bounds.len());
for b in bounds.iter() {
if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
if let GenericBound::Trait(ptr, ast::TraitBoundModifier::Maybe) = b {
self.space();
self.word_space("for ?");
self.print_trait_ref(&ptr.trait_ref);
@ -339,14 +344,14 @@ impl<'a> State<'a> {
let empty = item.attrs.is_empty() && items.is_empty();
self.bclose(item.span, empty);
}
ast::ItemKind::TraitAlias(ref generics, ref bounds) => {
ast::ItemKind::TraitAlias(generics, bounds) => {
self.head(visibility_qualified(&item.vis, "trait"));
self.print_ident(item.ident);
self.print_generic_params(&generics.params);
let mut real_bounds = Vec::with_capacity(bounds.len());
// FIXME(durka) this seems to be some quite outdated syntax
for b in bounds.iter() {
if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
if let GenericBound::Trait(ptr, ast::TraitBoundModifier::Maybe) = b {
self.space();
self.word_space("for ?");
self.print_trait_ref(&ptr.trait_ref);
@ -364,13 +369,13 @@ impl<'a> State<'a> {
self.end(); // end inner head-block
self.end(); // end outer head-block
}
ast::ItemKind::MacCall(ref mac) => {
ast::ItemKind::MacCall(mac) => {
self.print_mac(mac);
if mac.args.need_semicolon() {
self.word(";");
}
}
ast::ItemKind::MacroDef(ref macro_def) => {
ast::ItemKind::MacroDef(macro_def) => {
self.print_mac_def(macro_def, &item.ident, item.span, |state| {
state.print_visibility(&item.vis)
});
@ -412,11 +417,11 @@ impl<'a> State<'a> {
}
pub(crate) fn print_visibility(&mut self, vis: &ast::Visibility) {
match vis.kind {
match &vis.kind {
ast::VisibilityKind::Public => self.word_nbsp("pub"),
ast::VisibilityKind::Restricted { ref path, id: _, shorthand } => {
ast::VisibilityKind::Restricted { path, shorthand, .. } => {
let path = Self::to_string(|s| s.print_path(path, false, 0));
if shorthand && (path == "crate" || path == "self" || path == "super") {
if *shorthand && (path == "crate" || path == "self" || path == "super") {
self.word_nbsp(format!("pub({})", path))
} else {
self.word_nbsp(format!("pub(in {})", path))
@ -465,7 +470,7 @@ impl<'a> State<'a> {
) {
self.print_ident(ident);
self.print_generic_params(&generics.params);
match struct_def {
match &struct_def {
ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
if let ast::VariantData::Tuple(..) = struct_def {
self.popen();
@ -484,7 +489,7 @@ impl<'a> State<'a> {
self.end();
self.end(); // Close the outer-box.
}
ast::VariantData::Struct(ref fields, ..) => {
ast::VariantData::Struct(fields, ..) => {
self.print_where_clause(&generics.where_clause);
self.print_record_struct_body(fields, span);
}
@ -496,7 +501,7 @@ impl<'a> State<'a> {
self.print_visibility(&v.vis);
let generics = ast::Generics::default();
self.print_struct(&v.data, &generics, v.ident, v.span, false);
if let Some(ref d) = v.disr_expr {
if let Some(d) = &v.disr_expr {
self.space();
self.word_space("=");
self.print_expr(&d.value)
@ -657,10 +662,10 @@ impl<'a> State<'a> {
}
fn print_use_tree(&mut self, tree: &ast::UseTree) {
match tree.kind {
ast::UseTreeKind::Simple(rename, ..) => {
match &tree.kind {
ast::UseTreeKind::Simple(rename) => {
self.print_path(&tree.prefix, false, 0);
if let Some(rename) = rename {
if let &Some(rename) = rename {
self.nbsp();
self.word_nbsp("as");
self.print_ident(rename);
@ -673,7 +678,7 @@ impl<'a> State<'a> {
}
self.word("*");
}
ast::UseTreeKind::Nested(ref items) => {
ast::UseTreeKind::Nested(items) => {
if !tree.prefix.segments.is_empty() {
self.print_path(&tree.prefix, false, 0);
self.word("::");


@ -1,7 +1,7 @@
//! Parsing and validation of builtin attributes
use rustc_ast as ast;
use rustc_ast::{Attribute, Lit, LitKind, MetaItem, MetaItemKind, NestedMetaItem, NodeId};
use rustc_ast::{Attribute, LitKind, MetaItem, MetaItemKind, MetaItemLit, NestedMetaItem, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg};
use rustc_macros::HashStable_Generic;
@ -486,7 +486,7 @@ where
continue 'outer;
}
},
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,
@ -658,11 +658,11 @@ pub fn eval_condition(
ast::MetaItemKind::List(ref mis) if cfg.name_or_empty() == sym::version => {
try_gate_cfg(sym::version, cfg.span, sess, features);
let (min_version, span) = match &mis[..] {
[NestedMetaItem::Literal(Lit { kind: LitKind::Str(sym, ..), span, .. })] => {
[NestedMetaItem::Lit(MetaItemLit { kind: LitKind::Str(sym, ..), span, .. })] => {
(sym, span)
}
[
NestedMetaItem::Literal(Lit { span, .. })
NestedMetaItem::Lit(MetaItemLit { span, .. })
| NestedMetaItem::MetaItem(MetaItem { span, .. }),
] => {
sess.emit_err(session_diagnostics::ExpectedVersionLiteral { span: *span });
@ -899,7 +899,7 @@ where
continue 'outer;
}
},
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,


@ -41,7 +41,7 @@ pub(crate) struct IncorrectMetaItem {
pub span: Span,
}
// Error code: E0541
/// Error code: E0541
pub(crate) struct UnknownMetaItem<'a> {
pub span: Span,
pub item: String,
@ -200,7 +200,7 @@ pub(crate) struct InvalidReprHintNoValue {
pub name: String,
}
// Error code: E0565
/// Error code: E0565
pub(crate) struct UnsupportedLiteral {
pub span: Span,
pub reason: UnsupportedLiteralReason,


@ -291,11 +291,11 @@ where
// FIXME(lqd): Unify and de-duplicate the following with the actual
// `rustc_traits::type_op::type_op_normalize` query to allow the span we need in the
// `ObligationCause`. The normalization results are currently different between
// `AtExt::normalize` used in the query and `normalize` called below: the former fails
// to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test. Check
// after #85499 lands to see if its fixes have erased this difference.
// `QueryNormalizeExt::query_normalize` used in the query and `normalize` called below:
// the former fails to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test.
// Check after #85499 lands to see if its fixes have erased this difference.
let (param_env, value) = key.into_parts();
let _ = ocx.normalize(cause, param_env, value.value);
let _ = ocx.normalize(&cause, param_env, value.value);
try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}


@ -732,13 +732,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let tcx = self.infcx.tcx;
// Try to find predicates on *generic params* that would allow copying `ty`
let infcx = tcx.infer_ctxt().build();
if infcx
.type_implements_trait(
tcx.lang_items().clone_trait().unwrap(),
[tcx.erase_regions(ty)],
self.param_env,
)
.must_apply_modulo_regions()
if let Some(clone_trait_def) = tcx.lang_items().clone_trait()
&& infcx
.type_implements_trait(
clone_trait_def,
[tcx.erase_regions(ty)],
self.param_env,
)
.must_apply_modulo_regions()
{
err.span_suggestion_verbose(
span.shrink_to_hi(),

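The borrowck hunk above removes a `.unwrap()` on `clone_trait()` by folding the `Option` check into the surrounding condition with a let-chain, so the suggestion is simply skipped when the lang item is missing. A hedged standalone sketch of that shape, with made-up names (let-chains compile on recent stable toolchains under edition 2024; rustc's own crates enable the feature internally):

    // `if let` combined with a further boolean test: the body runs only
    // when the value is present *and* the extra condition holds.
    fn classify(opt: Option<i32>) -> &'static str {
        if let Some(n) = opt
            && n > 0
        {
            "present and positive"
        } else {
            "absent or non-positive"
        }
    }

    fn main() {
        assert_eq!(classify(Some(3)), "present and positive");
        assert_eq!(classify(Some(-1)), "absent or non-positive");
        assert_eq!(classify(None), "absent or non-positive");
    }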

@ -590,7 +590,7 @@ impl UseSpans<'_> {
}
}
// Add a span label to the arguments of the closure, if it exists.
/// Add a span label to the arguments of the closure, if it exists.
pub(super) fn args_span_label(self, err: &mut Diagnostic, message: impl Into<String>) {
if let UseSpans::ClosureUse { args_span, .. } = self {
err.span_label(args_span, message);
@ -628,7 +628,7 @@ impl UseSpans<'_> {
}
}
// Add a span label to the use of the captured variable, if it exists.
/// Add a span label to the use of the captured variable, if it exists.
pub(super) fn var_span_label(
self,
err: &mut Diagnostic,


@ -514,12 +514,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
span: *span,
ty_err: match output_ty.kind() {
ty::Closure(_, _) => FnMutReturnTypeErr::ReturnClosure { span: *span },
ty::Generator(def, ..)
if matches!(
self.infcx.tcx.generator_kind(def),
Some(hir::GeneratorKind::Async(_))
) =>
{
ty::Generator(def, ..) if self.infcx.tcx.generator_is_async(*def) => {
FnMutReturnTypeErr::ReturnAsyncBlock { span: *span }
}
_ => FnMutReturnTypeErr::ReturnRef { span: *span },


@ -83,7 +83,7 @@ mod type_check;
mod universal_regions;
mod used_muts;
// A public API provided for the Rust compiler consumers.
/// A public API provided for the Rust compiler consumers.
pub mod consumers;
use borrow_set::{BorrowData, BorrowSet};


@ -1705,6 +1705,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
});
}
#[instrument(level = "debug", skip(self, infcx, errors_buffer))]
fn check_member_constraints(
&self,
infcx: &InferCtxt<'tcx>,
@ -1712,22 +1713,21 @@ impl<'tcx> RegionInferenceContext<'tcx> {
) {
let member_constraints = self.member_constraints.clone();
for m_c_i in member_constraints.all_indices() {
debug!("check_member_constraint(m_c_i={:?})", m_c_i);
debug!(?m_c_i);
let m_c = &member_constraints[m_c_i];
let member_region_vid = m_c.member_region_vid;
debug!(
"check_member_constraint: member_region_vid={:?} with value {}",
member_region_vid,
self.region_value_str(member_region_vid),
?member_region_vid,
value = ?self.region_value_str(member_region_vid),
);
let choice_regions = member_constraints.choice_regions(m_c_i);
debug!("check_member_constraint: choice_regions={:?}", choice_regions);
debug!(?choice_regions);
// Did the member region wind up equal to any of the option regions?
if let Some(o) =
choice_regions.iter().find(|&&o_r| self.eval_equal(o_r, m_c.member_region_vid))
{
debug!("check_member_constraint: evaluated as equal to {:?}", o);
debug!("evaluated as equal to {:?}", o);
continue;
}

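The region-inference hunk above replaces hand-written format strings with `tracing`'s structured-field shorthand and puts an `#[instrument]` attribute on the function. A hedged sketch of that logging style using the public `tracing` and `tracing-subscriber` crates with made-up types (rustc's `debug!`/`#[instrument]` are built on the same machinery):

    use tracing::{debug, instrument, Level};

    #[derive(Debug)]
    struct MemberConstraint {
        region: u32,
    }

    // `#[instrument]` records the function name and its arguments on entry;
    // `?value` logs a field with its `Debug` impl, `name = ?expr` names it.
    #[instrument(level = "debug")]
    fn check(m_c: &MemberConstraint) {
        debug!(?m_c);
        debug!(region = ?m_c.region, "checking member constraint");
    }

    fn main() {
        tracing_subscriber::fmt().with_max_level(Level::DEBUG).init();
        check(&MemberConstraint { region: 3 });
    }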

@ -121,8 +121,8 @@ pub(super) fn populate_access_facts<'a, 'tcx>(
}
}
// For every potentially drop()-touched region `region` in `local`'s type
// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact.
/// For every potentially drop()-touched region `region` in `local`'s type
/// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact.
pub(super) fn add_drop_of_var_derefs_origin<'tcx>(
typeck: &mut TypeChecker<'_, 'tcx>,
local: Local,


@ -95,9 +95,7 @@ fn generate_handler(cx: &ExtCtxt<'_>, handler: Ident, span: Span, sig_span: Span
body,
}));
let special = sym::rustc_std_internal_symbol;
let special = cx.meta_word(span, special);
let attrs = thin_vec![cx.attribute(special)];
let attrs = thin_vec![cx.attr_word(sym::rustc_std_internal_symbol, span)];
let item = cx.item(span, Ident::from_str_and_span("__rg_oom", span), attrs, kind);
cx.stmt_item(sig_span, item)


@ -1,5 +1,4 @@
use rustc_ast::{
attr,
ptr::P,
token,
tokenstream::{DelimSpan, TokenStream, TokenTree},
@ -107,7 +106,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
(
UseTree {
prefix: this.cx.path(this.span, vec![Ident::with_dummy_span(sym)]),
kind: UseTreeKind::Simple(None, DUMMY_NODE_ID, DUMMY_NODE_ID),
kind: UseTreeKind::Simple(None),
span: this.span,
},
DUMMY_NODE_ID,
@ -118,10 +117,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
self.cx.item(
self.span,
Ident::empty(),
thin_vec![self.cx.attribute(attr::mk_list_item(
Ident::new(sym::allow, self.span),
vec![attr::mk_nested_word_item(Ident::new(sym::unused_imports, self.span))],
))],
thin_vec![self.cx.attr_nested_word(sym::allow, sym::unused_imports, self.span)],
ItemKind::Use(UseTree {
prefix: self.cx.path(self.span, self.cx.std_path(&[sym::asserting])),
kind: UseTreeKind::Nested(vec![


@ -37,10 +37,10 @@ impl MultiItemModifier for Expander {
_is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
let template = AttributeTemplate { list: Some("path"), ..Default::default() };
let attr = &ecx.attribute(meta_item.clone());
validate_attr::check_builtin_attribute(
validate_attr::check_builtin_meta_item(
&ecx.sess.parse_sess,
attr,
&meta_item,
ast::AttrStyle::Outer,
sym::cfg_accessible,
template,
);


@ -2,6 +2,7 @@ use rustc_ast as ast;
use rustc_ast::{ptr::P, tokenstream::TokenStream};
use rustc_errors::Applicability;
use rustc_expand::base::{self, DummyResult};
use rustc_session::errors::report_lit_error;
use rustc_span::Span;
/// Emits errors for literal expressions that are invalid inside and outside of an array.
@ -68,7 +69,10 @@ fn invalid_type_err(
Ok(ast::LitKind::Int(_, _)) => {
cx.span_err(span, "numeric literal is not a `u8`");
}
_ => unreachable!(),
Ok(ast::LitKind::ByteStr(_) | ast::LitKind::Byte(_)) => unreachable!(),
Err(err) => {
report_lit_error(&cx.sess.parse_sess, err, token_lit, span);
}
}
}


@ -1,7 +1,7 @@
use crate::cfg_eval::cfg_eval;
use rustc_ast as ast;
use rustc_ast::{attr, token, GenericParamKind, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
use rustc_ast::{token, GenericParamKind, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
use rustc_errors::{struct_span_err, Applicability};
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier};
use rustc_feature::AttributeTemplate;
@ -33,34 +33,36 @@ impl MultiItemModifier for Expander {
ecx.resolver.resolve_derives(ecx.current_expansion.id, ecx.force_mode, &|| {
let template =
AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() };
let attr =
attr::mk_attr_outer(&sess.parse_sess.attr_id_generator, meta_item.clone());
validate_attr::check_builtin_attribute(
validate_attr::check_builtin_meta_item(
&sess.parse_sess,
&attr,
&meta_item,
ast::AttrStyle::Outer,
sym::derive,
template,
);
let mut resolutions: Vec<_> = attr
.meta_item_list()
.unwrap_or_default()
.into_iter()
.filter_map(|nested_meta| match nested_meta {
NestedMetaItem::MetaItem(meta) => Some(meta),
NestedMetaItem::Literal(lit) => {
// Reject `#[derive("Debug")]`.
report_unexpected_literal(sess, &lit);
None
}
})
.map(|meta| {
// Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the paths.
report_path_args(sess, &meta);
meta.path
})
.map(|path| (path, dummy_annotatable(), None, self.0))
.collect();
let mut resolutions = match &meta_item.kind {
MetaItemKind::List(list) => {
list.iter()
.filter_map(|nested_meta| match nested_meta {
NestedMetaItem::MetaItem(meta) => Some(meta),
NestedMetaItem::Lit(lit) => {
// Reject `#[derive("Debug")]`.
report_unexpected_meta_item_lit(sess, &lit);
None
}
})
.map(|meta| {
// Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the
// paths.
report_path_args(sess, &meta);
meta.path.clone()
})
.map(|path| (path, dummy_annotatable(), None, self.0))
.collect()
}
_ => vec![],
};
// Do not configure or clone items unless necessary.
match &mut resolutions[..] {
@ -127,7 +129,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
bad_target
}
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
fn report_unexpected_meta_item_lit(sess: &Session, lit: &ast::MetaItemLit) {
let help_msg = match lit.token_lit.kind {
token::Str if rustc_lexer::is_ident(lit.token_lit.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token_lit.symbol)

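The derive-expander change above reads `meta_item.kind` directly as a `MetaItemKind::List` and keeps rejecting literal arguments, now via `report_unexpected_meta_item_lit`. In surface syntax the accepted and rejected forms look like this (hedged illustration with made-up types, not rustc code):

    // Paths inside `derive(..)` resolve to derive macros; a literal is
    // rejected with a "try using `#[derive(Debug)]`" style suggestion.
    #[derive(Debug, Clone)] // accepted: a list of paths
    struct Point {
        x: i32,
        y: i32,
    }

    // #[derive("Debug")]   // rejected: a string literal, not a path
    // struct Bad;

    fn main() {
        let p = Point { x: 1, y: 2 };
        let q = p.clone();
        println!("{:?} ({}, {})", q, q.x, q.y);
    }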

@ -68,8 +68,7 @@ pub fn expand_deriving_clone(
_ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"),
}
let inline = cx.meta_word(span, sym::inline);
let attrs = thin_vec![cx.attribute(inline)];
let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: path_std!(clone::Clone),


@ -5,7 +5,7 @@ use crate::deriving::path_std;
use rustc_ast::{self as ast, MetaItem};
use rustc_data_structures::fx::FxHashSet;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::symbol::sym;
use rustc_span::Span;
use thin_vec::thin_vec;
@ -18,11 +18,11 @@ pub fn expand_deriving_eq(
is_const: bool,
) {
let span = cx.with_def_site_ctxt(span);
let inline = cx.meta_word(span, sym::inline);
let hidden = rustc_ast::attr::mk_nested_word_item(Ident::new(sym::hidden, span));
let doc = rustc_ast::attr::mk_list_item(Ident::new(sym::doc, span), vec![hidden]);
let no_coverage = cx.meta_word(span, sym::no_coverage);
let attrs = thin_vec![cx.attribute(inline), cx.attribute(doc), cx.attribute(no_coverage)];
let attrs = thin_vec![
cx.attr_word(sym::inline, span),
cx.attr_nested_word(sym::doc, sym::hidden, span),
cx.attr_word(sym::no_coverage, span)
];
let trait_def = TraitDef {
span,
path: path_std!(cmp::Eq),

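In the `Clone` and `Eq` derive hunks above (and the Ord/PartialEq/PartialOrd/Default hunks that follow), hand-assembled `MetaItem`s wrapped in `cx.attribute(..)` give way to one-call helpers on the extension context. Shown as ordinary surface syntax on a stand-in for the generated item, the attributes those calls synthesize are roughly (hedged illustration; only the helper names and argument order come from the diff):

    #[inline]       // cx.attr_word(sym::inline, span)
    #[doc(hidden)]  // cx.attr_nested_word(sym::doc, sym::hidden, span)
    fn assert_receiver_is_total_eq() {
        // The real `Eq` expansion also attaches `#[no_coverage]`; it is left
        // out here because that attribute is feature-gated on stable.
    }

    fn main() {
        assert_receiver_is_total_eq();
    }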

@ -15,8 +15,7 @@ pub fn expand_deriving_ord(
push: &mut dyn FnMut(Annotatable),
is_const: bool,
) {
let inline = cx.meta_word(span, sym::inline);
let attrs = thin_vec![cx.attribute(inline)];
let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: path_std!(cmp::Ord),


@ -68,8 +68,7 @@ pub fn expand_deriving_partial_eq(
// No need to generate `ne`, the default suffices, and not generating it is
// faster.
let inline = cx.meta_word(span, sym::inline);
let attrs = thin_vec![cx.attribute(inline)];
let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let methods = vec![MethodDef {
name: sym::eq,
generics: Bounds::empty(),


@ -19,8 +19,7 @@ pub fn expand_deriving_partial_ord(
let ret_ty =
Path(Path::new_(pathvec_std!(option::Option), vec![Box::new(ordering_ty)], PathKind::Std));
let inline = cx.meta_word(span, sym::inline);
let attrs = thin_vec![cx.attribute(inline)];
let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let partial_cmp_def = MethodDef {
name: sym::partial_cmp,


@ -20,8 +20,7 @@ pub fn expand_deriving_default(
) {
item.visit_with(&mut DetectNonVariantDefaultAttr { cx });
let inline = cx.meta_word(span, sym::inline);
let attrs = thin_vec![cx.attribute(inline)];
let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: Path::new(vec![kw::Default, sym::Default]),
@ -146,7 +145,7 @@ fn extract_default_variant<'a>(
let suggestion = default_variants
.iter()
.filter_map(|v| {
if v.ident == variant.ident {
if v.span == variant.span {
None
} else {
Some((cx.sess.find_by_name(&v.attrs, kw::Default)?.span, String::new()))


@ -300,12 +300,12 @@ struct TypeParameter {
ty: P<ast::Ty>,
}
// The code snippets built up for derived code are sometimes used as blocks
// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
// arm). This structure avoids committing to either form until necessary,
// avoiding the insertion of any unnecessary blocks.
//
// The statements come before the expression.
/// The code snippets built up for derived code are sometimes used as blocks
/// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
/// arm). This structure avoids committing to either form until necessary,
/// avoiding the insertion of any unnecessary blocks.
///
/// The statements come before the expression.
pub struct BlockOrExpr(Vec<ast::Stmt>, Option<P<Expr>>);
impl BlockOrExpr {
@ -718,7 +718,7 @@ impl<'a> TraitDef<'a> {
let path = cx.path_all(self.span, false, vec![type_ident], self_params);
let self_type = cx.ty_path(path);
let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived));
let attr = cx.attr_word(sym::automatically_derived, self.span);
let attrs = thin_vec![attr];
let opt_trait_ref = Some(trait_ref);


@ -188,7 +188,7 @@ fn inject_impl_of_structural_trait(
.cloned(),
);
// Mark as `automatically_derived` to avoid some silly lints.
attrs.push(cx.attribute(cx.meta_word(span, sym::automatically_derived)));
attrs.push(cx.attr_word(sym::automatically_derived, span));
let newitem = cx.item(
span,


@ -6,15 +6,15 @@ use rustc_span::edition::Edition;
use rustc_span::symbol::sym;
use rustc_span::Span;
// This expands to either
// - `$crate::panic::panic_2015!(...)` or
// - `$crate::panic::panic_2021!(...)`
// depending on the edition.
//
// This is used for both std::panic!() and core::panic!().
//
// `$crate` will refer to either the `std` or `core` crate depending on which
// one we're expanding from.
/// This expands to either
/// - `$crate::panic::panic_2015!(...)` or
/// - `$crate::panic::panic_2021!(...)`
/// depending on the edition.
///
/// This is used for both std::panic!() and core::panic!().
///
/// `$crate` will refer to either the `std` or `core` crate depending on which
/// one we're expanding from.
pub fn expand_panic<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
@ -24,10 +24,10 @@ pub fn expand_panic<'cx>(
expand(mac, cx, sp, tts)
}
// This expands to either
// - `$crate::panic::unreachable_2015!(...)` or
// - `$crate::panic::unreachable_2021!(...)`
// depending on the edition.
/// This expands to either
/// - `$crate::panic::unreachable_2015!(...)` or
/// - `$crate::panic::unreachable_2021!(...)`
/// depending on the edition.
pub fn expand_unreachable<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,


@ -115,9 +115,7 @@ impl AllocFnFactory<'_, '_> {
}
fn attrs(&self) -> AttrVec {
let special = sym::rustc_std_internal_symbol;
let special = self.cx.meta_word(self.span, special);
thin_vec![self.cx.attribute(special)]
thin_vec![self.cx.attr_word(sym::rustc_std_internal_symbol, self.span)]
}
fn arg_ty(


@ -45,6 +45,7 @@ mod log_syntax;
mod source_util;
mod test;
mod trace_macros;
mod type_ascribe;
mod util;
pub mod asm;
@ -92,6 +93,7 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) {
unreachable: edition_panic::expand_unreachable,
stringify: source_util::expand_stringify,
trace_macros: trace_macros::expand_trace_macros,
type_ascribe: type_ascribe::expand_type_ascribe,
}
register_attr! {


@ -1,6 +1,3 @@
use std::mem;
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
@ -13,6 +10,7 @@ use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::smallvec;
use std::mem;
struct ProcMacroDerive {
id: NodeId,
@ -365,14 +363,8 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
cx.expr_array_ref(span, decls),
)
.map(|mut i| {
let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
i.attrs.push(cx.attribute(attr));
let deprecated_attr = attr::mk_nested_word_item(Ident::new(sym::deprecated, span));
let allow_deprecated_attr =
attr::mk_list_item(Ident::new(sym::allow, span), vec![deprecated_attr]);
i.attrs.push(cx.attribute(allow_deprecated_attr));
i.attrs.push(cx.attr_word(sym::rustc_proc_macro_decls, span));
i.attrs.push(cx.attr_nested_word(sym::allow, sym::deprecated, span));
i
});


@ -164,7 +164,7 @@ pub fn expand_include<'cx>(
Box::new(ExpandResult { p, node_id: cx.current_expansion.lint_node_id })
}
// include_str! : read the given file, insert it as a literal string expr
/// `include_str!`: read the given file, insert it as a literal string expr
pub fn expand_include_str(
cx: &mut ExtCtxt<'_>,
sp: Span,


@ -52,7 +52,7 @@ pub fn inject(
cx.item(
span,
ident,
thin_vec![cx.attribute(cx.meta_word(span, sym::macro_use))],
thin_vec![cx.attr_word(sym::macro_use, span)],
ast::ItemKind::ExternCrate(None),
),
);
@ -79,7 +79,7 @@ pub fn inject(
let use_item = cx.item(
span,
Ident::empty(),
thin_vec![cx.attribute(cx.meta_word(span, sym::prelude_import))],
thin_vec![cx.attr_word(sym::prelude_import, span)],
ast::ItemKind::Use(ast::UseTree {
prefix: cx.path(span, import_path),
kind: ast::UseTreeKind::Glob,


@ -2,7 +2,6 @@
/// Ideally, this code would be in libtest but for efficiency and error messages it lives here.
use crate::util::{check_builtin_macro_attribute, warn_on_duplicate_attribute};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast_pretty::pprust;
use rustc_errors::Applicability;
@ -13,13 +12,13 @@ use rustc_span::Span;
use std::iter;
use thin_vec::thin_vec;
// #[test_case] is used by custom test authors to mark tests
// When building for test, it needs to make the item public and gensym the name
// Otherwise, we'll omit the item. This behavior means that any item annotated
// with #[test_case] is never addressable.
//
// We mark item with an inert attribute "rustc_test_marker" which the test generation
// logic will pick up on.
/// #[test_case] is used by custom test authors to mark tests
/// When building for test, it needs to make the item public and gensym the name
/// Otherwise, we'll omit the item. This behavior means that any item annotated
/// with #[test_case] is never addressable.
///
/// We mark the item with an inert attribute "rustc_test_marker" which the test generation
/// logic will pick up on.
pub fn expand_test_case(
ecx: &mut ExtCtxt<'_>,
attr_sp: Span,
@ -47,11 +46,7 @@ pub fn expand_test_case(
tokens: None,
};
item.ident.span = item.ident.span.with_ctxt(sp.ctxt());
item.attrs.push(ecx.attribute(attr::mk_name_value_item_str(
Ident::new(sym::rustc_test_marker, sp),
test_path_symbol,
sp,
)));
item.attrs.push(ecx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, sp));
item
});
@ -241,16 +236,9 @@ pub fn expand_test_or_bench(
Ident::new(item.ident.name, sp),
thin_vec![
// #[cfg(test)]
cx.attribute(attr::mk_list_item(
Ident::new(sym::cfg, attr_sp),
vec![attr::mk_nested_word_item(Ident::new(sym::test, attr_sp))],
)),
cx.attr_nested_word(sym::cfg, sym::test, attr_sp),
// #[rustc_test_marker = "test_case_sort_key"]
cx.attribute(attr::mk_name_value_item_str(
Ident::new(sym::rustc_test_marker, attr_sp),
test_path_symbol,
attr_sp,
)),
cx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, attr_sp),
]
.into(),
// const $ident: test::TestDescAndFn =

View File

@ -34,8 +34,8 @@ struct TestCtxt<'a> {
test_runner: Option<ast::Path>,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
/// Traverse the crate, collecting all the test functions, eliding any
/// existing main functions, and synthesizing a main test harness
pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast::Crate) {
let span_diagnostic = sess.diagnostic();
let panic_strategy = sess.panic_strategy();
@ -185,13 +185,12 @@ impl<'a> MutVisitor for EntryPointCleaner<'a> {
let item = match entry_point_type(self.sess, &item, self.depth) {
EntryPointType::MainNamed | EntryPointType::RustcMainAttr | EntryPointType::Start => {
item.map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| {
let allow_ident = Ident::new(sym::allow, self.def_site);
let dc_nested =
attr::mk_nested_word_item(Ident::new(sym::dead_code, self.def_site));
let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
let allow_dead_code = attr::mk_attr_outer(
let allow_dead_code = attr::mk_attr_nested_word(
&self.sess.parse_sess.attr_id_generator,
allow_dead_code_item,
ast::AttrStyle::Outer,
sym::allow,
sym::dead_code,
self.def_site,
);
let attrs = attrs
.into_iter()
@ -309,8 +308,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
);
// #[rustc_main]
let main_meta = ecx.meta_word(sp, sym::rustc_main);
let main_attr = ecx.attribute(main_meta);
let main_attr = ecx.attr_word(sym::rustc_main, sp);
// pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));

View File

@ -0,0 +1,35 @@
use rustc_ast::ptr::P;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{token, Expr, ExprKind, Ty};
use rustc_errors::PResult;
use rustc_expand::base::{self, DummyResult, ExtCtxt, MacEager};
use rustc_span::Span;
pub fn expand_type_ascribe(
cx: &mut ExtCtxt<'_>,
span: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let (expr, ty) = match parse_ascribe(cx, tts) {
Ok(parsed) => parsed,
Err(mut err) => {
err.emit();
return DummyResult::any(span);
}
};
let asc_expr = cx.expr(span, ExprKind::Type(expr, ty));
return MacEager::expr(asc_expr);
}
fn parse_ascribe<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P<Expr>, P<Ty>)> {
let mut parser = cx.new_parser_from_tts(stream);
let expr = parser.parse_expr()?;
parser.expect(&token::Comma)?;
let ty = parser.parse_ty()?;
Ok((expr, ty))
}
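
The new `type_ascribe!` expander above parses an expression and a type separated by a comma and wraps them in an `ExprKind::Type` node. A minimal usage sketch, assuming the macro stays nightly-only behind the existing `type_ascription` feature gate (the gate name is an assumption, not stated in this diff):

#![feature(type_ascription)]

fn main() {
    // Ascribes the type `u64` to the expression, like the old `(1 + 1) : u64` syntax.
    let n = type_ascribe!(1 + 1, u64);
    assert_eq!(n, 2u64);
}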

View File

@ -1,4 +1,4 @@
use rustc_ast::{Attribute, MetaItem};
use rustc_ast::{AttrStyle, Attribute, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_feature::AttributeTemplate;
use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES;
@ -8,8 +8,13 @@ use rustc_span::Symbol;
pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
// All the built-in macro attributes are "words" at the moment.
let template = AttributeTemplate { word: true, ..Default::default() };
let attr = ecx.attribute(meta_item.clone());
validate_attr::check_builtin_attribute(&ecx.sess.parse_sess, &attr, name, template);
validate_attr::check_builtin_meta_item(
&ecx.sess.parse_sess,
&meta_item,
AttrStyle::Outer,
name,
template,
);
}
/// Emit a warning if the item is annotated with the given attribute. This is used to diagnose when

View File

@ -108,8 +108,8 @@ impl<'tcx> CValue<'tcx> {
}
// FIXME remove
// Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
// vtable pointer.
/// Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
/// vtable pointer.
pub(crate) fn dyn_star_force_data_on_stack(
self,
fx: &mut FunctionCx<'_, '_, 'tcx>,

View File

@ -13,17 +13,17 @@
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values.
#[cfg(all(any(target_arch = "x86",
#[cfg(any(target_arch = "x86",
target_arch = "arm",
target_arch = "mips",
target_arch = "powerpc",
target_arch = "powerpc64")))]
target_arch = "powerpc64"))]
const MIN_ALIGN: usize = 8;
#[cfg(all(any(target_arch = "x86_64",
#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "mips64",
target_arch = "s390x",
target_arch = "sparc64")))]
target_arch = "sparc64"))]
const MIN_ALIGN: usize = 16;
pub struct System;
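
This hunk only strips a redundant `all(...)` wrapper: `all` applied to a single predicate accepts exactly the same configurations as the predicate alone, so the `cfg` behaviour of `MIN_ALIGN` is unchanged. A small stand-alone sketch of that equivalence:

// Both constants are compiled for exactly the same targets; the second form
// is the simplified spelling adopted above.
#[cfg(all(any(target_arch = "x86_64", target_arch = "aarch64")))]
const VERBOSE: usize = 16;

#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
const SIMPLE: usize = 16;

fn main() {
    #[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))]
    assert_eq!(VERBOSE, SIMPLE);
}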

View File

@ -88,9 +88,9 @@ pub struct CodegenCx<'gcc, 'tcx> {
pub vtables: RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>>,
// TODO(antoyo): improve the SSA API to not require those.
// Mapping from function pointer type to indexes of on stack parameters.
/// Mapping from function pointer type to indexes of on stack parameters.
pub on_stack_params: RefCell<FxHashMap<FunctionPtrType<'gcc>, FxHashSet<usize>>>,
// Mapping from function to indexes of on stack parameters.
/// Mapping from function to indexes of on stack parameters.
pub on_stack_function_params: RefCell<FxHashMap<Function<'gcc>, FxHashSet<usize>>>,
/// Cache of emitted const globals (value -> global)

View File

@ -295,8 +295,18 @@ impl<'ll> CodegenCx<'ll, '_> {
llvm::set_thread_local_mode(g, self.tls_model);
}
let dso_local = unsafe { self.should_assume_dso_local(g, true) };
if dso_local {
unsafe {
llvm::LLVMRustSetDSOLocal(g, true);
}
}
if !def_id.is_local() {
let needs_dll_storage_attr = self.use_dll_storage_attrs && !self.tcx.is_foreign_item(def_id) &&
// Local definitions can never be imported, so we must not apply
// the DLLImport annotation.
!dso_local &&
// ThinLTO can't handle this workaround in all cases, so we don't
// emit the attrs. Instead we make them unnecessary by disallowing
// dynamic linking when linker plugin based LTO is enabled.
@ -340,12 +350,6 @@ impl<'ll> CodegenCx<'ll, '_> {
}
}
unsafe {
if self.should_assume_dso_local(g, true) {
llvm::LLVMRustSetDSOLocal(g, true);
}
}
self.instances.borrow_mut().insert(instance, g);
g
}

View File

@ -37,7 +37,7 @@ const VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'ll, 'tcx> {
// Coverage data for each instrumented function identified by DefId.
/// Coverage data for each instrumented function identified by DefId.
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
pub(crate) pgo_func_name_var_map: RefCell<FxHashMap<Instance<'tcx>, &'ll llvm::Value>>,
}

View File

@ -35,7 +35,7 @@ pub enum LLVMRustResult {
pub struct LLVMRustCOFFShortExport {
pub name: *const c_char,
pub ordinal_present: bool,
// value of `ordinal` only important when `ordinal_present` is true
/// value of `ordinal` only important when `ordinal_present` is true
pub ordinal: u16,
}

View File

@ -194,8 +194,8 @@ pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]
}
}
// Given a map from target_features to whether they are enabled or disabled,
// ensure only valid combinations are allowed.
/// Given a map from target_features to whether they are enabled or disabled,
/// ensure only valid combinations are allowed.
pub fn check_tied_features(
sess: &Session,
features: &FxHashMap<&str, bool>,
@ -213,8 +213,8 @@ pub fn check_tied_features(
return None;
}
// Used to generate cfg variables and apply features
// Must express features in the way Rust understands them
/// Used to generate cfg variables and apply features
/// Must express features in the way Rust understands them
pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
let target_machine = create_informational_target_machine(sess);
let mut features: Vec<Symbol> = supported_target_features(sess)
@ -292,30 +292,33 @@ fn llvm_target_features(tm: &llvm::TargetMachine) -> Vec<(&str, &str)> {
}
fn print_target_features(sess: &Session, tm: &llvm::TargetMachine) {
let mut target_features = llvm_target_features(tm);
let mut llvm_target_features = llvm_target_features(tm);
let mut known_llvm_target_features = FxHashSet::<&'static str>::default();
let mut rustc_target_features = supported_target_features(sess)
.iter()
.filter_map(|(feature, _gate)| {
for llvm_feature in to_llvm_features(sess, *feature) {
.map(|(feature, _gate)| {
let desc = if let Some(llvm_feature) = to_llvm_features(sess, *feature).first() {
// LLVM asserts that these are sorted. LLVM and Rust both use byte comparison for these strings.
match target_features.binary_search_by_key(&llvm_feature, |(f, _d)| f).ok().map(
|index| {
let (_f, desc) = target_features.remove(index);
(*feature, desc)
},
) {
Some(v) => return Some(v),
None => {}
match llvm_target_features.binary_search_by_key(&llvm_feature, |(f, _d)| f).ok() {
Some(index) => {
known_llvm_target_features.insert(llvm_feature);
llvm_target_features[index].1
}
None => "",
}
}
None
} else {
""
};
(*feature, desc)
})
.collect::<Vec<_>>();
rustc_target_features.extend_from_slice(&[(
"crt-static",
"Enables C Run-time Libraries to be statically linked",
)]);
let max_feature_len = target_features
llvm_target_features.retain(|(f, _d)| !known_llvm_target_features.contains(f));
let max_feature_len = llvm_target_features
.iter()
.chain(rustc_target_features.iter())
.map(|(feature, _desc)| feature.len())
@ -327,10 +330,10 @@ fn print_target_features(sess: &Session, tm: &llvm::TargetMachine) {
println!(" {1:0$} - {2}.", max_feature_len, feature, desc);
}
println!("\nCode-generation features supported by LLVM for this target:");
for (feature, desc) in &target_features {
for (feature, desc) in &llvm_target_features {
println!(" {1:0$} - {2}.", max_feature_len, feature, desc);
}
if target_features.is_empty() {
if llvm_target_features.is_empty() {
println!(" Target features listing is not supported by this LLVM version.");
}
println!("\nUse +feature to enable a feature, or -feature to disable it.");

View File

@ -238,7 +238,7 @@ impl Type {
unsafe { llvm::LLVMInt8TypeInContext(llcx) }
}
// Creates an integer type with the given number of bits, e.g., i24
/// Creates an integer type with the given number of bits, e.g., i24
pub fn ix_llcx(llcx: &llvm::Context, num_bits: u64) -> &Type {
unsafe { llvm::LLVMIntTypeInContext(llcx, num_bits as c_uint) }
}

View File

@ -377,12 +377,8 @@ fn link_rlib<'a>(
find_native_static_library(name.as_str(), lib.verbatim, &lib_search_paths, sess);
if sess.opts.unstable_opts.packed_bundled_libs && flavor == RlibFlavor::Normal {
let filename = lib.filename.unwrap();
let lib_path = find_native_static_library(
filename.as_str(),
Some(true),
&lib_search_paths,
sess,
);
let lib_path =
find_native_static_library(filename.as_str(), true, &lib_search_paths, sess);
let src = read(lib_path)
.map_err(|e| sess.emit_fatal(errors::ReadFileError { message: e }))?;
let (data, _) = create_wrapper_file(sess, b".bundled_lib".to_vec(), &src);
@ -465,7 +461,7 @@ fn collate_raw_dylibs<'a, 'b>(
for lib in used_libraries {
if lib.kind == NativeLibKind::RawDylib {
let ext = if matches!(lib.verbatim, Some(true)) { "" } else { ".dll" };
let ext = if lib.verbatim { "" } else { ".dll" };
let name = format!("{}{}", lib.name.expect("unnamed raw-dylib library"), ext);
let imports = dylib_table.entry(name.clone()).or_default();
for import in &lib.dll_imports {
@ -1179,7 +1175,7 @@ pub fn ignored_for_lto(sess: &Session, info: &CrateInfo, cnum: CrateNum) -> bool
&& (info.compiler_builtins == Some(cnum) || info.is_no_builtins.contains(&cnum))
}
// This functions tries to determine the appropriate linker (and corresponding LinkerFlavor) to use
/// This function tries to determine the appropriate linker (and corresponding LinkerFlavor) to use
pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
fn infer_from(
sess: &Session,
@ -1335,7 +1331,7 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
NativeLibKind::Static { bundle: Some(false), .. }
| NativeLibKind::Dylib { .. }
| NativeLibKind::Unspecified => {
let verbatim = lib.verbatim.unwrap_or(false);
let verbatim = lib.verbatim;
if sess.target.is_like_msvc {
Some(format!("{}{}", name, if verbatim { "" } else { ".lib" }))
} else if sess.target.linker_flavor.is_gnu() {
@ -2306,7 +2302,7 @@ fn add_native_libs_from_crate(
_ => &codegen_results.crate_info.native_libraries[&cnum],
};
let mut last = (None, NativeLibKind::Unspecified, None);
let mut last = (None, NativeLibKind::Unspecified, false);
for lib in native_libs {
let Some(name) = lib.name else {
continue;
@ -2323,7 +2319,7 @@ fn add_native_libs_from_crate(
};
let name = name.as_str();
let verbatim = lib.verbatim.unwrap_or(false);
let verbatim = lib.verbatim;
match lib.kind {
NativeLibKind::Static { bundle, whole_archive } => {
if link_static {

View File

@ -34,9 +34,9 @@ pub fn disable_localization(linker: &mut Command) {
linker.env("VSLANG", "1033");
}
// The third parameter is for env vars, used on windows to set up the
// path for MSVC to find its DLLs, and gcc to find its bundled
// toolchain
/// The third parameter is for env vars, used on windows to set up the
/// path for MSVC to find its DLLs, and gcc to find its bundled
/// toolchain
pub fn get_linker<'a>(
sess: &'a Session,
linker: &Path,
@ -515,7 +515,7 @@ impl<'a> Linker for GccLinker<'a> {
// -force_load is the macOS equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
self.linker_arg("-force_load");
let lib = find_native_static_library(lib, Some(verbatim), search_path, &self.sess);
let lib = find_native_static_library(lib, verbatim, search_path, &self.sess);
self.linker_arg(&lib);
}
}

View File

@ -191,38 +191,38 @@ pub enum MetadataPosition {
Last,
}
// For rlibs we "pack" rustc metadata into a dummy object file.
//
// Historically it was needed because rustc linked rlibs as whole-archive in some cases.
// In that case linkers try to include all files located in an archive, so if metadata is stored
// in an archive then it needs to be of a form that the linker is able to process.
// Now it's not clear whether metadata still needs to be wrapped into an object file or not.
//
// Note, though, that we don't actually want this metadata to show up in any
// final output of the compiler. Instead this is purely for rustc's own
// metadata tracking purposes.
//
// With the above in mind, each "flavor" of object format gets special
// handling here depending on the target:
//
// * MachO - macos-like targets will insert the metadata into a section that
// is sort of fake dwarf debug info. Inspecting the source of the macos
// linker this causes these sections to be skipped automatically because
// it's not in an allowlist of otherwise well known dwarf section names to
// go into the final artifact.
//
// * WebAssembly - we actually don't have any container format for this
// target. WebAssembly doesn't support the `dylib` crate type anyway so
// there's no need for us to support this at this time. Consequently the
// metadata bytes are simply stored as-is into an rlib.
//
// * COFF - Windows-like targets create an object with a section that has
// the `IMAGE_SCN_LNK_REMOVE` flag set which ensures that if the linker
// ever sees the section it doesn't process it and it's removed.
//
// * ELF - All other targets are similar to Windows in that there's a
// `SHF_EXCLUDE` flag we can set on sections in an object file to get
// automatically removed from the final output.
/// For rlibs we "pack" rustc metadata into a dummy object file.
///
/// Historically it was needed because rustc linked rlibs as whole-archive in some cases.
/// In that case linkers try to include all files located in an archive, so if metadata is stored
/// in an archive then it needs to be of a form that the linker is able to process.
/// Now it's not clear whether metadata still needs to be wrapped into an object file or not.
///
/// Note, though, that we don't actually want this metadata to show up in any
/// final output of the compiler. Instead this is purely for rustc's own
/// metadata tracking purposes.
///
/// With the above in mind, each "flavor" of object format gets special
/// handling here depending on the target:
///
/// * MachO - macos-like targets will insert the metadata into a section that
/// is sort of fake dwarf debug info. Inspecting the source of the macos
/// linker this causes these sections to be skipped automatically because
/// it's not in an allowlist of otherwise well known dwarf section names to
/// go into the final artifact.
///
/// * WebAssembly - we actually don't have any container format for this
/// target. WebAssembly doesn't support the `dylib` crate type anyway so
/// there's no need for us to support this at this time. Consequently the
/// metadata bytes are simply stored as-is into an rlib.
///
/// * COFF - Windows-like targets create an object with a section that has
/// the `IMAGE_SCN_LNK_REMOVE` flag set which ensures that if the linker
/// ever sees the section it doesn't process it and it's removed.
///
/// * ELF - All other targets are similar to Windows in that there's a
/// `SHF_EXCLUDE` flag we can set on sections in an object file to get
/// automatically removed from the final output.
pub fn create_wrapper_file(
sess: &Session,
section_name: Vec<u8>,

View File

@ -340,20 +340,20 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub split_debuginfo: rustc_target::spec::SplitDebuginfo,
pub split_dwarf_kind: rustc_session::config::SplitDwarfKind,
// Number of cgus excluding the allocator/metadata modules
/// Number of cgus excluding the allocator/metadata modules
pub total_cgus: usize,
// Handler to use for diagnostics produced during codegen.
/// Handler to use for diagnostics produced during codegen.
pub diag_emitter: SharedEmitter,
// LLVM optimizations for which we want to print remarks.
/// LLVM optimizations for which we want to print remarks.
pub remark: Passes,
// Worker thread number
/// Worker thread number
pub worker: usize,
// The incremental compilation session directory, or None if we are not
// compiling incrementally
/// The incremental compilation session directory, or None if we are not
/// compiling incrementally
pub incr_comp_session_dir: Option<PathBuf>,
// Used to update CGU re-use information during the thinlto phase.
/// Used to update CGU re-use information during the thinlto phase.
pub cgu_reuse_tracker: CguReuseTracker,
// Channel back to the main control thread to send messages to
/// Channel back to the main control thread to send messages to
pub coordinator_send: Sender<Box<dyn Any + Send>>,
}
@ -756,7 +756,7 @@ fn execute_work_item<B: ExtraBackendMethods>(
}
}
// Actual LTO type we end up choosing based on multiple factors.
/// Actual LTO type we end up choosing based on multiple factors.
pub enum ComputedLtoType {
No,
Thin,

View File

@ -1,4 +1,4 @@
// Type Names for Debug Info.
//! Type Names for Debug Info.
// Notes on targeting MSVC:
// In general, MSVC's debugger attempts to parse all arguments as C++ expressions,
@ -26,10 +26,10 @@ use std::fmt::Write;
use crate::debuginfo::wants_c_like_enum_debuginfo;
// Compute the name of the type as it should be stored in debuginfo. Does not do
// any caching, i.e., calling the function twice with the same type will also do
// the work twice. The `qualified` parameter only affects the first level of the
// type name, further levels (i.e., type parameters) are always fully qualified.
/// Compute the name of the type as it should be stored in debuginfo. Does not do
/// any caching, i.e., calling the function twice with the same type will also do
/// the work twice. The `qualified` parameter only affects the first level of the
/// type name, further levels (i.e., type parameters) are always fully qualified.
pub fn compute_debuginfo_type_name<'tcx>(
tcx: TyCtxt<'tcx>,
t: Ty<'tcx>,

View File

@ -116,7 +116,7 @@ pub struct NativeLib {
pub name: Option<Symbol>,
pub filename: Option<Symbol>,
pub cfg: Option<ast::MetaItem>,
pub verbatim: Option<bool>,
pub verbatim: bool,
pub dll_imports: Vec<cstore::DllImport>,
}
@ -127,7 +127,7 @@ impl From<&cstore::NativeLib> for NativeLib {
filename: lib.filename,
name: lib.name,
cfg: lib.cfg.clone(),
verbatim: lib.verbatim,
verbatim: lib.verbatim.unwrap_or(false),
dll_imports: lib.dll_imports.clone(),
}
}
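
Several hunks above follow from the same representation change: `NativeLib::verbatim` becomes a plain `bool`, and the `Option<bool>` coming from the crate store is collapsed exactly once at the conversion boundary, so later call sites stop repeating `unwrap_or(false)` and `matches!(.., Some(true))`. A hedged sketch with stand-in types (the real pair is `cstore::NativeLib` and the codegen-side `NativeLib`):

// Hypothetical stand-ins for the crate-store and codegen-side structs.
struct StoreLib { verbatim: Option<bool> }
struct CodegenLib { verbatim: bool }

impl From<&StoreLib> for CodegenLib {
    fn from(lib: &StoreLib) -> Self {
        // The Option is resolved exactly once, at the boundary.
        CodegenLib { verbatim: lib.verbatim.unwrap_or(false) }
    }
}

fn main() {
    let lib = StoreLib { verbatim: None };
    // Downstream code now branches on a plain bool.
    assert!(!CodegenLib::from(&lib).verbatim);
}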

View File

@ -40,10 +40,10 @@ pub enum OperandValue<V> {
/// instead.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
// The value.
/// The value.
pub val: OperandValue<V>,
// The layout of value, based on its Rust type.
/// The layout of value, based on its Rust type.
pub layout: TyAndLayout<'tcx>,
}

View File

@ -66,7 +66,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
)?;
// The main interpreter loop.
ecx.run()?;
while ecx.step()? {}
// Intern the result
let intern_kind = if cid.promoted.is_some() {

View File

@ -417,8 +417,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
}
}
// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
// (CTFE and ConstProp) use the same instance. Here, we share that code.
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
type Provenance = AllocId;
type ProvenanceExtra = ();

View File

@ -206,8 +206,8 @@ where
}
}
// Iterates over all fields of an array. Much more efficient than doing the
// same by repeatedly calling `operand_index`.
/// Iterates over all fields of an array. Much more efficient than doing the
/// same by repeatedly calling `operand_index`.
pub fn operand_array_fields<'a>(
&self,
base: &'a OpTy<'tcx, Prov>,

View File

@ -32,11 +32,6 @@ fn binop_right_homogeneous(op: mir::BinOp) -> bool {
}
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn run(&mut self) -> InterpResult<'tcx> {
while self.step()? {}
Ok(())
}
/// Returns `true` as long as there are more things to do.
///
/// This is used by [priroda](https://github.com/oli-obk/priroda)
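
With `InterpCx::run` removed, its single caller in `eval_body_using_ecx` now drives the interpreter directly with `while ecx.step()? {}`. A minimal sketch of that driver pattern with simplified types (the real `step` executes one MIR statement or terminator and returns an `InterpResult`):

struct Machine { remaining: u32 }

impl Machine {
    /// Returns `Ok(true)` as long as there is more work to do.
    fn step(&mut self) -> Result<bool, String> {
        if self.remaining == 0 {
            return Ok(false);      // nothing left to execute
        }
        self.remaining -= 1;       // execute one "statement"
        Ok(true)
    }
}

fn main() -> Result<(), String> {
    let mut ecx = Machine { remaining: 3 };
    while ecx.step()? {}           // the loop that replaced `ecx.run()?`
    Ok(())
}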

View File

@ -324,7 +324,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueMut<'mir, 'tcx, M>
macro_rules! make_value_visitor {
($visitor_trait:ident, $value_trait:ident, $($mutability:ident)?) => {
// How to traverse a value and what to do when we are at the leaves.
/// How to traverse a value and what to do when we are at the leaves.
pub trait $visitor_trait<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
type V: $value_trait<'mir, 'tcx, M>;

View File

@ -761,8 +761,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
hir_id,
ObligationCauseCode::ItemObligation(callee),
);
let normalized_predicates =
ocx.normalize(cause.clone(), param_env, predicates);
let normalized_predicates = ocx.normalize(&cause, param_env, predicates);
ocx.register_obligations(traits::predicates_for_generics(
|_, _| cause.clone(),
self.param_env,

View File

@ -75,14 +75,14 @@ pub fn rustc_allow_const_fn_unstable(
attr::rustc_allow_const_fn_unstable(&tcx.sess, attrs).any(|name| name == feature_gate)
}
// Returns `true` if the given `const fn` is "const-stable".
//
// Panics if the given `DefId` does not refer to a `const fn`.
//
// Const-stability is only relevant for `const fn` within a `staged_api` crate. Only "const-stable"
// functions can be called in a const-context by users of the stable compiler. "const-stable"
// functions are subject to more stringent restrictions than "const-unstable" functions: They
// cannot use unstable features and can only call other "const-stable" functions.
/// Returns `true` if the given `const fn` is "const-stable".
///
/// Panics if the given `DefId` does not refer to a `const fn`.
///
/// Const-stability is only relevant for `const fn` within a `staged_api` crate. Only "const-stable"
/// functions can be called in a const-context by users of the stable compiler. "const-stable"
/// functions are subject to more stringent restrictions than "const-unstable" functions: They
/// cannot use unstable features and can only call other "const-stable" functions.
pub fn is_const_stable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
// A default body in a `#[const_trait]` is not const-stable because const
// trait fns currently cannot be const-stable. We shouldn't

View File

@ -376,7 +376,7 @@ impl<'tcx> NonConstOp<'tcx> for Generator {
ccx: &ConstCx<'_, 'tcx>,
span: Span,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
let msg = format!("{}s are not allowed in {}s", self.0, ccx.const_kind());
let msg = format!("{}s are not allowed in {}s", self.0.descr(), ccx.const_kind());
if let hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) = self.0 {
ccx.tcx.sess.create_feature_err(
UnallowedOpInConstContext { span, msg },
@ -686,7 +686,7 @@ impl<'tcx> NonConstOp<'tcx> for ThreadLocalAccess {
}
}
// Types that cannot appear in the signature or locals of a `const fn`.
/// Types that cannot appear in the signature or locals of a `const fn`.
pub mod ty {
use super::*;

View File

@ -318,14 +318,14 @@ impl<'tcx> Validator<'_, 'tcx> {
match elem {
ProjectionElem::Deref => {
let mut promotable = false;
// When a static is used by-value, that gets desugared to `*STATIC_ADDR`,
// and we need to be able to promote this. So check if this deref matches
// that specific pattern.
// We need to make sure this is a `Deref` of a local with no further projections.
// Discussion can be found at
// https://github.com/rust-lang/rust/pull/74945#discussion_r463063247
if let Some(local) = place_base.as_local() {
// This is a special treatment for cases like *&STATIC where STATIC is a
// global static variable.
// This pattern is generated only when global static variables are directly
// accessed and is qualified for promotion safely.
if let TempState::Defined { location, .. } = self.temps[local] {
let def_stmt = self.body[location.block]
.statements

View File

@ -9,10 +9,11 @@ use std::iter::TrustedLen;
/// Expand `lhs = Rvalue::Aggregate(kind, operands)` into assignments to the fields.
///
/// Produces something like
///
/// ```ignore (illustrative)
/// (lhs as Variant).field0 = arg0; // We only have a downcast if this is an enum
/// (lhs as Variant).field1 = arg1;
/// discriminant(lhs) = variant_index; // If lhs is an enum or generator.
/// ```
pub fn expand_aggregate<'tcx>(
orig_lhs: Place<'tcx>,
operands: impl Iterator<Item = (Operand<'tcx>, Ty<'tcx>)> + TrustedLen,

View File

@ -46,8 +46,8 @@ pub fn is_subtype<'tcx>(
let infcx = builder.build();
let ocx = ObligationCtxt::new(&infcx);
let cause = ObligationCause::dummy();
let src = ocx.normalize(cause.clone(), param_env, src);
let dest = ocx.normalize(cause.clone(), param_env, dest);
let src = ocx.normalize(&cause, param_env, src);
let dest = ocx.normalize(&cause, param_env, dest);
match ocx.sub(&cause, param_env, src, dest) {
Ok(()) => {}
Err(_) => return false,
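
The recurring edit in this and the following files is purely about ownership: `ObligationCtxt::normalize` now borrows the cause, so call sites no longer need `cause.clone()` just to keep using the cause afterwards. A small, hedged illustration of the API shape with a stand-in type:

#[derive(Clone, Debug)]
struct Cause(&'static str); // hypothetical stand-in for ObligationCause

// By value: every call site that still needs the cause must clone it.
fn normalize_old(cause: Cause, v: i32) -> i32 { let _ = cause; v }
// By reference: callers simply borrow, and the callee clones only if it stores the cause.
fn normalize_new(cause: &Cause, v: i32) -> i32 { let _ = cause; v }

fn main() {
    let cause = Cause("dummy");
    let a = normalize_old(cause.clone(), 1); // old signature
    let b = normalize_new(&cause, 2);        // new signature
    println!("{a} {b} {:?}", cause);
}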

View File

@ -10,8 +10,8 @@ mod index_map;
pub use index_map::SortedIndexMultiMap;
/// `SortedMap` is a data structure with similar characteristics as BTreeMap but
/// slightly different trade-offs: lookup, insertion, and removal are *O*(log(*n*))
/// and elements can be iterated in order cheaply.
/// slightly different trade-offs: lookup is *O*(log(*n*)), insertion and removal
/// are *O*(*n*) but elements can be iterated in order cheaply.
///
/// `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it
/// stores data in a more compact way. It also supports accessing contiguous
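
The corrected doc comment reflects that `SortedMap` is backed by a sorted vector: lookup is a binary search, but insertion and removal have to shift elements. A small illustration of the same trade-off using a plain sorted `Vec` as a stand-in:

fn main() {
    let mut map: Vec<(u32, &str)> = vec![(1, "one"), (3, "three"), (5, "five")];

    // O(log n) lookup via binary search on the sorted keys.
    let found = map.binary_search_by_key(&3, |&(k, _)| k).map(|i| map[i].1);
    assert_eq!(found, Ok("three"));

    // O(n) insertion: find the position, then shift the tail to make room.
    if let Err(pos) = map.binary_search_by_key(&4, |&(k, _)| k) {
        map.insert(pos, (4, "four"));
    }
    assert_eq!(map.iter().map(|&(k, _)| k).collect::<Vec<_>>(), vec![1, 3, 4, 5]);
}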

View File

@ -399,7 +399,7 @@ where
}
}
impl<A, CTX> HashStable<CTX> for SmallVec<[A; 1]>
impl<A, const N: usize, CTX> HashStable<CTX> for SmallVec<[A; N]>
where
A: HashStable<CTX>,
{

View File

@ -1336,8 +1336,8 @@ mod signal_handler {
}
}
// When an error signal (such as SIGABRT or SIGSEGV) is delivered to the
// process, print a stack trace and then exit.
/// When an error signal (such as SIGABRT or SIGSEGV) is delivered to the
/// process, print a stack trace and then exit.
pub(super) fn install() {
unsafe {
const ALT_STACK_SIZE: usize = libc::MINSIGSTKSZ + 64 * 1024;

View File

@ -2282,7 +2282,7 @@ impl FileWithAnnotatedLines {
}
// Find overlapping multiline annotations, put them at different depths
multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, ml.line_end));
multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, usize::MAX - ml.line_end));
for (_, ann) in multiline_annotations.clone() {
for (_, a) in multiline_annotations.iter_mut() {
// Move all other multiline annotations overlapping with this one
@ -2300,8 +2300,14 @@ impl FileWithAnnotatedLines {
}
let mut max_depth = 0; // max overlapping multiline spans
for (file, ann) in multiline_annotations {
for (_, ann) in &multiline_annotations {
max_depth = max(max_depth, ann.depth);
}
// Change order of multispan depth to minimize the number of overlaps in the ASCII art.
for (_, a) in multiline_annotations.iter_mut() {
a.depth = max_depth - a.depth + 1;
}
for (file, ann) in multiline_annotations {
let mut end_ann = ann.as_end();
if !ann.overlaps_exactly {
// avoid output like
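
The new sort key orders multiline annotations by start line ascending and, for equal starts, by end line descending (`usize::MAX - line_end`), so an enclosing span is handled before the spans nested inside it; the depth pass above then flips the depths to reduce crossings in the ASCII art. A tiny runnable illustration of the sort key alone:

fn main() {
    let mut spans = vec![(3usize, 5usize), (3, 16), (4, 11)]; // (line_start, line_end)
    spans.sort_by_key(|&(start, end)| (start, usize::MAX - end));
    // The wider span starting on the same line now comes first.
    assert_eq!(spans, vec![(3, 16), (3, 5), (4, 11)]);
}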

View File

@ -16,6 +16,7 @@ use rustc_errors::{
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
use rustc_session::errors::report_lit_error;
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
use rustc_span::edition::Edition;
@ -242,8 +243,8 @@ pub enum ExpandResult<T, U> {
Retry(U),
}
// `meta_item` is the attribute, and `item` is the item being modified.
pub trait MultiItemModifier {
/// `meta_item` is the attribute, and `item` is the item being modified.
fn expand(
&self,
ecx: &mut ExtCtxt<'_>,
@ -1245,7 +1246,10 @@ pub fn expr_to_spanned_string<'a>(
Some((err, true))
}
Ok(ast::LitKind::Err) => None,
Err(_) => None,
Err(err) => {
report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span);
None
}
_ => Some((cx.struct_span_err(expr.span, err_msg), false)),
},
ast::ExprKind::Err => None,

View File

@ -193,7 +193,7 @@ impl<'a> ExtCtxt<'a> {
self.stmt_local(local, sp)
}
// Generates `let _: Type;`, which is usually used for type assertions.
/// Generates `let _: Type;`, which is usually used for type assertions.
pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
let local = P(ast::Local {
pat: self.pat_wild(span),
@ -579,8 +579,6 @@ impl<'a> ExtCtxt<'a> {
attrs: ast::AttrVec,
kind: ast::ItemKind,
) -> P<ast::Item> {
// FIXME: Would be nice if our generated code didn't violate
// Rust coding conventions
P(ast::Item {
ident: name,
attrs,
@ -618,11 +616,23 @@ impl<'a> ExtCtxt<'a> {
self.item(span, name, AttrVec::new(), ast::ItemKind::Const(def, ty, Some(expr)))
}
pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
attr::mk_attr_outer(&self.sess.parse_sess.attr_id_generator, mi)
// Builds `#[name]`.
pub fn attr_word(&self, name: Symbol, span: Span) -> ast::Attribute {
let g = &self.sess.parse_sess.attr_id_generator;
attr::mk_attr_word(g, ast::AttrStyle::Outer, name, span)
}
pub fn meta_word(&self, sp: Span, w: Symbol) -> ast::MetaItem {
attr::mk_word_item(Ident::new(w, sp))
// Builds `#[name = val]`.
//
// Note: `span` is used for both the identifier and the value.
pub fn attr_name_value_str(&self, name: Symbol, val: Symbol, span: Span) -> ast::Attribute {
let g = &self.sess.parse_sess.attr_id_generator;
attr::mk_attr_name_value_str(g, ast::AttrStyle::Outer, name, val, span)
}
// Builds `#[outer(inner)]`.
pub fn attr_nested_word(&self, outer: Symbol, inner: Symbol, span: Span) -> ast::Attribute {
let g = &self.sess.parse_sess.attr_id_generator;
attr::mk_attr_nested_word(g, ast::AttrStyle::Outer, outer, inner, span)
}
}
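
The three new `ExtCtxt` helpers replace the old two-step `meta_word`/`mk_*_item` plus `attribute` construction and build complete outer attributes directly. As a hedged sketch, the forms they produce correspond to ordinary attributes like the ones below (the `cx.`/`sym::` calls in the comments are rustc-internal and shown only for orientation):

#[macro_use]                 // word attribute: cx.attr_word(sym::macro_use, span)
extern crate core;

#[allow(dead_code)]          // nested-word attribute: cx.attr_nested_word(sym::allow, sym::dead_code, span)
#[deprecated = "demo"]       // name-value attribute: cx.attr_name_value_str(sym::deprecated, val, span)
fn demo() {}

fn main() {}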

View File

@ -200,7 +200,7 @@ fn get_features(
features
}
// `cfg_attr`-process the crate's attributes and compute the crate's features.
/// `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(
sess: &Session,
mut krate: ast::Crate,

View File

@ -401,7 +401,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
krate
}
// Recursively expand all macro invocations in this AST fragment.
/// Recursively expand all macro invocations in this AST fragment.
pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
let orig_expansion_data = self.cx.current_expansion.clone();
let orig_force_mode = self.cx.force_mode;
@ -1644,7 +1644,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
let mut span: Option<Span> = None;
while let Some(attr) = attrs.next() {
rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features);
validate_attr::check_meta(&self.cx.sess.parse_sess, attr);
validate_attr::check_attr(&self.cx.sess.parse_sess, attr);
let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span };
span = Some(current_span);
@ -1931,9 +1931,12 @@ pub struct ExpansionConfig<'feat> {
pub features: Option<&'feat Features>,
pub recursion_limit: Limit,
pub trace_mac: bool,
pub should_test: bool, // If false, strip `#[test]` nodes
pub span_debug: bool, // If true, use verbose debugging for `proc_macro::Span`
pub proc_macro_backtrace: bool, // If true, show backtraces for proc-macro panics
/// If false, strip `#[test]` nodes
pub should_test: bool,
/// If true, use verbose debugging for `proc_macro::Span`
pub span_debug: bool,
/// If true, show backtraces for proc-macro panics
pub proc_macro_backtrace: bool,
}
impl<'feat> ExpansionConfig<'feat> {

View File

@ -526,11 +526,8 @@ impl server::TokenStream for Rustc<'_, '_> {
Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
}
ast::ExprKind::IncludedBytes(bytes) => {
let lit = ast::Lit::from_included_bytes(bytes, expr.span);
Ok(tokenstream::TokenStream::token_alone(
token::TokenKind::Literal(lit.token_lit),
expr.span,
))
let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
ast::ExprKind::Lit(token_lit) => match token_lit {

View File

@ -272,13 +272,13 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0
| ____^__-
| | ___|
| ___^__-
| |___|
| ||
4 | || X1 Y1
5 | || X2 Y2
| ||____^__- `Y` is a good letter too
| |____|
| |_____|
| `X` is a good letter
"#,
@ -311,12 +311,12 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0
| ____^__-
| | ___|
| ___^__-
| |___|
| ||
4 | || Y1 X1
| ||____-__^ `X` is a good letter
| |_____|
| |____|
| `Y` is a good letter too
"#,
@ -351,13 +351,13 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |_________-
| _______^
4 | | X1 Y1 Z1
| | _________-
5 | || X2 Y2 Z2
| ||____^ `X` is a good letter
6 | | X3 Y3 Z3
| |_____- `Y` is a good letter too
6 | | X3 Y3 Z3
| |____- `Y` is a good letter too
"#,
);
@ -395,15 +395,15 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0 Z0
| _____^__-__-
| | ____|__|
| || ___|
| ___^__-__-
| |___|__|
| ||___|
| |||
4 | ||| X1 Y1 Z1
5 | ||| X2 Y2 Z2
| |||____^__-__- `Z` label
| ||____|__|
| |____| `Y` is a good letter too
| ||_____|__|
| |______| `Y` is a good letter too
| `X` is a good letter
"#,
@ -487,17 +487,17 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^_-
| _______^
4 | | X1 Y1 Z1
| | ____^_-
| ||____|
| | `X` is a good letter
5 | | X2 Y2 Z2
| |____-______- `Y` is a good letter too
| ____|
| |
6 | | X3 Y3 Z3
| |________- `Z`
| | `X` is a good letter
5 | | X2 Y2 Z2
| |___-______- `Y` is a good letter too
| ___|
| |
6 | | X3 Y3 Z3
| |_______- `Z`
"#,
);
@ -570,14 +570,14 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^____-
| _______^
4 | | X1 Y1 Z1
| | ____^____-
| ||____|
| | `X` is a good letter
5 | | X2 Y2 Z2
6 | | X3 Y3 Z3
| |___________- `Y` is a good letter too
| | `X` is a good letter
5 | | X2 Y2 Z2
6 | | X3 Y3 Z3
| |__________- `Y` is a good letter too
"#,
);
@ -941,18 +941,18 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | X1 Y1 Z1
| |____^____-
| _______^
4 | | X1 Y1 Z1
| | ____^____-
| ||____|
| | `X` is a good letter
5 | | 1
6 | | 2
7 | | 3
... |
15 | | X2 Y2 Z2
16 | | X3 Y3 Z3
| |___________- `Y` is a good letter too
| | `X` is a good letter
5 | | 1
6 | | 2
7 | | 3
... |
15 | | X2 Y2 Z2
16 | | X3 Y3 Z3
| |__________- `Y` is a good letter too
"#,
);
@ -996,21 +996,21 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
| ______^
4 | | 1
5 | | 2
6 | | 3
7 | | X1 Y1 Z1
| |_________-
| _______^
4 | | 1
5 | | 2
6 | | 3
7 | | X1 Y1 Z1
| | _________-
8 | || 4
9 | || 5
10 | || 6
11 | || X2 Y2 Z2
| ||__________- `Z` is a good letter too
... |
15 | | 10
16 | | X3 Y3 Z3
| |_______^ `Y` is a good letter
... |
15 | | 10
16 | | X3 Y3 Z3
| |________^ `Y` is a good letter
"#,
);

View File

@ -237,6 +237,8 @@ declare_features! (
(accepted, native_link_modifiers, "1.61.0", Some(81490), None),
/// Allows specifying the bundle link modifier
(accepted, native_link_modifiers_bundle, "1.63.0", Some(81490), None),
/// Allows specifying the verbatim link modifier
(accepted, native_link_modifiers_verbatim, "CURRENT_RUSTC_VERSION", Some(81490), None),
/// Allows specifying the whole-archive link modifier
(accepted, native_link_modifiers_whole_archive, "1.61.0", Some(81490), None),
/// Allows using non lexical lifetimes (RFC 2094).

View File

@ -455,8 +455,6 @@ declare_features! (
(active, naked_functions, "1.9.0", Some(32408), None),
/// Allows specifying the as-needed link modifier
(active, native_link_modifiers_as_needed, "1.53.0", Some(81490), None),
/// Allows specifying the verbatim link modifier
(active, native_link_modifiers_verbatim, "1.53.0", Some(81490), None),
/// Allow negative trait implementations.
(active, negative_impls, "1.44.0", Some(68318), None),
/// Allows the `!` type. Does not imply 'exhaustive_patterns' (below) any more.

View File

@ -39,6 +39,7 @@ macro_rules! arena_types {
[] param: rustc_hir::Param<'tcx>,
[] pat: rustc_hir::Pat<'tcx>,
[] path: rustc_hir::Path<'tcx>,
[] use_path: rustc_hir::UsePath<'tcx>,
[] path_segment: rustc_hir::PathSegment<'tcx>,
[] poly_trait_ref: rustc_hir::PolyTraitRef<'tcx>,
[] qpath: rustc_hir::QPath<'tcx>,

View File

@ -368,10 +368,6 @@ impl Definitions {
LocalDefId { local_def_index: self.table.allocate(key, def_path_hash) }
}
pub fn iter_local_def_id(&self) -> impl Iterator<Item = LocalDefId> + '_ {
self.table.def_path_hashes.indices().map(|local_def_index| LocalDefId { local_def_index })
}
#[inline(always)]
pub fn local_def_path_hash_to_def_id(
&self,
@ -389,6 +385,10 @@ impl Definitions {
pub fn def_path_hash_to_def_index_map(&self) -> &DefPathHashMap {
&self.table.def_path_hash_to_index
}
pub fn num_definitions(&self) -> usize {
self.table.def_path_hashes.len()
}
}
#[derive(Copy, Clone, PartialEq, Debug)]

View File

@ -183,14 +183,17 @@ impl Lifetime {
/// `std::cmp::PartialEq`. It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Debug, HashStable_Generic)]
pub struct Path<'hir> {
pub struct Path<'hir, R = Res> {
pub span: Span,
/// The resolution for the path.
pub res: Res,
pub res: R,
/// The segments in the path: the things separated by `::`.
pub segments: &'hir [PathSegment<'hir>],
}
/// Up to three resolutions for type, value and macro namespaces.
pub type UsePath<'hir> = Path<'hir, SmallVec<[Res; 3]>>;
impl Path<'_> {
pub fn is_global(&self) -> bool {
!self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
@ -981,8 +984,8 @@ pub struct Pat<'hir> {
pub hir_id: HirId,
pub kind: PatKind<'hir>,
pub span: Span,
// Whether to use default binding modes.
// At present, this is false only for destructuring assignment.
/// Whether to use default binding modes.
/// At present, this is false only for destructuring assignment.
pub default_binding_modes: bool,
}
@ -1090,7 +1093,7 @@ impl fmt::Display for RangeEnd {
pub struct DotDotPos(u32);
impl DotDotPos {
// Panics if n >= u32::MAX.
/// Panics if n >= u32::MAX.
pub fn new(n: Option<usize>) -> Self {
match n {
Some(n) => {
@ -1526,9 +1529,9 @@ pub enum AsyncGeneratorKind {
impl fmt::Display for AsyncGeneratorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
AsyncGeneratorKind::Block => "`async` block",
AsyncGeneratorKind::Closure => "`async` closure body",
AsyncGeneratorKind::Fn => "`async fn` body",
AsyncGeneratorKind::Block => "async block",
AsyncGeneratorKind::Closure => "async closure body",
AsyncGeneratorKind::Fn => "async fn body",
})
}
}
@ -1694,10 +1697,10 @@ impl Expr<'_> {
}
}
// Whether this looks like a place expr, without checking for deref
// adjustments.
// This will return `true` in some potentially surprising cases such as
// `CONSTANT.field`.
/// Whether this looks like a place expr, without checking for deref
/// adjustments.
/// This will return `true` in some potentially surprising cases such as
/// `CONSTANT.field`.
pub fn is_syntactic_place_expr(&self) -> bool {
self.is_place_expr(|_| true)
}
@ -1838,7 +1841,7 @@ impl Expr<'_> {
}
}
// To a first-order approximation, is this a pattern
/// To a first-order approximation, is this a pattern?
pub fn is_approximately_pattern(&self) -> bool {
match &self.kind {
ExprKind::Box(_)
@ -2160,11 +2163,11 @@ impl fmt::Display for LoopIdError {
#[derive(Copy, Clone, Encodable, Debug, HashStable_Generic)]
pub struct Destination {
// This is `Some(_)` iff there is an explicit user-specified `label
/// This is `Some(_)` iff there is an explicit user-specified `label`
pub label: Option<Label>,
// These errors are caught and then reported during the diagnostics pass in
// librustc_passes/loops.rs
/// These errors are caught and then reported during the diagnostics pass in
/// `librustc_passes/loops.rs`
pub target_id: Result<HirId, LoopIdError>,
}
@ -2335,7 +2338,7 @@ pub enum ImplItemKind<'hir> {
Type(&'hir Ty<'hir>),
}
// The name of the associated type for `Fn` return types.
/// The name of the associated type for `Fn` return types.
pub const FN_OUTPUT_NAME: Symbol = sym::Output;
/// Bind a type to an associated type (i.e., `A = Foo`).
@ -3068,7 +3071,7 @@ pub enum ItemKind<'hir> {
/// or just
///
/// `use foo::bar::baz;` (with `as baz` implicitly on the right).
Use(&'hir Path<'hir>, UseKind),
Use(&'hir UsePath<'hir>, UseKind),
/// A `static` item.
Static(&'hir Ty<'hir>, Mutability, BodyId),
@ -3261,7 +3264,7 @@ pub enum ForeignItemKind<'hir> {
/// A variable captured by a closure.
#[derive(Debug, Copy, Clone, Encodable, HashStable_Generic)]
pub struct Upvar {
// First span where it is accessed (there can be multiple).
/// First span where it is accessed (there can be multiple).
pub span: Span,
}
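
The HIR change above makes `Path` generic over its resolution type, defaulting to the existing single-`Res` case, while `use` items store up to one resolution per namespace. A simplified, hedged model of that shape (plain owned types standing in for the real arena references and `SmallVec`):

#[derive(Debug, Clone, Copy)]
enum Res { Type, Value, Macro } // hypothetical simplified resolution

#[derive(Debug)]
struct Path<R = Res> {
    segments: Vec<String>,
    res: R, // the default keeps every existing `Path` use unchanged
}

type UsePath = Path<Vec<Res>>; // one resolution per namespace, as for `use` items

fn main() {
    let p: Path = Path { segments: vec!["cmp".into(), "PartialEq".into()], res: Res::Type };
    let u: UsePath = Path { segments: vec!["std".into(), "mem".into()], res: vec![Res::Type, Res::Value] };
    println!("{p:?}\n{u:?}");
}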

View File

@ -367,7 +367,7 @@ pub trait Visitor<'v>: Sized {
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl<'v>, b: BodyId, _: Span, id: HirId) {
walk_fn(self, fk, fd, b, id)
}
fn visit_use(&mut self, path: &'v Path<'v>, hir_id: HirId) {
fn visit_use(&mut self, path: &'v UsePath<'v>, hir_id: HirId) {
walk_use(self, path, hir_id)
}
fn visit_trait_item(&mut self, ti: &'v TraitItem<'v>) {
@ -422,7 +422,7 @@ pub trait Visitor<'v>: Sized {
fn visit_qpath(&mut self, qpath: &'v QPath<'v>, id: HirId, _span: Span) {
walk_qpath(self, qpath, id)
}
fn visit_path(&mut self, path: &'v Path<'v>, _id: HirId) {
fn visit_path(&mut self, path: &Path<'v>, _id: HirId) {
walk_path(self, path)
}
fn visit_path_segment(&mut self, path_segment: &'v PathSegment<'v>) {
@ -938,9 +938,12 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'
}
}
pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>, hir_id: HirId) {
pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v UsePath<'v>, hir_id: HirId) {
visitor.visit_id(hir_id);
visitor.visit_path(path, hir_id);
let UsePath { segments, ref res, span } = *path;
for &res in res {
visitor.visit_path(&Path { segments, res, span }, hir_id);
}
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem<'v>) {
@ -1126,7 +1129,7 @@ pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath<'v>, id:
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>) {
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &Path<'v>) {
for segment in path.segments {
visitor.visit_path_segment(segment);
}

View File

@ -109,6 +109,9 @@ pub trait AstConv<'tcx> {
) -> Ty<'tcx>;
/// Normalize an associated type coming from the user.
///
/// This should only be used by astconv. Use `FnCtxt::normalize`
/// or `ObligationCtxt::normalize` in downstream crates.
fn normalize_ty(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>;
/// Invoked when we encounter an error from some prior pass
@ -850,7 +853,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.is_some()
}
// Sets `implicitly_sized` to true on `Bounds` if necessary
/// Sets `implicitly_sized` to true on `Bounds` if necessary
pub(crate) fn add_implicitly_sized<'hir>(
&self,
bounds: &mut Bounds<'hir>,
@ -2391,7 +2394,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
path_segs
}
// Check a type `Path` and convert it to a `Ty`.
/// Check a type `Path` and convert it to a `Ty`.
pub fn res_to_ty(
&self,
opt_self_ty: Option<Ty<'tcx>>,

View File

@ -221,7 +221,7 @@ fn compare_predicate_entailment<'tcx>(
let impl_m_own_bounds = impl_m_predicates.instantiate_own(tcx, impl_to_placeholder_substs);
for (predicate, span) in iter::zip(impl_m_own_bounds.predicates, impl_m_own_bounds.spans) {
let normalize_cause = traits::ObligationCause::misc(span, impl_m_hir_id);
let predicate = ocx.normalize(normalize_cause, param_env, predicate);
let predicate = ocx.normalize(&normalize_cause, param_env, predicate);
let cause = ObligationCause::new(
span,
@ -260,7 +260,7 @@ fn compare_predicate_entailment<'tcx>(
);
let norm_cause = ObligationCause::misc(impl_m_span, impl_m_hir_id);
let impl_sig = ocx.normalize(norm_cause.clone(), param_env, impl_sig);
let impl_sig = ocx.normalize(&norm_cause, param_env, impl_sig);
let impl_fty = tcx.mk_fn_ptr(ty::Binder::dummy(impl_sig));
debug!("compare_impl_method: impl_fty={:?}", impl_fty);
@ -271,7 +271,7 @@ fn compare_predicate_entailment<'tcx>(
// we have to do this before normalization, since the normalized ty may
// not contain the input parameters. See issue #87748.
wf_tys.extend(trait_sig.inputs_and_output.iter());
let trait_sig = ocx.normalize(norm_cause, param_env, trait_sig);
let trait_sig = ocx.normalize(&norm_cause, param_env, trait_sig);
// We also have to add the normalized trait signature
// as we don't normalize during implied bounds computation.
wf_tys.extend(trait_sig.inputs_and_output.iter());
@ -366,7 +366,7 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
// Normalize the impl signature with fresh variables for lifetime inference.
let norm_cause = ObligationCause::misc(return_span, impl_m_hir_id);
let impl_sig = ocx.normalize(
norm_cause.clone(),
&norm_cause,
param_env,
infcx.replace_bound_vars_with_fresh_vars(
return_span,
@ -387,7 +387,7 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
tcx.bound_fn_sig(trait_m.def_id).subst(tcx, trait_to_placeholder_substs),
)
.fold_with(&mut collector);
let trait_sig = ocx.normalize(norm_cause.clone(), param_env, unnormalized_trait_sig);
let trait_sig = ocx.normalize(&norm_cause, param_env, unnormalized_trait_sig);
let trait_return_ty = trait_sig.output();
let wf_tys = FxIndexSet::from_iter(
@ -592,7 +592,7 @@ impl<'tcx> TypeFolder<'tcx> for ImplTraitInTraitCollector<'_, 'tcx> {
for (pred, pred_span) in self.tcx().bound_explicit_item_bounds(proj.item_def_id).subst_iter_copied(self.tcx(), proj.substs) {
let pred = pred.fold_with(self);
let pred = self.ocx.normalize(
ObligationCause::misc(self.span, self.body_id),
&ObligationCause::misc(self.span, self.body_id),
self.param_env,
pred,
);
@ -1403,11 +1403,11 @@ pub(crate) fn raw_compare_const_impl<'tcx>(
);
// There is no "body" here, so just pass dummy id.
let impl_ty = ocx.normalize(cause.clone(), param_env, impl_ty);
let impl_ty = ocx.normalize(&cause, param_env, impl_ty);
debug!("compare_const_impl: impl_ty={:?}", impl_ty);
let trait_ty = ocx.normalize(cause.clone(), param_env, trait_ty);
let trait_ty = ocx.normalize(&cause, param_env, trait_ty);
debug!("compare_const_impl: trait_ty={:?}", trait_ty);
@ -1556,7 +1556,7 @@ fn compare_type_predicate_entailment<'tcx>(
for (span, predicate) in std::iter::zip(impl_ty_own_bounds.spans, impl_ty_own_bounds.predicates)
{
let cause = ObligationCause::misc(span, impl_ty_hir_id);
let predicate = ocx.normalize(cause, param_env, predicate);
let predicate = ocx.normalize(&cause, param_env, predicate);
let cause = ObligationCause::new(
span,
@ -1778,7 +1778,7 @@ pub fn check_type_bounds<'tcx>(
for mut obligation in util::elaborate_obligations(tcx, obligations) {
let normalized_predicate =
ocx.normalize(normalize_cause.clone(), normalize_param_env, obligation.predicate);
ocx.normalize(&normalize_cause, normalize_param_env, obligation.predicate);
debug!("compare_projection_bounds: normalized predicate = {:?}", normalized_predicate);
obligation.predicate = normalized_predicate;

View File

@ -233,9 +233,10 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
result
}
// This is an implementation of the TypeRelation trait with the
// aim of simply comparing for equality (without side-effects).
// It is not intended to be used anywhere else other than here.
/// This is an implementation of the [`TypeRelation`] trait with the
/// aim of simply comparing for equality (without side-effects).
///
/// It is not intended to be used anywhere else other than here.
pub(crate) struct SimpleEqRelation<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,

View File

@ -53,12 +53,14 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
self.ocx.infcx.tcx
}
// Convenience function to normalize during wfcheck. This performs
// `ObligationCtxt::normalize`, but provides a nice `ObligationCauseCode`.
fn normalize<T>(&self, span: Span, loc: Option<WellFormedLoc>, value: T) -> T
where
T: TypeFoldable<'tcx>,
{
self.ocx.normalize(
ObligationCause::new(span, self.body_id, ObligationCauseCode::WellFormed(loc)),
&ObligationCause::new(span, self.body_id, ObligationCauseCode::WellFormed(loc)),
self.param_env,
value,
)

View File

@ -33,7 +33,7 @@ use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs}
use rustc_middle::mir::mono::Linkage;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::util::{Discr, IntTypeExt};
use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, IsSuggestable, Ty, TyCtxt};
use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, IsSuggestable, ToPredicate, Ty, TyCtxt};
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@ -1366,12 +1366,14 @@ fn predicates_defined_on(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicate
"predicates_defined_on: inferred_outlives_of({:?}) = {:?}",
def_id, inferred_outlives,
);
let inferred_outlives_iter =
inferred_outlives.iter().map(|(clause, span)| ((*clause).to_predicate(tcx), *span));
if result.predicates.is_empty() {
result.predicates = inferred_outlives;
result.predicates = tcx.arena.alloc_from_iter(inferred_outlives_iter);
} else {
result.predicates = tcx
.arena
.alloc_from_iter(result.predicates.iter().chain(inferred_outlives).copied());
result.predicates = tcx.arena.alloc_from_iter(
result.predicates.into_iter().copied().chain(inferred_outlives_iter),
);
}
}
@ -2145,7 +2147,7 @@ fn should_inherit_track_caller(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
}
fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
use rustc_ast::{Lit, LitIntType, LitKind};
use rustc_ast::{LitIntType, LitKind, MetaItemLit};
if !tcx.features().raw_dylib && tcx.sess.target.arch == "x86" {
feature_err(
&tcx.sess.parse_sess,
@ -2158,7 +2160,7 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
let meta_item_list = attr.meta_item_list();
let meta_item_list = meta_item_list.as_deref();
let sole_meta_list = match meta_item_list {
Some([item]) => item.literal(),
Some([item]) => item.lit(),
Some(_) => {
tcx.sess
.struct_span_err(attr.span, "incorrect number of arguments to `#[link_ordinal]`")
@ -2168,7 +2170,9 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
}
_ => None,
};
if let Some(Lit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) = sole_meta_list {
if let Some(MetaItemLit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) =
sole_meta_list
{
// According to the table at https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-header,
// the ordinal must fit into 16 bits. Similarly, the Ordinal field in COFFShortExport (defined
// in llvm/include/llvm/Object/COFFImportFile.h), which we use to communicate import information

Some files were not shown because too many files have changed in this diff