Mirror of https://github.com/rust-lang/rust.git, synced 2025-04-17 06:26:55 +00:00
Auto merge of #137959 - matthiaskrgr:rollup-62vjvwr, r=matthiaskrgr
Rollup of 12 pull requests

Successful merges:

- #135767 (Future incompatibility warning `unsupported_fn_ptr_calling_conventions`: Also warn in dependencies)
- #137852 (Remove layouting dead code for non-array SIMD types.)
- #137863 (Fix pretty printing of unsafe binders)
- #137882 (do not build additional stage on compiler paths)
- #137894 (Revert "store ScalarPair via memset when one side is undef and the other side can be memset")
- #137902 (Make `ast::TokenKind` more like `lexer::TokenKind`)
- #137921 (Subtree update of `rust-analyzer`)
- #137922 (A few cleanups after the removal of `cfg(not(parallel))`)
- #137939 (fix order on shl impl)
- #137946 (Fix docker run-local docs)
- #137955 (Always allow rustdoc-json tests to contain long lines)
- #137958 (triagebot.toml: Don't label `test/rustdoc-json` as A-rustdoc-search)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
commit fd17deacce
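Most of the churn in the diff below comes from #137902 (make `ast::TokenKind` more like `lexer::TokenKind`): the nested `BinOp(BinOpToken)`/`BinOpEq(BinOpToken)` wrappers are flattened into one `TokenKind` variant per operator, and `Not` is renamed to `Bang`. A standalone sketch of the shape of that change, using hypothetical stand-in enums rather than rustc's actual definitions:

    // Hypothetical stand-ins modelling the refactor; the real enums have many more variants.
    #[derive(Debug)]
    enum BinOpToken { Plus, Shr }
    #[derive(Debug)]
    enum OldTokenKind { BinOp(BinOpToken), BinOpEq(BinOpToken), Not }
    #[derive(Debug)]
    enum NewTokenKind { Plus, Shr, PlusEq, ShrEq, Bang }

    // The mechanical mapping applied throughout the diff below.
    fn flatten(old: OldTokenKind) -> NewTokenKind {
        match old {
            OldTokenKind::BinOp(BinOpToken::Plus) => NewTokenKind::Plus,
            OldTokenKind::BinOp(BinOpToken::Shr) => NewTokenKind::Shr,
            OldTokenKind::BinOpEq(BinOpToken::Plus) => NewTokenKind::PlusEq,
            OldTokenKind::BinOpEq(BinOpToken::Shr) => NewTokenKind::ShrEq,
            OldTokenKind::Not => NewTokenKind::Bang,
        }
    }

    fn main() {
        // Compound-assignment tokens become single flat variants, e.g. BinOpEq(Shr) -> ShrEq.
        println!("{:?}", flatten(OldTokenKind::BinOpEq(BinOpToken::Shr)));
    }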
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 use std::fmt;
 use std::sync::Arc;

-pub use BinOpToken::*;
 pub use LitKind::*;
 pub use Nonterminal::*;
 pub use NtExprKind::*;
@@ -26,21 +25,6 @@ pub enum CommentKind {
     Block,
 }

-#[derive(Clone, PartialEq, Encodable, Decodable, Hash, Debug, Copy)]
-#[derive(HashStable_Generic)]
-pub enum BinOpToken {
-    Plus,
-    Minus,
-    Star,
-    Slash,
-    Percent,
-    Caret,
-    And,
-    Or,
-    Shl,
-    Shr,
-}
-
 // This type must not implement `Hash` due to the unusual `PartialEq` impl below.
 #[derive(Copy, Clone, Debug, Encodable, Decodable, HashStable_Generic)]
 pub enum InvisibleOrigin {
@@ -376,11 +360,49 @@ pub enum TokenKind {
     /// `||`
     OrOr,
     /// `!`
-    Not,
+    Bang,
     /// `~`
     Tilde,
-    BinOp(BinOpToken),
-    BinOpEq(BinOpToken),
+    // `+`
+    Plus,
+    // `-`
+    Minus,
+    // `*`
+    Star,
+    // `/`
+    Slash,
+    // `%`
+    Percent,
+    // `^`
+    Caret,
+    // `&`
+    And,
+    // `|`
+    Or,
+    // `<<`
+    Shl,
+    // `>>`
+    Shr,
+    // `+=`
+    PlusEq,
+    // `-=`
+    MinusEq,
+    // `*=`
+    StarEq,
+    // `/=`
+    SlashEq,
+    // `%=`
+    PercentEq,
+    // `^=`
+    CaretEq,
+    // `&=`
+    AndEq,
+    // `|=`
+    OrEq,
+    // `<<=`
+    ShlEq,
+    // `>>=`
+    ShrEq,

     /* Structural symbols */
     /// `@`
@@ -500,31 +522,31 @@ impl TokenKind {
         Some(match (self, n) {
             (Le, 1) => (Lt, Eq),
             (EqEq, 1) => (Eq, Eq),
-            (Ne, 1) => (Not, Eq),
+            (Ne, 1) => (Bang, Eq),
             (Ge, 1) => (Gt, Eq),
-            (AndAnd, 1) => (BinOp(And), BinOp(And)),
-            (OrOr, 1) => (BinOp(Or), BinOp(Or)),
-            (BinOp(Shl), 1) => (Lt, Lt),
-            (BinOp(Shr), 1) => (Gt, Gt),
-            (BinOpEq(Plus), 1) => (BinOp(Plus), Eq),
-            (BinOpEq(Minus), 1) => (BinOp(Minus), Eq),
-            (BinOpEq(Star), 1) => (BinOp(Star), Eq),
-            (BinOpEq(Slash), 1) => (BinOp(Slash), Eq),
-            (BinOpEq(Percent), 1) => (BinOp(Percent), Eq),
-            (BinOpEq(Caret), 1) => (BinOp(Caret), Eq),
-            (BinOpEq(And), 1) => (BinOp(And), Eq),
-            (BinOpEq(Or), 1) => (BinOp(Or), Eq),
-            (BinOpEq(Shl), 1) => (Lt, Le),         // `<` + `<=`
-            (BinOpEq(Shl), 2) => (BinOp(Shl), Eq), // `<<` + `=`
-            (BinOpEq(Shr), 1) => (Gt, Ge),         // `>` + `>=`
-            (BinOpEq(Shr), 2) => (BinOp(Shr), Eq), // `>>` + `=`
+            (AndAnd, 1) => (And, And),
+            (OrOr, 1) => (Or, Or),
+            (Shl, 1) => (Lt, Lt),
+            (Shr, 1) => (Gt, Gt),
+            (PlusEq, 1) => (Plus, Eq),
+            (MinusEq, 1) => (Minus, Eq),
+            (StarEq, 1) => (Star, Eq),
+            (SlashEq, 1) => (Slash, Eq),
+            (PercentEq, 1) => (Percent, Eq),
+            (CaretEq, 1) => (Caret, Eq),
+            (AndEq, 1) => (And, Eq),
+            (OrEq, 1) => (Or, Eq),
+            (ShlEq, 1) => (Lt, Le),  // `<` + `<=`
+            (ShlEq, 2) => (Shl, Eq), // `<<` + `=`
+            (ShrEq, 1) => (Gt, Ge),  // `>` + `>=`
+            (ShrEq, 2) => (Shr, Eq), // `>>` + `=`
             (DotDot, 1) => (Dot, Dot),
             (DotDotDot, 1) => (Dot, DotDot), // `.` + `..`
             (DotDotDot, 2) => (DotDot, Dot), // `..` + `.`
             (DotDotEq, 2) => (DotDot, Eq),
             (PathSep, 1) => (Colon, Colon),
-            (RArrow, 1) => (BinOp(Minus), Gt),
-            (LArrow, 1) => (Lt, BinOp(Minus)),
+            (RArrow, 1) => (Minus, Gt),
+            (LArrow, 1) => (Lt, Minus),
             (FatArrow, 1) => (Eq, Gt),
             _ => return None,
         })
@@ -543,7 +565,7 @@ impl TokenKind {
     }

     pub fn should_end_const_arg(&self) -> bool {
-        matches!(self, Gt | Ge | BinOp(Shr) | BinOpEq(Shr))
+        matches!(self, Gt | Ge | Shr | ShrEq)
     }
 }

@@ -582,11 +604,11 @@ impl Token {

     pub fn is_punct(&self) -> bool {
         match self.kind {
-            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Not | Tilde | BinOp(_)
-            | BinOpEq(_) | At | Dot | DotDot | DotDotDot | DotDotEq | Comma | Semi | Colon
-            | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | SingleQuote => {
-                true
-            }
+            Eq | Lt | Le | EqEq | Ne | Ge | Gt | AndAnd | OrOr | Bang | Tilde | Plus | Minus
+            | Star | Slash | Percent | Caret | And | Or | Shl | Shr | PlusEq | MinusEq | StarEq
+            | SlashEq | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | Dot | DotDot
+            | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow
+            | FatArrow | Pound | Dollar | Question | SingleQuote => true,

             OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
             | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false,
@@ -594,7 +616,7 @@ impl Token {
     }

     pub fn is_like_plus(&self) -> bool {
-        matches!(self.kind, BinOp(Plus) | BinOpEq(Plus))
+        matches!(self.kind, Plus | PlusEq)
     }

     /// Returns `true` if the token can appear at the start of an expression.
@@ -608,15 +630,15 @@ impl Token {
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block
             Literal(..)                       | // literal
-            Not                               | // operator not
-            BinOp(Minus)                      | // unary minus
-            BinOp(Star)                       | // dereference
-            BinOp(Or) | OrOr                  | // closure
-            BinOp(And)                        | // reference
+            Bang                              | // operator not
+            Minus                             | // unary minus
+            Star                              | // dereference
+            Or | OrOr                         | // closure
+            And                               | // reference
             AndAnd                            | // double reference
             // DotDotDot is no longer supported, but we need some way to display the error
             DotDot | DotDotDot | DotDotEq     | // range notation
-            Lt | BinOp(Shl)                   | // associated path
+            Lt | Shl                          | // associated path
             PathSep                           | // global path
             Lifetime(..)                      | // labeled loop
             Pound => true,                      // expression attributes
@@ -645,17 +667,16 @@ impl Token {
             Ident(..) | NtIdent(..)           |
             OpenDelim(Delimiter::Parenthesis) | // tuple pattern
             OpenDelim(Delimiter::Bracket)     | // slice pattern
-            BinOp(And)                        | // reference
-            BinOp(Minus)                      | // negative literal
-            AndAnd                            | // double reference
-            Literal(_)                        | // literal
-            DotDot                            | // range pattern (future compat)
-            DotDotDot                         | // range pattern (future compat)
-            PathSep                           | // path
-            Lt                                | // path (UFCS constant)
-            BinOp(Shl) => true,                 // path (double UFCS)
-            // leading vert `|` or-pattern
-            BinOp(Or) => matches!(pat_kind, PatWithOr),
+            And                               | // reference
+            Minus                             | // negative literal
+            AndAnd                            | // double reference
+            Literal(_)                        | // literal
+            DotDot                            | // range pattern (future compat)
+            DotDotDot                         | // range pattern (future compat)
+            PathSep                           | // path
+            Lt                                | // path (UFCS constant)
+            Shl => true,                        // path (double UFCS)
+            Or => matches!(pat_kind, PatWithOr), // leading vert `|` or-pattern
             Interpolated(nt) =>
                 matches!(&**nt,
                     | NtExpr(..)
@@ -676,18 +697,18 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a type.
     pub fn can_begin_type(&self) -> bool {
         match self.uninterpolate().kind {
             Ident(name, is_raw) =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Delimiter::Parenthesis) | // tuple
             OpenDelim(Delimiter::Bracket)     | // array
-            Not                               | // never
-            BinOp(Star)                       | // raw pointer
-            BinOp(And)                        | // reference
-            AndAnd                            | // double reference
-            Question                          | // maybe bound in trait object
-            Lifetime(..)                      | // lifetime bound in trait object
-            Lt | BinOp(Shl)                   | // associated path
-            PathSep => true,                    // global path
+            Bang                              | // never
+            Star                              | // raw pointer
+            And                               | // reference
+            AndAnd                            | // double reference
+            Question                          | // maybe bound in trait object
+            Lifetime(..)                      | // lifetime bound in trait object
+            Lt | Shl                          | // associated path
+            PathSep => true,                    // global path
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Ty { .. } |
                 MetaVarKind::Path
@@ -701,7 +722,7 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self.kind {
-            OpenDelim(Delimiter::Brace) | Literal(..) | BinOp(Minus) => true,
+            OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
             Interpolated(ref nt) => matches!(&**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
@@ -750,7 +771,7 @@ impl Token {
     /// Keep this in sync with and `Lit::from_token`, excluding unary negation.
     pub fn can_begin_literal_maybe_minus(&self) -> bool {
         match self.uninterpolate().kind {
-            Literal(..) | BinOp(Minus) => true,
+            Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
             Interpolated(ref nt) => match &**nt {
                 NtLiteral(_) => true,
@@ -875,7 +896,7 @@ impl Token {
     }

     pub fn is_qpath_start(&self) -> bool {
-        self == &Lt || self == &BinOp(Shl)
+        self == &Lt || self == &Shl
     }

     pub fn is_path_start(&self) -> bool {
@@ -967,59 +988,82 @@ impl Token {
     }

     pub fn glue(&self, joint: &Token) -> Option<Token> {
-        let kind = match self.kind {
-            Eq => match joint.kind {
-                Eq => EqEq,
-                Gt => FatArrow,
-                _ => return None,
-            },
-            Lt => match joint.kind {
-                Eq => Le,
-                Lt => BinOp(Shl),
-                Le => BinOpEq(Shl),
-                BinOp(Minus) => LArrow,
-                _ => return None,
-            },
-            Gt => match joint.kind {
-                Eq => Ge,
-                Gt => BinOp(Shr),
-                Ge => BinOpEq(Shr),
-                _ => return None,
-            },
-            Not => match joint.kind {
-                Eq => Ne,
-                _ => return None,
-            },
-            BinOp(op) => match joint.kind {
-                Eq => BinOpEq(op),
-                BinOp(And) if op == And => AndAnd,
-                BinOp(Or) if op == Or => OrOr,
-                Gt if op == Minus => RArrow,
-                _ => return None,
-            },
-            Dot => match joint.kind {
-                Dot => DotDot,
-                DotDot => DotDotDot,
-                _ => return None,
-            },
-            DotDot => match joint.kind {
-                Dot => DotDotDot,
-                Eq => DotDotEq,
-                _ => return None,
-            },
-            Colon => match joint.kind {
-                Colon => PathSep,
-                _ => return None,
-            },
-            SingleQuote => match joint.kind {
-                Ident(name, is_raw) => Lifetime(Symbol::intern(&format!("'{name}")), is_raw),
-                _ => return None,
-            },
+        let kind = match (&self.kind, &joint.kind) {
+            (Eq, Eq) => EqEq,
+            (Eq, Gt) => FatArrow,
+            (Eq, _) => return None,
+
+            (Lt, Eq) => Le,
+            (Lt, Lt) => Shl,
+            (Lt, Le) => ShlEq,
+            (Lt, Minus) => LArrow,
+            (Lt, _) => return None,
+
+            (Gt, Eq) => Ge,
+            (Gt, Gt) => Shr,
+            (Gt, Ge) => ShrEq,
+            (Gt, _) => return None,
+
+            (Bang, Eq) => Ne,
+            (Bang, _) => return None,
+
+            (Plus, Eq) => PlusEq,
+            (Plus, _) => return None,
+
+            (Minus, Eq) => MinusEq,
+            (Minus, Gt) => RArrow,
+            (Minus, _) => return None,
+
+            (Star, Eq) => StarEq,
+            (Star, _) => return None,
+
+            (Slash, Eq) => SlashEq,
+            (Slash, _) => return None,
+
+            (Percent, Eq) => PercentEq,
+            (Percent, _) => return None,
+
+            (Caret, Eq) => CaretEq,
+            (Caret, _) => return None,
+
+            (And, Eq) => AndEq,
+            (And, And) => AndAnd,
+            (And, _) => return None,
+
+            (Or, Eq) => OrEq,
+            (Or, Or) => OrOr,
+            (Or, _) => return None,
+
+            (Shl, Eq) => ShlEq,
+            (Shl, _) => return None,
+
+            (Shr, Eq) => ShrEq,
+            (Shr, _) => return None,
+
+            (Dot, Dot) => DotDot,
+            (Dot, DotDot) => DotDotDot,
+            (Dot, _) => return None,
+
+            (DotDot, Dot) => DotDotDot,
+            (DotDot, Eq) => DotDotEq,
+            (DotDot, _) => return None,
+
+            (Colon, Colon) => PathSep,
+            (Colon, _) => return None,
+
+            (SingleQuote, Ident(name, is_raw)) => {
+                Lifetime(Symbol::intern(&format!("'{name}")), *is_raw)
+            }
+            (SingleQuote, _) => return None,

-            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot
-            | DotDotEq | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar
-            | Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
-            | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof => {
+            (
+                Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | PlusEq | MinusEq | StarEq | SlashEq
+                | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
+                | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
+                | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
+                | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof,
+                _,
+            ) => {
                 return None;
             }
         };
@@ -651,7 +651,7 @@ impl TokenStream {
         if attr_style == AttrStyle::Inner {
             vec![
                 TokenTree::token_joint(token::Pound, span),
-                TokenTree::token_joint_hidden(token::Not, span),
+                TokenTree::token_joint_hidden(token::Bang, span),
                 body,
             ]
         } else {
@@ -1,7 +1,7 @@
 use rustc_span::kw;

 use crate::ast::{self, BinOpKind, RangeLimits};
-use crate::token::{self, BinOpToken, Token};
+use crate::token::{self, Token};

 /// Associative operator.
 #[derive(Copy, Clone, PartialEq, Debug)]
@@ -34,26 +34,26 @@ impl AssocOp {
         use AssocOp::*;
         match t.kind {
             token::Eq => Some(Assign),
-            token::BinOp(BinOpToken::Plus) => Some(Binary(BinOpKind::Add)),
-            token::BinOp(BinOpToken::Minus) => Some(Binary(BinOpKind::Sub)),
-            token::BinOp(BinOpToken::Star) => Some(Binary(BinOpKind::Mul)),
-            token::BinOp(BinOpToken::Slash) => Some(Binary(BinOpKind::Div)),
-            token::BinOp(BinOpToken::Percent) => Some(Binary(BinOpKind::Rem)),
-            token::BinOp(BinOpToken::Caret) => Some(Binary(BinOpKind::BitXor)),
-            token::BinOp(BinOpToken::And) => Some(Binary(BinOpKind::BitAnd)),
-            token::BinOp(BinOpToken::Or) => Some(Binary(BinOpKind::BitOr)),
-            token::BinOp(BinOpToken::Shl) => Some(Binary(BinOpKind::Shl)),
-            token::BinOp(BinOpToken::Shr) => Some(Binary(BinOpKind::Shr)),
-            token::BinOpEq(BinOpToken::Plus) => Some(AssignOp(BinOpKind::Add)),
-            token::BinOpEq(BinOpToken::Minus) => Some(AssignOp(BinOpKind::Sub)),
-            token::BinOpEq(BinOpToken::Star) => Some(AssignOp(BinOpKind::Mul)),
-            token::BinOpEq(BinOpToken::Slash) => Some(AssignOp(BinOpKind::Div)),
-            token::BinOpEq(BinOpToken::Percent) => Some(AssignOp(BinOpKind::Rem)),
-            token::BinOpEq(BinOpToken::Caret) => Some(AssignOp(BinOpKind::BitXor)),
-            token::BinOpEq(BinOpToken::And) => Some(AssignOp(BinOpKind::BitAnd)),
-            token::BinOpEq(BinOpToken::Or) => Some(AssignOp(BinOpKind::BitOr)),
-            token::BinOpEq(BinOpToken::Shl) => Some(AssignOp(BinOpKind::Shl)),
-            token::BinOpEq(BinOpToken::Shr) => Some(AssignOp(BinOpKind::Shr)),
+            token::Plus => Some(Binary(BinOpKind::Add)),
+            token::Minus => Some(Binary(BinOpKind::Sub)),
+            token::Star => Some(Binary(BinOpKind::Mul)),
+            token::Slash => Some(Binary(BinOpKind::Div)),
+            token::Percent => Some(Binary(BinOpKind::Rem)),
+            token::Caret => Some(Binary(BinOpKind::BitXor)),
+            token::And => Some(Binary(BinOpKind::BitAnd)),
+            token::Or => Some(Binary(BinOpKind::BitOr)),
+            token::Shl => Some(Binary(BinOpKind::Shl)),
+            token::Shr => Some(Binary(BinOpKind::Shr)),
+            token::PlusEq => Some(AssignOp(BinOpKind::Add)),
+            token::MinusEq => Some(AssignOp(BinOpKind::Sub)),
+            token::StarEq => Some(AssignOp(BinOpKind::Mul)),
+            token::SlashEq => Some(AssignOp(BinOpKind::Div)),
+            token::PercentEq => Some(AssignOp(BinOpKind::Rem)),
+            token::CaretEq => Some(AssignOp(BinOpKind::BitXor)),
+            token::AndEq => Some(AssignOp(BinOpKind::BitAnd)),
+            token::OrEq => Some(AssignOp(BinOpKind::BitOr)),
+            token::ShlEq => Some(AssignOp(BinOpKind::Shl)),
+            token::ShrEq => Some(AssignOp(BinOpKind::Shr)),
             token::Lt => Some(Binary(BinOpKind::Lt)),
             token::Le => Some(Binary(BinOpKind::Le)),
             token::Ge => Some(Binary(BinOpKind::Ge)),
@@ -11,9 +11,7 @@ use std::sync::Arc;

 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{
-    self, BinOpToken, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind,
-};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{Comment, CommentStyle};
@@ -319,7 +317,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
         (tt1, Tok(Token { kind: Comma | Semi | Dot, .. }, _)) if !is_punct(tt1) => false,

         // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
-        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
+        (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Bang, .. }, _))
             if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
         {
             false
@@ -344,21 +342,6 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
     }
 }

-fn binop_to_string(op: BinOpToken) -> &'static str {
-    match op {
-        token::Plus => "+",
-        token::Minus => "-",
-        token::Star => "*",
-        token::Slash => "/",
-        token::Percent => "%",
-        token::Caret => "^",
-        token::And => "&",
-        token::Or => "|",
-        token::Shl => "<<",
-        token::Shr => ">>",
-    }
-}
-
 pub fn doc_comment_to_string(
     comment_kind: CommentKind,
     attr_style: ast::AttrStyle,
@@ -913,12 +896,30 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
             token::Ne => "!=".into(),
             token::Ge => ">=".into(),
             token::Gt => ">".into(),
-            token::Not => "!".into(),
+            token::Bang => "!".into(),
             token::Tilde => "~".into(),
             token::OrOr => "||".into(),
             token::AndAnd => "&&".into(),
-            token::BinOp(op) => binop_to_string(op).into(),
-            token::BinOpEq(op) => format!("{}=", binop_to_string(op)).into(),
+            token::Plus => "+".into(),
+            token::Minus => "-".into(),
+            token::Star => "*".into(),
+            token::Slash => "/".into(),
+            token::Percent => "%".into(),
+            token::Caret => "^".into(),
+            token::And => "&".into(),
+            token::Or => "|".into(),
+            token::Shl => "<<".into(),
+            token::Shr => ">>".into(),
+            token::PlusEq => "+=".into(),
+            token::MinusEq => "-=".into(),
+            token::StarEq => "*=".into(),
+            token::SlashEq => "/=".into(),
+            token::PercentEq => "%=".into(),
+            token::CaretEq => "^=".into(),
+            token::AndEq => "&=".into(),
+            token::OrEq => "|=".into(),
+            token::ShlEq => "<<=".into(),
+            token::ShrEq => ">>=".into(),

             /* Structural symbols */
             token::At => "@".into(),
@@ -64,11 +64,6 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
     }

-    fn is_undef(&self, _val: RValue<'gcc>) -> bool {
-        // FIXME: actually check for undef
-        false
-    }
-
     fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
         let local = self.current_func.borrow().expect("func").new_local(None, typ, "undefined");
         if typ.is_struct().is_some() {
@@ -127,10 +127,6 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         unsafe { llvm::LLVMGetUndef(t) }
     }

-    fn is_undef(&self, v: &'ll Value) -> bool {
-        unsafe { llvm::LLVMIsUndef(v) == True }
-    }
-
     fn const_poison(&self, t: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMGetPoison(t) }
     }
@@ -1046,7 +1046,6 @@ unsafe extern "C" {
     pub(crate) fn LLVMMetadataTypeInContext(C: &Context) -> &Type;

     // Operations on all values
-    pub(crate) fn LLVMIsUndef(Val: &Value) -> Bool;
     pub(crate) fn LLVMTypeOf(Val: &Value) -> &Type;
     pub(crate) fn LLVMGetValueName2(Val: &Value, Length: *mut size_t) -> *const c_char;
     pub(crate) fn LLVMSetValueName2(Val: &Value, Name: *const c_char, NameLen: size_t);
@@ -203,30 +203,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let alloc_align = alloc.inner().align;
         assert!(alloc_align >= layout.align.abi);

-        // Returns `None` when the value is partially undefined or any byte of it has provenance.
-        // Otherwise returns the value or (if the entire value is undef) returns an undef.
         let read_scalar = |start, size, s: abi::Scalar, ty| {
-            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                range,
+                alloc_range(start, size),
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
-                Err(_) => {
-                    // We may have failed due to partial provenance or unexpected provenance,
-                    // continue down the normal code path if so.
-                    if alloc.0.provenance().range_empty(range, &bx.tcx())
-                        // Since `read_scalar` failed, but there were no relocations involved, the
-                        // bytes must be partially or fully uninitialized. Thus we can now unwrap the
-                        // information about the range of uninit bytes and check if it's the full range.
-                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
-                    {
-                        Some(bx.const_undef(ty))
-                    } else {
-                        None
-                    }
-                }
+                Ok(val) => bx.scalar_to_backend(val, s, ty),
+                Err(_) => bx.const_poison(ty),
             }
         };

@@ -237,14 +221,16 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s) => {
+            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
-                    return OperandRef { val: OperandValue::Immediate(val), layout };
-                }
+                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
+                OperandRef { val: OperandValue::Immediate(val), layout }
             }
-            BackendRepr::ScalarPair(a, b) => {
+            BackendRepr::ScalarPair(
+                a @ abi::Scalar::Initialized { .. },
+                b @ abi::Scalar::Initialized { .. },
+            ) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -260,21 +246,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
-                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
-                }
+                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
             }
-            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
-            _ => {}
-        }
-        // Neither a scalar nor scalar pair. Load from a place
-        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-        // same `ConstAllocation`?
-        let init = bx.const_data_from_alloc(alloc);
-        let base_addr = bx.static_addr_of(init, alloc_align, None);
+            _ if layout.is_zst() => OperandRef::zero_sized(layout),
+            _ => {
+                // Neither a scalar nor scalar pair. Load from a place
+                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+                // same `ConstAllocation`?
+                let init = bx.const_data_from_alloc(alloc);
+                let base_addr = bx.static_addr_of(init, alloc_align, None);

-        let llval = bx.const_ptr_byte_offset(base_addr, offset);
-        bx.load_operand(PlaceRef::new_sized(llval, layout))
+                let llval = bx.const_ptr_byte_offset(base_addr, offset);
+                bx.load_operand(PlaceRef::new_sized(llval, layout))
+            }
+        }
     }

     /// Asserts that this operand refers to a scalar and returns
@@ -8,7 +8,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_middle::{bug, mir, span_bug};
 use rustc_session::config::OptLevel;
 use rustc_span::{DUMMY_SP, Span};
-use tracing::{debug, instrument, trace};
+use tracing::{debug, instrument};

 use super::operand::{OperandRef, OperandValue};
 use super::place::PlaceRef;
@@ -93,8 +93,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             return;
         }

-        // If `v` is an integer constant whose value is just a single byte repeated N times,
-        // emit a `memset` filling the entire `dest` with that byte.
         let try_init_all_same = |bx: &mut Bx, v| {
             let start = dest.val.llval;
             let size = bx.const_usize(dest.layout.size.bytes());
@@ -119,33 +117,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             false
         };

-        trace!(?cg_elem.val);
         match cg_elem.val {
             OperandValue::Immediate(v) => {
                 if try_init_all_same(bx, v) {
                     return;
                 }
             }
-            OperandValue::Pair(a, b) => {
-                let a_is_undef = bx.cx().is_undef(a);
-                match (a_is_undef, bx.cx().is_undef(b)) {
-                    // Can happen for uninit unions
-                    (true, true) => {
-                        // FIXME: can we produce better output here?
-                    }
-                    (false, true) | (true, false) => {
-                        let val = if a_is_undef { b } else { a };
-                        if try_init_all_same(bx, val) {
-                            return;
-                        }
-                    }
-                    (false, false) => {
-                        // FIXME: if both are the same value, use try_init_all_same
-                    }
-                }
-            }
-            OperandValue::ZeroSized => unreachable!("checked above"),
-            OperandValue::Ref(..) => {}
+            _ => (),
         }

         let count = self
@@ -9,7 +9,6 @@ pub trait ConstCodegenMethods<'tcx>: BackendTypes {
     /// Generate an uninitialized value (matching uninitialized memory in MIR).
     /// Whether memory is initialized or not is tracked byte-for-byte.
     fn const_undef(&self, t: Self::Type) -> Self::Value;
-    fn is_undef(&self, v: Self::Value) -> bool;
     /// Generate a fake value. Poison always affects the entire value, even if just a single byte is
     /// poison. This can only be used in codepaths that are already UB, i.e., UB-free Rust code
     /// (including code that e.g. copies uninit memory with `MaybeUninit`) can never encounter a
@@ -43,10 +43,10 @@ impl<T> Sharded<T> {

     /// The shard is selected by hashing `val` with `FxHasher`.
     #[inline]
-    pub fn get_shard_by_value<K: Hash + ?Sized>(&self, _val: &K) -> &Lock<T> {
+    pub fn get_shard_by_value<K: Hash + ?Sized>(&self, val: &K) -> &Lock<T> {
         match self {
             Self::Single(single) => single,
-            Self::Shards(..) => self.get_shard_by_hash(make_hash(_val)),
+            Self::Shards(..) => self.get_shard_by_hash(make_hash(val)),
         }
     }

@@ -56,12 +56,12 @@ impl<T> Sharded<T> {
     }

     #[inline]
-    pub fn get_shard_by_index(&self, _i: usize) -> &Lock<T> {
+    pub fn get_shard_by_index(&self, i: usize) -> &Lock<T> {
         match self {
             Self::Single(single) => single,
             Self::Shards(shards) => {
                 // SAFETY: The index gets ANDed with the shard mask, ensuring it is always inbounds.
-                unsafe { &shards.get_unchecked(_i & (SHARDS - 1)).0 }
+                unsafe { &shards.get_unchecked(i & (SHARDS - 1)).0 }
             }
         }
     }
@@ -69,7 +69,7 @@ impl<T> Sharded<T> {
     /// The shard is selected by hashing `val` with `FxHasher`.
     #[inline]
     #[track_caller]
-    pub fn lock_shard_by_value<K: Hash + ?Sized>(&self, _val: &K) -> LockGuard<'_, T> {
+    pub fn lock_shard_by_value<K: Hash + ?Sized>(&self, val: &K) -> LockGuard<'_, T> {
         match self {
             Self::Single(single) => {
                 // Synchronization is disabled so use the `lock_assume_no_sync` method optimized
@@ -79,7 +79,7 @@ impl<T> Sharded<T> {
                 // `might_be_dyn_thread_safe` was also false.
                 unsafe { single.lock_assume(Mode::NoSync) }
             }
-            Self::Shards(..) => self.lock_shard_by_hash(make_hash(_val)),
+            Self::Shards(..) => self.lock_shard_by_hash(make_hash(val)),
         }
     }

@@ -91,7 +91,7 @@ impl<T> Sharded<T> {

     #[inline]
     #[track_caller]
-    pub fn lock_shard_by_index(&self, _i: usize) -> LockGuard<'_, T> {
+    pub fn lock_shard_by_index(&self, i: usize) -> LockGuard<'_, T> {
         match self {
             Self::Single(single) => {
                 // Synchronization is disabled so use the `lock_assume_no_sync` method optimized
@@ -109,7 +109,7 @@ impl<T> Sharded<T> {
                 // always inbounds.
                 // SAFETY (lock_assume_sync): We know `is_dyn_thread_safe` was true when creating
                 // the lock thus `might_be_dyn_thread_safe` was also true.
-                unsafe { shards.get_unchecked(_i & (SHARDS - 1)).0.lock_assume(Mode::Sync) }
+                unsafe { shards.get_unchecked(i & (SHARDS - 1)).0.lock_assume(Mode::Sync) }
             }
         }
     }
@@ -18,42 +18,54 @@
 //!
 //! | Type                    | Serial version      | Parallel version                |
 //! | ----------------------- | ------------------- | ------------------------------- |
 //! | `LRef<'a, T>` [^2]      | `&'a mut T`         | `&'a T`                         |
 //! |                         |                     |                                 |
 //! | `Lock<T>`               | `RefCell<T>`        | `RefCell<T>` or                 |
 //! |                         |                     | `parking_lot::Mutex<T>`         |
 //! | `RwLock<T>`             | `RefCell<T>`        | `parking_lot::RwLock<T>`        |
 //! | `MTLock<T>` [^1]        | `T`                 | `Lock<T>`                       |
 //! | `MTLockRef<'a, T>` [^2] | `&'a mut MTLock<T>` | `&'a MTLock<T>`                 |
 //! |                         |                     |                                 |
 //! | `ParallelIterator`      | `Iterator`          | `rayon::iter::ParallelIterator` |
 //!
 //! [^1]: `MTLock` is similar to `Lock`, but the serial version avoids the cost
 //! of a `RefCell`. This is appropriate when interior mutability is not
 //! required.
 //!
 //! [^2]: `MTRef`, `MTLockRef` are type aliases.

 use std::collections::HashMap;
 use std::hash::{BuildHasher, Hash};

+pub use parking_lot::{
+    MappedRwLockReadGuard as MappedReadGuard, MappedRwLockWriteGuard as MappedWriteGuard,
+    RwLockReadGuard as ReadGuard, RwLockWriteGuard as WriteGuard,
+};
+
+pub use self::atomic::AtomicU64;
+pub use self::freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard};
+#[doc(no_inline)]
+pub use self::lock::{Lock, LockGuard, Mode};
+pub use self::mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
+pub use self::parallel::{
+    join, par_for_each_in, par_map, parallel_guard, scope, try_par_for_each_in,
+};
+pub use self::vec::{AppendOnlyIndexVec, AppendOnlyVec};
+pub use self::worker_local::{Registry, WorkerLocal};
+pub use crate::marker::*;
+
-mod lock;
-#[doc(no_inline)]
-pub use lock::{Lock, LockGuard, Mode};
-
-mod worker_local;
-pub use worker_local::{Registry, WorkerLocal};
-
-mod parallel;
-pub use parallel::{join, par_for_each_in, par_map, parallel_guard, scope, try_par_for_each_in};
-pub use vec::{AppendOnlyIndexVec, AppendOnlyVec};
-
-mod vec;
-
 mod freeze;
-pub use freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard};
+mod lock;
+mod parallel;
+mod vec;
+mod worker_local;
+
+/// Keep the conditional imports together in a submodule, so that import-sorting
+/// doesn't split them up.
+mod atomic {
+    // Most hosts can just use a regular AtomicU64.
+    #[cfg(target_has_atomic = "64")]
+    pub use std::sync::atomic::AtomicU64;
+
+    // Some 32-bit hosts don't have AtomicU64, so use a fallback.
+    #[cfg(not(target_has_atomic = "64"))]
+    pub use portable_atomic::AtomicU64;
+}

 mod mode {
     use std::sync::atomic::{AtomicU8, Ordering};
@@ -97,21 +109,6 @@ mod mode {

 // FIXME(parallel_compiler): Get rid of these aliases across the compiler.

-pub use std::sync::OnceLock;
-// Use portable AtomicU64 for targets without native 64-bit atomics
-#[cfg(target_has_atomic = "64")]
-pub use std::sync::atomic::AtomicU64;
-
-pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
-pub use parking_lot::{
-    MappedRwLockReadGuard as MappedReadGuard, MappedRwLockWriteGuard as MappedWriteGuard,
-    RwLockReadGuard as ReadGuard, RwLockWriteGuard as WriteGuard,
-};
-#[cfg(not(target_has_atomic = "64"))]
-pub use portable_atomic::AtomicU64;
-
 pub type LRef<'a, T> = &'a T;

 #[derive(Debug, Default)]
 pub struct MTLock<T>(Lock<T>);

@@ -142,14 +139,10 @@ impl<T> MTLock<T> {
     }
 }

-use parking_lot::RwLock as InnerRwLock;
-
 /// This makes locks panic if they are already held.
 /// It is only useful when you are running in a single thread
 const ERROR_CHECKING: bool = false;

-pub type MTLockRef<'a, T> = LRef<'a, MTLock<T>>;
-
 #[derive(Default)]
 #[repr(align(64))]
 pub struct CacheAligned<T>(pub T);
@@ -167,12 +160,12 @@ impl<K: Eq + Hash, V: Eq, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S>
 }

 #[derive(Debug, Default)]
-pub struct RwLock<T>(InnerRwLock<T>);
+pub struct RwLock<T>(parking_lot::RwLock<T>);

 impl<T> RwLock<T> {
     #[inline(always)]
     pub fn new(inner: T) -> Self {
-        RwLock(InnerRwLock::new(inner))
+        RwLock(parking_lot::RwLock::new(inner))
     }

     #[inline(always)]
@@ -328,7 +328,7 @@ impl<'a> StripUnconfigured<'a> {

         // For inner attributes, we do the same thing for the `!` in `#![attr]`.
         let mut trees = if cfg_attr.style == AttrStyle::Inner {
-            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _)) =
+            let Some(TokenTree::Token(bang_token @ Token { kind: TokenKind::Bang, .. }, _)) =
                 orig_trees.next()
             else {
                 panic!("Bad tokens for attribute {cfg_attr:?}");
@@ -432,7 +432,7 @@ fn check_nested_occurrences(
             }
             (
                 NestedMacroState::MacroRules,
-                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+                &TokenTree::Token(Token { kind: TokenKind::Bang, .. }),
             ) => {
                 state = NestedMacroState::MacroRulesNot;
             }
@@ -690,7 +690,7 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
         && let TokenKind::Ident(ident, _) = ident.kind
         && ident == sym::compile_error
         && let mbe::TokenTree::Token(bang) = bang
-        && let TokenKind::Not = bang.kind
+        && let TokenKind::Bang = bang.kind
         && let mbe::TokenTree::Delimited(.., del) = args
         && !del.delim.skip()
     {
@@ -1135,7 +1135,7 @@ fn check_matcher_core<'tt>(
                 && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                 && matches!(
                     next_token,
-                    TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
+                    TokenTree::Token(token) if *token == token::Or
                 )
             {
                 // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
@@ -1177,7 +1177,7 @@ fn check_matcher_core<'tt>(

             if kind == NonterminalKind::Pat(PatWithOr)
                 && sess.psess.edition.at_least_rust_2021()
-                && next_token.is_token(&BinOp(token::BinOpToken::Or))
+                && next_token.is_token(&token::Or)
             {
                 let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
                     span,
@@ -1296,7 +1296,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
             const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
             match tok {
                 TokenTree::Token(token) => match token.kind {
-                    FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
+                    FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                     Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                         IsInFollow::Yes
                     }
@@ -1332,9 +1332,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                     | Colon
                     | Eq
                     | Gt
-                    | BinOp(token::Shr)
+                    | Shr
                     | Semi
-                    | BinOp(token::Or) => IsInFollow::Yes,
+                    | Or => IsInFollow::Yes,
                     Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                         IsInFollow::Yes
                     }
@@ -302,8 +302,8 @@ fn parse_tree<'a>(
 /// `None`.
 fn kleene_op(token: &Token) -> Option<KleeneOp> {
     match token.kind {
-        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
-        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+        token::Star => Some(KleeneOp::ZeroOrMore),
+        token::Plus => Some(KleeneOp::OneOrMore),
         token::Question => Some(KleeneOp::ZeroOrOne),
         _ => None,
     }
@@ -180,28 +180,28 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
             Gt => op(">"),
             AndAnd => op("&&"),
             OrOr => op("||"),
-            Not => op("!"),
+            Bang => op("!"),
             Tilde => op("~"),
-            BinOp(Plus) => op("+"),
-            BinOp(Minus) => op("-"),
-            BinOp(Star) => op("*"),
-            BinOp(Slash) => op("/"),
-            BinOp(Percent) => op("%"),
-            BinOp(Caret) => op("^"),
-            BinOp(And) => op("&"),
-            BinOp(Or) => op("|"),
-            BinOp(Shl) => op("<<"),
-            BinOp(Shr) => op(">>"),
-            BinOpEq(Plus) => op("+="),
-            BinOpEq(Minus) => op("-="),
-            BinOpEq(Star) => op("*="),
-            BinOpEq(Slash) => op("/="),
-            BinOpEq(Percent) => op("%="),
-            BinOpEq(Caret) => op("^="),
-            BinOpEq(And) => op("&="),
-            BinOpEq(Or) => op("|="),
-            BinOpEq(Shl) => op("<<="),
-            BinOpEq(Shr) => op(">>="),
+            Plus => op("+"),
+            Minus => op("-"),
+            Star => op("*"),
+            Slash => op("/"),
+            Percent => op("%"),
+            Caret => op("^"),
+            And => op("&"),
+            Or => op("|"),
+            Shl => op("<<"),
+            Shr => op(">>"),
+            PlusEq => op("+="),
+            MinusEq => op("-="),
+            StarEq => op("*="),
+            SlashEq => op("/="),
+            PercentEq => op("%="),
+            CaretEq => op("^="),
+            AndEq => op("&="),
+            OrEq => op("|="),
+            ShlEq => op("<<="),
+            ShrEq => op(">>="),
             At => op("@"),
             Dot => op("."),
             DotDot => op(".."),
@@ -322,16 +322,16 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
             b'=' => Eq,
             b'<' => Lt,
             b'>' => Gt,
-            b'!' => Not,
+            b'!' => Bang,
             b'~' => Tilde,
-            b'+' => BinOp(Plus),
-            b'-' => BinOp(Minus),
-            b'*' => BinOp(Star),
-            b'/' => BinOp(Slash),
-            b'%' => BinOp(Percent),
-            b'^' => BinOp(Caret),
-            b'&' => BinOp(And),
-            b'|' => BinOp(Or),
+            b'+' => Plus,
+            b'-' => Minus,
+            b'*' => Star,
+            b'/' => Slash,
+            b'%' => Percent,
+            b'^' => Caret,
+            b'&' => And,
+            b'|' => Or,
             b'@' => At,
             b'.' => Dot,
             b',' => Comma,
@@ -372,10 +372,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 suffix,
                 span,
             }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                 let b = tokenstream::TokenTree::token_alone(integer, span);
                 smallvec![a, b]
             }
@@ -385,10 +384,9 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 suffix,
                 span,
             }) if symbol.as_str().starts_with('-') => {
-                let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(Minus, span);
                 let b = tokenstream::TokenTree::token_alone(float, span);
                 smallvec![a, b]
             }
@@ -599,10 +597,7 @@ impl server::TokenStream for Rustc<'_, '_> {
                 Ok(Self::TokenStream::from_iter([
                     // FIXME: The span of the `-` token is lost when
                     // parsing, so we cannot faithfully recover it here.
-                    tokenstream::TokenTree::token_joint_hidden(
-                        token::BinOp(token::Minus),
-                        e.span,
-                    ),
+                    tokenstream::TokenTree::token_joint_hidden(token::Minus, e.span),
                     tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                 ]))
             }
@@ -2,14 +2,14 @@ use std::any::Any;
 use std::ffi::OsString;
 use std::io::{self, BufWriter, Write};
 use std::path::{Path, PathBuf};
-use std::sync::{Arc, LazyLock};
+use std::sync::{Arc, LazyLock, OnceLock};
 use std::{env, fs, iter};

 use rustc_ast as ast;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::parallel;
 use rustc_data_structures::steal::Steal;
-use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, OnceLock, WorkerLocal};
+use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand};
 use rustc_feature::Features;
 use rustc_fs_util::try_canonicalize;
@@ -3782,7 +3782,7 @@ declare_lint! {
     Warn,
     "use of unsupported calling convention for function pointer",
     @future_incompatible = FutureIncompatibleInfo {
-        reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
+        reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
        reference: "issue #130260 <https://github.com/rust-lang/rust/issues/130260>",
     };
 }
@@ -2,7 +2,7 @@

 use std::iter::TrustedLen;
 use std::path::Path;
-use std::sync::Arc;
+use std::sync::{Arc, OnceLock};
 use std::{io, iter, mem};

 pub(super) use cstore_impl::provide;
@@ -11,7 +11,7 @@ use rustc_ast as ast;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::owned_slice::OwnedSlice;
-use rustc_data_structures::sync::{Lock, OnceLock};
+use rustc_data_structures::sync::Lock;
 use rustc_data_structures::unhash::UnhashMap;
 use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind};
 use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro};
@@ -1,8 +1,9 @@
+use std::sync::OnceLock;
+
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph;
 use rustc_data_structures::graph::dominators::{Dominators, dominators};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceLock;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
 use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
@@ -222,7 +222,7 @@ impl AllocError {
 }

 /// The information that makes up a memory access: offset and size.
-#[derive(Copy, Clone, PartialEq)]
+#[derive(Copy, Clone)]
 pub struct AllocRange {
     pub start: Size,
     pub size: Size,
@@ -133,6 +133,20 @@ pub macro with_no_queries($e:expr) {{
     ))
 }}

+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum WrapBinderMode {
+    ForAll,
+    Unsafe,
+}
+impl WrapBinderMode {
+    pub fn start_str(self) -> &'static str {
+        match self {
+            WrapBinderMode::ForAll => "for<",
+            WrapBinderMode::Unsafe => "unsafe<",
+        }
+    }
+}
+
 /// The "region highlights" are used to control region printing during
 /// specific error messages. When a "region highlight" is enabled, it
 /// gives an alternate way to print specific regions. For now, we
@@ -219,7 +233,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
         self.print_def_path(def_id, args)
     }

-    fn in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError>
+    fn print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError>
     where
         T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>,
     {
@@ -229,6 +243,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
     fn wrap_binder<T, F: FnOnce(&T, &mut Self) -> Result<(), fmt::Error>>(
         &mut self,
         value: &ty::Binder<'tcx, T>,
+        _mode: WrapBinderMode,
         f: F,
     ) -> Result<(), PrintError>
     where
@@ -703,8 +718,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
             }
             ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))),
             ty::UnsafeBinder(ref bound_ty) => {
-                // FIXME(unsafe_binders): Make this print `unsafe<>` rather than `for<>`.
-                self.wrap_binder(bound_ty, |ty, cx| cx.pretty_print_type(*ty))?;
+                self.wrap_binder(bound_ty, WrapBinderMode::Unsafe, |ty, cx| {
+                    cx.pretty_print_type(*ty)
+                })?;
             }
             ty::Infer(infer_ty) => {
                 if self.should_print_verbose() {
@@ -1067,29 +1083,33 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
                 };

                 if let Some(return_ty) = entry.return_ty {
-                    self.wrap_binder(&bound_args_and_self_ty, |(args, _), cx| {
-                        define_scoped_cx!(cx);
-                        p!(write("{}", tcx.item_name(trait_def_id)));
-                        p!("(");
+                    self.wrap_binder(
+                        &bound_args_and_self_ty,
+                        WrapBinderMode::ForAll,
+                        |(args, _), cx| {
+                            define_scoped_cx!(cx);
+                            p!(write("{}", tcx.item_name(trait_def_id)));
+                            p!("(");

-                        for (idx, ty) in args.iter().enumerate() {
-                            if idx > 0 {
-                                p!(", ");
+                            for (idx, ty) in args.iter().enumerate() {
+                                if idx > 0 {
+                                    p!(", ");
+                                }
+                                p!(print(ty));
                             }
-                            p!(print(ty));
-                        }

-                        p!(")");
-                        if let Some(ty) = return_ty.skip_binder().as_type() {
-                            if !ty.is_unit() {
-                                p!(" -> ", print(return_ty));
+                            p!(")");
+                            if let Some(ty) = return_ty.skip_binder().as_type() {
+                                if !ty.is_unit() {
+                                    p!(" -> ", print(return_ty));
+                                }
                             }
-                        }
-                        p!(write("{}", if paren_needed { ")" } else { "" }));
+                            p!(write("{}", if paren_needed { ")" } else { "" }));

-                        first = false;
-                        Ok(())
-                    })?;
+                            first = false;
+                            Ok(())
+                        },
+                    )?;
                 } else {
                     // Otherwise, render this like a regular trait.
                     traits.insert(
@@ -1110,7 +1130,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
         for (trait_pred, assoc_items) in traits {
             write!(self, "{}", if first { "" } else { " + " })?;

-            self.wrap_binder(&trait_pred, |trait_pred, cx| {
+            self.wrap_binder(&trait_pred, WrapBinderMode::ForAll, |trait_pred, cx| {
                 define_scoped_cx!(cx);

                 if trait_pred.polarity == ty::PredicatePolarity::Negative {
@@ -1302,7 +1322,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
         let mut first = true;

         if let Some(bound_principal) = predicates.principal() {
-            self.wrap_binder(&bound_principal, |principal, cx| {
+            self.wrap_binder(&bound_principal, WrapBinderMode::ForAll, |principal, cx| {
                 define_scoped_cx!(cx);
                 p!(print_def_path(principal.def_id, &[]));

@@ -1927,7 +1947,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
         let kind = closure.kind_ty().to_opt_closure_kind().unwrap_or(ty::ClosureKind::Fn);

         write!(self, "impl ")?;
-        self.wrap_binder(&sig, |sig, cx| {
+        self.wrap_binder(&sig, WrapBinderMode::ForAll, |sig, cx| {
             define_scoped_cx!(cx);

             p!(write("{kind}("));
@@ -2367,22 +2387,23 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> {
         Ok(())
     }

-    fn in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError>
+    fn print_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), PrintError>
     where
         T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>,
     {
-        self.pretty_in_binder(value)
+        self.pretty_print_in_binder(value)
     }

     fn wrap_binder<T, C: FnOnce(&T, &mut Self) -> Result<(), PrintError>>(
         &mut self,
         value: &ty::Binder<'tcx, T>,
+        mode: WrapBinderMode,
         f: C,
     ) -> Result<(), PrintError>
     where
         T: TypeFoldable<TyCtxt<'tcx>>,
     {
-        self.pretty_wrap_binder(value, f)
+        self.pretty_wrap_binder(value, mode, f)
     }

     fn typed_value(
@@ -2632,6 +2653,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
     pub fn name_all_regions<T>(
         &mut self,
         value: &ty::Binder<'tcx, T>,
+        mode: WrapBinderMode,
     ) -> Result<(T, UnordMap<ty::BoundRegion, ty::Region<'tcx>>), fmt::Error>
     where
         T: TypeFoldable<TyCtxt<'tcx>>,
@@ -2705,9 +2727,13 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
         // anyways.
         let (new_value, map) = if self.should_print_verbose() {
             for var in value.bound_vars().iter() {
-                start_or_continue(self, "for<", ", ");
+                start_or_continue(self, mode.start_str(), ", ");
                 write!(self, "{var:?}")?;
             }
+            // Unconditionally render `unsafe<>`.
+            if value.bound_vars().is_empty() && mode == WrapBinderMode::Unsafe {
+                start_or_continue(self, mode.start_str(), "");
+            }
             start_or_continue(self, "", "> ");
             (value.clone().skip_binder(), UnordMap::default())
         } else {
@@ -2772,8 +2798,9 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
                     }
                 };

-                if !trim_path {
-                    start_or_continue(self, "for<", ", ");
+                // Unconditionally render `unsafe<>`.
+                if !trim_path || mode == WrapBinderMode::Unsafe {
+                    start_or_continue(self, mode.start_str(), ", ");
                     do_continue(self, name);
                 }
                 ty::Region::new_bound(tcx, ty::INNERMOST, ty::BoundRegion { var: br.var, kind })
@@ -2786,9 +2813,12 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
             };
             let new_value = value.clone().skip_binder().fold_with(&mut folder);
             let region_map = folder.region_map;
-            if !trim_path {
-                start_or_continue(self, "", "> ");
-            }
+
+            if mode == WrapBinderMode::Unsafe && region_map.is_empty() {
+                start_or_continue(self, mode.start_str(), "");
+            }
+            start_or_continue(self, "", "> ");
+
             (new_value, region_map)
         };

@@ -2797,12 +2827,15 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
         Ok((new_value, map))
     }

-    pub fn pretty_in_binder<T>(&mut self, value: &ty::Binder<'tcx, T>) -> Result<(), fmt::Error>
+    pub fn pretty_print_in_binder<T>(
+        &mut self,
+        value: &ty::Binder<'tcx, T>,
+    ) -> Result<(), fmt::Error>
     where
         T: Print<'tcx, Self> + TypeFoldable<TyCtxt<'tcx>>,
     {
         let old_region_index = self.region_index;
-        let (new_value, _) = self.name_all_regions(value)?;
+        let (new_value, _) = self.name_all_regions(value, WrapBinderMode::ForAll)?;
         new_value.print(self)?;
         self.region_index = old_region_index;
         self.binder_depth -= 1;
@@ -2812,13 +2845,14 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
     pub fn pretty_wrap_binder<T, C: FnOnce(&T, &mut Self) -> Result<(), fmt::Error>>(
         &mut self,
         value: &ty::Binder<'tcx, T>,
+        mode: WrapBinderMode,
         f: C,
     ) -> Result<(), fmt::Error>
     where
         T: TypeFoldable<TyCtxt<'tcx>>,
     {
         let old_region_index = self.region_index;
-        let (new_value, _) = self.name_all_regions(value)?;
+        let (new_value, _) = self.name_all_regions(value, mode)?;
         f(&new_value, self)?;
         self.region_index = old_region_index;
         self.binder_depth -= 1;
@@ -2877,7 +2911,7 @@ where
     T: Print<'tcx, P> + TypeFoldable<TyCtxt<'tcx>>,
 {
     fn print(&self, cx: &mut P) -> Result<(), PrintError> {
-        cx.in_binder(self)
+        cx.print_in_binder(self)
     }
 }

|
@ -209,7 +209,7 @@ use std::path::PathBuf;
|
||||
|
||||
use rustc_attr_parsing::InlineAttr;
|
||||
use rustc_data_structures::fx::FxIndexMap;
|
||||
use rustc_data_structures::sync::{LRef, MTLock, par_for_each_in};
|
||||
use rustc_data_structures::sync::{MTLock, par_for_each_in};
|
||||
use rustc_data_structures::unord::{UnordMap, UnordSet};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::DefKind;
|
||||
@@ -357,7 +357,7 @@ impl<'tcx> Extend<Spanned<MonoItem<'tcx>>> for MonoItems<'tcx> {
 fn collect_items_rec<'tcx>(
     tcx: TyCtxt<'tcx>,
     starting_item: Spanned<MonoItem<'tcx>>,
-    state: LRef<'_, SharedState<'tcx>>,
+    state: &SharedState<'tcx>,
     recursion_depths: &mut DefIdMap<usize>,
     recursion_limit: Limit,
     mode: CollectionMode,
@@ -1671,30 +1671,26 @@ pub(crate) fn collect_crate_mono_items<'tcx>(

     debug!("building mono item graph, beginning at roots");

-    let mut state = SharedState {
+    let state = SharedState {
         visited: MTLock::new(UnordSet::default()),
         mentioned: MTLock::new(UnordSet::default()),
         usage_map: MTLock::new(UsageMap::new()),
     };
     let recursion_limit = tcx.recursion_limit();

-    {
-        let state: LRef<'_, _> = &mut state;
-
-        tcx.sess.time("monomorphization_collector_graph_walk", || {
-            par_for_each_in(roots, |root| {
-                let mut recursion_depths = DefIdMap::default();
-                collect_items_rec(
-                    tcx,
-                    dummy_spanned(root),
-                    state,
-                    &mut recursion_depths,
-                    recursion_limit,
-                    CollectionMode::UsedItems,
-                );
-            });
-        });
-    }
+    tcx.sess.time("monomorphization_collector_graph_walk", || {
+        par_for_each_in(roots, |root| {
+            let mut recursion_depths = DefIdMap::default();
+            collect_items_rec(
+                tcx,
+                dummy_spanned(root),
+                &state,
+                &mut recursion_depths,
+                recursion_limit,
+                CollectionMode::UsedItems,
+            );
+        });
+    });

     // The set of MonoItems was created in an inherently indeterministic order because
     // of parallelism. We sort it here to ensure that the output is deterministic.
@ -384,17 +384,17 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
rustc_lexer::TokenKind::Colon => token::Colon,
rustc_lexer::TokenKind::Dollar => token::Dollar,
rustc_lexer::TokenKind::Eq => token::Eq,
rustc_lexer::TokenKind::Bang => token::Not,
rustc_lexer::TokenKind::Bang => token::Bang,
rustc_lexer::TokenKind::Lt => token::Lt,
rustc_lexer::TokenKind::Gt => token::Gt,
rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
rustc_lexer::TokenKind::And => token::BinOp(token::And),
rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
rustc_lexer::TokenKind::Minus => token::Minus,
rustc_lexer::TokenKind::And => token::And,
rustc_lexer::TokenKind::Or => token::Or,
rustc_lexer::TokenKind::Plus => token::Plus,
rustc_lexer::TokenKind::Star => token::Star,
rustc_lexer::TokenKind::Slash => token::Slash,
rustc_lexer::TokenKind::Caret => token::Caret,
rustc_lexer::TokenKind::Percent => token::Percent,

rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
// Don't emit diagnostics for sequences of the same invalid token

@ -308,11 +308,11 @@ pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
(" ", "Space", None),
("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
("-", "Minus/Hyphen", Some(token::Minus)),
(",", "Comma", Some(token::Comma)),
(";", "Semicolon", Some(token::Semi)),
(":", "Colon", Some(token::Colon)),
("!", "Exclamation Mark", Some(token::Not)),
("!", "Exclamation Mark", Some(token::Bang)),
("?", "Question Mark", Some(token::Question)),
(".", "Period", Some(token::Dot)),
("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
@ -321,11 +321,11 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
("*", "Asterisk", Some(token::BinOp(token::Star))),
("/", "Slash", Some(token::BinOp(token::Slash))),
("*", "Asterisk", Some(token::Star)),
("/", "Slash", Some(token::Slash)),
("\\", "Backslash", None),
("&", "Ampersand", Some(token::BinOp(token::And))),
("+", "Plus Sign", Some(token::BinOp(token::Plus))),
("&", "Ampersand", Some(token::And)),
("+", "Plus Sign", Some(token::Plus)),
("<", "Less-Than Sign", Some(token::Lt)),
("=", "Equals Sign", Some(token::Eq)),
("==", "Double Equals Sign", Some(token::EqEq)),

@ -130,7 +130,7 @@ impl<'a> Parser<'a> {
assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");

let style =
if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };

this.expect(exp!(OpenBracket))?;
let item = this.parse_attr_item(ForceCollect::No)?;
@ -312,7 +312,7 @@ impl<'a> Parser<'a> {
loop {
let start_pos = self.num_bump_calls;
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
if attr_style == ast::AttrStyle::Inner {

@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
let mut number_of_gt = 0;
while self.look_ahead(position, |t| {
trace!("check_trailing_angle_brackets: t={:?}", t);
if *t == token::BinOp(token::BinOpToken::Shr) {
if *t == token::Shr {
number_of_shr += 1;
true
} else if *t == token::Gt {
@ -1222,7 +1222,7 @@ impl<'a> Parser<'a> {
let span = lo.to(self.prev_token.span);
// Detect trailing `>` like in `x.collect::Vec<_>>()`.
let mut trailing_span = self.prev_token.span.shrink_to_hi();
while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
while self.token == token::Shr || self.token == token::Gt {
trailing_span = trailing_span.to(self.token.span);
self.bump();
}
@ -1468,8 +1468,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
// So far we have parsed `foo<bar<`, consume the rest of the type args.
let modifiers =
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)];
self.consume_tts(1, &modifiers);

if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
@ -1962,7 +1961,7 @@ impl<'a> Parser<'a> {
&mut self,
await_sp: Span,
) -> PResult<'a, P<Expr>> {
let (hi, expr, is_question) = if self.token == token::Not {
let (hi, expr, is_question) = if self.token == token::Bang {
// Handle `await!(<expr>)`.
self.recover_await_macro()?
} else {
@ -1974,7 +1973,7 @@ impl<'a> Parser<'a> {
}

fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
self.expect(exp!(Not))?;
self.expect(exp!(Bang))?;
self.expect(exp!(OpenParen))?;
let expr = self.parse_expr()?;
self.expect(exp!(CloseParen))?;
@ -2034,7 +2033,7 @@ impl<'a> Parser<'a> {

pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
let is_try = self.token.is_keyword(kw::Try);
let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for !
let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (

if is_try && is_questionmark && is_open {
@ -2613,8 +2612,7 @@ impl<'a> Parser<'a> {
|| self.token == TokenKind::Dot;
// This will be true when a trait object type `Foo +` or a path which was a `const fn` with
// type params has been parsed.
let was_op =
matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
let was_op = matches!(self.prev_token.kind, token::Plus | token::Shr | token::Gt);
if !is_op_or_dot && !was_op {
// We perform these checks and early return to avoid taking a snapshot unnecessarily.
return Err(err);
@ -2992,8 +2990,7 @@ impl<'a> Parser<'a> {

pub(super) fn recover_vcs_conflict_marker(&mut self) {
// <<<<<<<
let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
else {
let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else {
return;
};
let mut spans = Vec::with_capacity(3);
@ -3008,15 +3005,13 @@ impl<'a> Parser<'a> {
if self.token == TokenKind::Eof {
break;
}
if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))
{
if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) {
middlediff3 = Some(span);
}
if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
middle = Some(span);
}
if let Some(span) = self.conflict_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt)
{
if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) {
spans.push(span);
end = Some(span);
break;

@ -239,8 +239,8 @@ impl<'a> Parser<'a> {
self.bump();
}

if self.prev_token == token::BinOp(token::Plus)
&& self.token == token::BinOp(token::Plus)
if self.prev_token == token::Plus
&& self.token == token::Plus
&& self.prev_token.span.between(self.token.span).is_empty()
{
let op_span = self.prev_token.span.to(self.token.span);
@ -250,8 +250,8 @@ impl<'a> Parser<'a> {
continue;
}

if self.prev_token == token::BinOp(token::Minus)
&& self.token == token::BinOp(token::Minus)
if self.prev_token == token::Minus
&& self.token == token::Minus
&& self.prev_token.span.between(self.token.span).is_empty()
&& !self.look_ahead(1, |tok| tok.can_begin_expr())
{
@ -505,23 +505,23 @@ impl<'a> Parser<'a> {
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
match this.token.uninterpolate().kind {
// `!expr`
token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
// `~expr`
token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
// `-expr`
token::BinOp(token::Minus) => {
token::Minus => {
make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
}
// `*expr`
token::BinOp(token::Star) => {
token::Star => {
make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
}
// `&expr` and `&&expr`
token::BinOp(token::And) | token::AndAnd => {
token::And | token::AndAnd => {
make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
}
// `+lit`
token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
let mut err = errors::LeadingPlusNotSupported {
span: lo,
remove_plus: None,
@ -541,9 +541,7 @@ impl<'a> Parser<'a> {
this.parse_expr_prefix(attrs)
}
// Recover from `++x`:
token::BinOp(token::Plus)
if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
{
token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
let starts_stmt = this.prev_token == token::Semi
|| this.prev_token == token::CloseDelim(Delimiter::Brace);
let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
@ -727,14 +725,12 @@ impl<'a> Parser<'a> {
suggestion,
})
}
token::BinOp(token::Shl) => {
self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
shift: self.token.span,
r#type: path,
args: args_span,
suggestion,
})
}
token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
shift: self.token.span,
r#type: path,
args: args_span,
suggestion,
}),
_ => {
// We can end up here even without `<` being the next token, for
// example because `parse_ty_no_plus` returns `Err` on keywords,
@ -1578,7 +1574,7 @@ impl<'a> Parser<'a> {
};

// `!`, as an operator, is prefix, so we know this isn't that.
let (span, kind) = if self.eat(exp!(Not)) {
let (span, kind) = if self.eat(exp!(Bang)) {
// MACRO INVOCATION expression
if qself.is_some() {
self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
@ -2599,7 +2595,7 @@ impl<'a> Parser<'a> {
missing_let: None,
comparison: None,
};
if self.prev_token == token::BinOp(token::Or) {
if self.prev_token == token::Or {
// This was part of a closure, the that part of the parser recover.
return Err(self.dcx().create_err(err));
} else {

@ -382,7 +382,7 @@ impl<'a> Parser<'a> {

/// Are we sure this could not possibly be a macro invocation?
fn isnt_macro_invocation(&mut self) -> bool {
self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep)
}

/// Recover on encountering a struct, enum, or method definition where the user
@ -480,7 +480,7 @@ impl<'a> Parser<'a> {
/// Parses an item macro, e.g., `item!();`.
fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
self.expect(exp!(Not))?; // `!`
self.expect(exp!(Bang))?; // `!`
match self.parse_delim_args() {
// `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
Ok(args) => {
@ -540,7 +540,7 @@ impl<'a> Parser<'a> {

fn parse_polarity(&mut self) -> ast::ImplPolarity {
// Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) {
self.bump(); // `!`
ast::ImplPolarity::Negative(self.prev_token.span)
} else {
@ -1293,7 +1293,7 @@ impl<'a> Parser<'a> {
if token.is_keyword(kw::Move) {
return true;
}
matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
matches!(token.kind, token::Or | token::OrOr)
})
} else {
// `$qual static`
@ -1579,7 +1579,7 @@ impl<'a> Parser<'a> {
}
let ident = this.parse_field_ident("enum", vlo)?;

if this.token == token::Not {
if this.token == token::Bang {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}
@ -1814,7 +1814,7 @@ impl<'a> Parser<'a> {
let attrs = p.parse_outer_attributes()?;
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
let mut snapshot = None;
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
if p.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
// that can be a valid type start, so we snapshot and reparse only we've
// encountered another parse error.
@ -2034,7 +2034,7 @@ impl<'a> Parser<'a> {
attrs: AttrVec,
) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?;
if self.token == token::Not {
if self.token == token::Bang {
if let Err(mut err) = self.unexpected() {
// Encounter the macro invocation
err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
@ -2184,7 +2184,7 @@ impl<'a> Parser<'a> {
if self.check_keyword(exp!(MacroRules)) {
let macro_rules_span = self.token.span;

if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) {
return IsMacroRulesItem::Yes { has_bang: true };
} else if self.look_ahead(1, |t| (t.is_ident())) {
// macro_rules foo
@ -2209,11 +2209,11 @@ impl<'a> Parser<'a> {
self.expect_keyword(exp!(MacroRules))?; // `macro_rules`

if has_bang {
self.expect(exp!(Not))?; // `!`
self.expect(exp!(Bang))?; // `!`
}
let ident = self.parse_ident()?;

if self.eat(exp!(Not)) {
if self.eat(exp!(Bang)) {
// Handle macro_rules! foo!
let span = self.prev_token.span;
self.dcx().emit_err(errors::MacroNameRemoveBang { span });
@ -3011,7 +3011,7 @@ impl<'a> Parser<'a> {
// else is parsed as a normal function parameter list, so some lookahead is required.
let eself_lo = self.token.span;
let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
token::BinOp(token::And) => {
token::And => {
let eself = if is_isolated_self(self, 1) {
// `&self`
self.bump();
@ -3041,12 +3041,12 @@ impl<'a> Parser<'a> {
(eself, self_ident, hi)
}
// `*self`
token::BinOp(token::Star) if is_isolated_self(self, 1) => {
token::Star if is_isolated_self(self, 1) => {
self.bump();
recover_self_ptr(self)?
}
// `*mut self` and `*const self`
token::BinOp(token::Star)
token::Star
if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) =>
{
self.bump();
@ -3077,7 +3077,7 @@ impl<'a> Parser<'a> {
}
_ => 0,
},
token::BinOp(token::And) | token::AndAnd => 1,
token::And | token::AndAnd => 1,
_ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};

@ -813,9 +813,9 @@ impl<'a> Parser<'a> {
self.is_keyword_ahead(0, &[kw::Const])
&& self.look_ahead(1, |t| match &t.kind {
// async closures do not work with const closures, so we do not parse that here.
token::Ident(kw::Move | kw::Static, IdentIsRaw::No)
| token::OrOr
| token::BinOp(token::Or) => true,
token::Ident(kw::Move | kw::Static, IdentIsRaw::No) | token::OrOr | token::Or => {
true
}
_ => false,
})
}
@ -1651,7 +1651,7 @@ impl<'a> Parser<'a> {
/// `::{` or `::*`
fn is_import_coupler(&mut self) -> bool {
self.check_path_sep_and_look_ahead(|t| {
matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star))
matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
})
}

@ -3,7 +3,7 @@ use std::ops::Bound;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::ptr::P;
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token};
use rustc_ast::util::parser::ExprPrecedence;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{
@ -358,7 +358,7 @@ impl<'a> Parser<'a> {
)
});
match (is_end_ahead, &self.token.kind) {
(true, token::BinOp(token::Or) | token::OrOr) => {
(true, token::Or | token::OrOr) => {
// A `|` or possibly `||` token shouldn't be here. Ban it.
self.dcx().emit_err(TrailingVertNotAllowed {
span: self.token.span,
@ -432,7 +432,11 @@ impl<'a> Parser<'a> {
// `[` is included for indexing operations,
// `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`),
// `as` is included for type casts
let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
let has_trailing_operator = matches!(
self.token.kind,
token::Plus | token::Minus | token::Star | token::Slash | token::Percent
| token::Caret | token::And | token::Shl | token::Shr // excludes `Or`
)
|| self.token == token::Question
|| (self.token == token::OpenDelim(Delimiter::Bracket)
&& self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]`
@ -763,7 +767,7 @@ impl<'a> Parser<'a> {
self.recover_dotdotdot_rest_pat(lo)
} else if let Some(form) = self.parse_range_end() {
self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
} else if self.eat(exp!(Not)) {
} else if self.eat(exp!(Bang)) {
// Parse `!`
self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
PatKind::Never
@ -819,7 +823,7 @@ impl<'a> Parser<'a> {
};
let span = lo.to(self.prev_token.span);

if qself.is_none() && self.check(exp!(Not)) {
if qself.is_none() && self.check(exp!(Bang)) {
self.parse_pat_mac_invoc(path)?
} else if let Some(form) = self.parse_range_end() {
let begin = self.mk_expr(span, ExprKind::Path(qself, path));
@ -1255,7 +1259,7 @@ impl<'a> Parser<'a> {
|| self.look_ahead(dist, |t| {
t.is_path_start() // e.g. `MY_CONST`;
|| *t == token::Dot // e.g. `.5` for recovery;
|| matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
|| matches!(t.kind, token::Literal(..) | token::Minus)
|| t.is_bool_lit()
|| t.is_whole_expr()
|| t.is_lifetime() // recover `'a` instead of `'a'`
@ -1331,7 +1335,7 @@ impl<'a> Parser<'a> {
| token::OpenDelim(Delimiter::Brace) // A struct pattern.
| token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
| token::PathSep // A tuple / struct variant pattern.
| token::Not)) // A macro expanding to a pattern.
| token::Bang)) // A macro expanding to a pattern.
}

/// Parses `ident` or `ident @ pat`.

@ -305,10 +305,7 @@ impl<'a> Parser<'a> {
let is_args_start = |token: &Token| {
matches!(
token.kind,
token::Lt
| token::BinOp(token::Shl)
| token::OpenDelim(Delimiter::Parenthesis)
| token::LArrow
token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow
)
};
let check_args_start = |this: &mut Self| {

@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;

if this.eat(exp!(Not)) {
if this.eat(exp!(Bang)) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((
stmt_mac,
@ -442,7 +442,16 @@ impl<'a> Parser<'a> {
/// Parses the RHS of a local variable declaration (e.g., `= 14;`).
fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
let eq_consumed = match self.token.kind {
token::BinOpEq(..) => {
token::PlusEq
| token::MinusEq
| token::StarEq
| token::SlashEq
| token::PercentEq
| token::CaretEq
| token::AndEq
| token::OrEq
| token::ShlEq
| token::ShrEq => {
// Recover `let x <op>= 1` as `let x = 1` We must not use `+ BytePos(1)` here
// because `<op>` can be a multi-byte lookalike that was recovered, e.g. `➖=` (the
// `➖` is a U+2796 Heavy Minus Sign Unicode Character) that was recovered as a
@ -688,7 +697,7 @@ impl<'a> Parser<'a> {
if self.token == token::Eof {
break;
}
if self.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
// that can be a valid path start, so we snapshot and reparse only we've
// encountered another parse error.

@ -2291,7 +2291,7 @@ fn string_to_tts_macro() {
Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
_,
),
TokenTree::Token(Token { kind: token::Not, .. }, _),
TokenTree::Token(Token { kind: token::Bang, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
TokenTree::Delimited(.., macro_delim, macro_tts),
] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {

@ -25,7 +25,7 @@ pub enum TokenType {
Gt,
AndAnd,
OrOr,
Not,
Bang,
Tilde,

// BinOps
@ -172,7 +172,7 @@ impl TokenType {
Gt,
AndAnd,
OrOr,
Not,
Bang,
Tilde,

Plus,
@ -366,7 +366,7 @@ impl TokenType {
TokenType::Gt => "`>`",
TokenType::AndAnd => "`&&`",
TokenType::OrOr => "`||`",
TokenType::Not => "`!`",
TokenType::Bang => "`!`",
TokenType::Tilde => "`~`",

TokenType::Plus => "`+`",
@ -445,12 +445,6 @@ macro_rules! exp {
token_type: $crate::parser::token_type::TokenType::$tok
}
};
(@binop, $op:ident) => {
$crate::parser::token_type::ExpTokenPair {
tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op),
token_type: $crate::parser::token_type::TokenType::$op,
}
};
(@open, $delim:ident, $token_type:ident) => {
$crate::parser::token_type::ExpTokenPair {
tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
@ -485,8 +479,13 @@ macro_rules! exp {
(Gt) => { exp!(@tok, Gt) };
(AndAnd) => { exp!(@tok, AndAnd) };
(OrOr) => { exp!(@tok, OrOr) };
(Not) => { exp!(@tok, Not) };
(Bang) => { exp!(@tok, Bang) };
(Tilde) => { exp!(@tok, Tilde) };
(Plus) => { exp!(@tok, Plus) };
(Minus) => { exp!(@tok, Minus) };
(Star) => { exp!(@tok, Star) };
(And) => { exp!(@tok, And) };
(Or) => { exp!(@tok, Or) };
(At) => { exp!(@tok, At) };
(Dot) => { exp!(@tok, Dot) };
(DotDot) => { exp!(@tok, DotDot) };
@ -502,12 +501,6 @@ macro_rules! exp {
(Question) => { exp!(@tok, Question) };
(Eof) => { exp!(@tok, Eof) };

(Plus) => { exp!(@binop, Plus) };
(Minus) => { exp!(@binop, Minus) };
(Star) => { exp!(@binop, Star) };
(And) => { exp!(@binop, And) };
(Or) => { exp!(@binop, Or) };

(OpenParen) => { exp!(@open, Parenthesis, OpenParen) };
(OpenBrace) => { exp!(@open, Brace, OpenBrace) };
(OpenBracket) => { exp!(@open, Bracket, OpenBracket) };

@ -1,5 +1,5 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
use rustc_ast::util::case::Case;
use rustc_ast::{
self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
@ -86,7 +86,7 @@ enum AllowCVariadic {
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl)
t == &token::PathSep || t == &token::Lt || t == &token::Shl
}

fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
@ -260,7 +260,7 @@ impl<'a> Parser<'a> {
let mut impl_dyn_multi = false;
let kind = if self.check(exp!(OpenParen)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(exp!(Not)) {
} else if self.eat(exp!(Bang)) {
// Never type `!`
TyKind::Never
} else if self.eat(exp!(Star)) {
@ -399,7 +399,7 @@ impl<'a> Parser<'a> {
let mut trailing_plus = false;
let (ts, trailing) = self.parse_paren_comma_seq(|p| {
let ty = p.parse_ty()?;
trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
trailing_plus = p.prev_token == TokenKind::Plus;
Ok(ty)
})?;

@ -735,7 +735,7 @@ impl<'a> Parser<'a> {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;

*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;

Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
}
@ -747,11 +747,7 @@ impl<'a> Parser<'a> {
self.expect_lt()?;
let (args, _, _) = self.parse_seq_to_before_tokens(
&[exp!(Gt)],
&[
&TokenKind::Ge,
&TokenKind::BinOp(BinOpToken::Shr),
&TokenKind::BinOpEq(BinOpToken::Shr),
],
&[&TokenKind::Ge, &TokenKind::Shr, &TokenKind::Shr],
SeqSep::trailing_allowed(exp!(Comma)),
|self_| {
if self_.check_keyword(exp!(SelfUpper)) {
@ -781,7 +777,7 @@ impl<'a> Parser<'a> {
self.check_keyword(exp!(Dyn))
&& (self.token.uninterpolated_span().at_least_rust_2018()
|| self.look_ahead(1, |t| {
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star)
&& !can_continue_type_after_non_fn_ident(t)
}))
}
@ -803,7 +799,7 @@ impl<'a> Parser<'a> {

// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds()?;
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
*impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
Ok(TyKind::TraitObject(bounds, syntax))
}

@ -821,7 +817,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, TyKind> {
// Simple path
let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
if self.eat(exp!(Not)) {
if self.eat(exp!(Bang)) {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
@ -874,7 +870,7 @@ impl<'a> Parser<'a> {
fn can_begin_bound(&mut self) -> bool {
self.check_path()
|| self.check_lifetime()
|| self.check(exp!(Not))
|| self.check(exp!(Bang))
|| self.check(exp!(Question))
|| self.check(exp!(Tilde))
|| self.check_keyword(exp!(For))
@ -1025,7 +1021,7 @@ impl<'a> Parser<'a> {

let polarity = if self.eat(exp!(Question)) {
BoundPolarity::Maybe(self.prev_token.span)
} else if self.eat(exp!(Not)) {
} else if self.eat(exp!(Bang)) {
self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
BoundPolarity::Negative(self.prev_token.span)
} else {

@ -1,9 +1,9 @@
use std::fmt::Debug;
use std::hash::Hash;
use std::sync::OnceLock;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::sync::OnceLock;
pub use rustc_data_structures::vec_cache::VecCache;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_index::Idx;

@ -169,7 +169,7 @@ impl<'tcx> SymbolMangler<'tcx> {
Ok(())
}

fn in_binder<T>(
fn wrap_binder<T>(
&mut self,
value: &ty::Binder<'tcx, T>,
print_value: impl FnOnce(&mut Self, &T) -> Result<(), PrintError>,
@ -471,7 +471,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> {
ty::FnPtr(sig_tys, hdr) => {
let sig = sig_tys.with(hdr);
self.push("F");
self.in_binder(&sig, |cx, sig| {
self.wrap_binder(&sig, |cx, sig| {
if sig.safety.is_unsafe() {
cx.push("U");
}
@ -554,7 +554,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> {
// [<Trait> [{<Projection>}]] [{<Auto>}]
// Since any predicates after the first one shouldn't change the binders,
// just put them all in the binders of the first.
self.in_binder(&predicates[0], |cx, _| {
self.wrap_binder(&predicates[0], |cx, _| {
for predicate in predicates.iter() {
// It would be nice to be able to validate bound vars here, but
// projections can actually include bound vars from super traits

@ -65,7 +65,9 @@ use rustc_middle::bug;
use rustc_middle::dep_graph::DepContext;
use rustc_middle::traits::PatternOriginExpr;
use rustc_middle::ty::error::{ExpectedFound, TypeError, TypeErrorToStringExt};
use rustc_middle::ty::print::{PrintError, PrintTraitRefExt as _, with_forced_trimmed_paths};
use rustc_middle::ty::print::{
PrintError, PrintTraitRefExt as _, WrapBinderMode, with_forced_trimmed_paths,
};
use rustc_middle::ty::{
self, List, ParamEnv, Region, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable, TypeVisitable,
TypeVisitableExt,
@ -835,7 +837,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
let get_lifetimes = |sig| {
use rustc_hir::def::Namespace;
let (sig, reg) = ty::print::FmtPrinter::new(self.tcx, Namespace::TypeNS)
.name_all_regions(sig)
.name_all_regions(sig, WrapBinderMode::ForAll)
.unwrap();
let lts: Vec<String> =
reg.into_items().map(|(_, kind)| kind.to_string()).into_sorted_stable_ord();

@ -42,8 +42,6 @@ ty_utils_logical_op_not_supported = unsupported operation in generic constants,

ty_utils_loop_not_supported = loops and loop control flow are not supported in generic constants

ty_utils_multiple_array_fields_simd_type = monomorphising SIMD type `{$ty}` with more than one array field

ty_utils_needs_drop_overflow = overflow while checking whether `{$query_ty}` requires drop

ty_utils_never_to_any_not_supported = coercing the `never` type is not supported in generic constants

@ -82,12 +82,6 @@ pub(crate) struct ZeroLengthSimdType<'tcx> {
pub ty: Ty<'tcx>,
}

#[derive(Diagnostic)]
#[diag(ty_utils_multiple_array_fields_simd_type)]
pub(crate) struct MultipleArrayFieldsSimdType<'tcx> {
pub ty: Ty<'tcx>,
}

#[derive(Diagnostic)]
#[diag(ty_utils_oversized_simd_type)]
pub(crate) struct OversizedSimdType<'tcx> {

@ -28,9 +28,7 @@ use rustc_span::{Symbol, sym};
use tracing::{debug, instrument, trace};
use {rustc_abi as abi, rustc_hir as hir};

use crate::errors::{
MultipleArrayFieldsSimdType, NonPrimitiveSimdType, OversizedSimdType, ZeroLengthSimdType,
};
use crate::errors::{NonPrimitiveSimdType, OversizedSimdType, ZeroLengthSimdType};

mod invariant;

@ -450,71 +448,26 @@ fn layout_of_uncached<'tcx>(

// SIMD vector types.
ty::Adt(def, args) if def.repr().simd() => {
if !def.is_struct() {
// Should have yielded E0517 by now.
let guar = tcx
.dcx()
.delayed_bug("#[repr(simd)] was applied to an ADT that is not a struct");
return Err(error(cx, LayoutError::ReferencesError(guar)));
}

let fields = &def.non_enum_variant().fields;

// Supported SIMD vectors are homogeneous ADTs with at least one field:
// Supported SIMD vectors are ADTs with a single array field:
//
// * #[repr(simd)] struct S(T, T, T, T);
// * #[repr(simd)] struct S { x: T, y: T, z: T, w: T }
// * #[repr(simd)] struct S([T; 4])
//
// where T is a primitive scalar (integer/float/pointer).

// SIMD vectors with zero fields are not supported.
// (should be caught by typeck)
if fields.is_empty() {
tcx.dcx().emit_fatal(ZeroLengthSimdType { ty })
}

// Type of the first ADT field:
let f0_ty = fields[FieldIdx::ZERO].ty(tcx, args);

// Heterogeneous SIMD vectors are not supported:
// (should be caught by typeck)
for fi in fields {
if fi.ty(tcx, args) != f0_ty {
let guar = tcx.dcx().delayed_bug(
"#[repr(simd)] was applied to an ADT with heterogeneous field type",
);
return Err(error(cx, LayoutError::ReferencesError(guar)));
}
}

// The element type and number of elements of the SIMD vector
// are obtained from:
//
// * the element type and length of the single array field, if
// the first field is of array type, or
//
// * the homogeneous field type and the number of fields.
let (e_ty, e_len, is_array) = if let ty::Array(e_ty, _) = f0_ty.kind() {
// First ADT field is an array:

// SIMD vectors with multiple array fields are not supported:
// Can't be caught by typeck with a generic simd type.
if def.non_enum_variant().fields.len() != 1 {
tcx.dcx().emit_fatal(MultipleArrayFieldsSimdType { ty });
}

// Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = cx.layout_of(f0_ty)?.layout.fields() else {
return Err(error(cx, LayoutError::Unknown(ty)));
};

(*e_ty, *count, true)
} else {
// First ADT field is not an array:
(f0_ty, def.non_enum_variant().fields.len() as _, false)
let Some(ty::Array(e_ty, e_len)) = def
.is_struct()
.then(|| &def.variant(FIRST_VARIANT).fields)
.filter(|fields| fields.len() == 1)
.map(|fields| *fields[FieldIdx::ZERO].ty(tcx, args).kind())
else {
// Invalid SIMD types should have been caught by typeck by now.
let guar = tcx.dcx().delayed_bug("#[repr(simd)] was applied to an invalid ADT");
return Err(error(cx, LayoutError::ReferencesError(guar)));
};

let e_len = extract_const_value(cx, ty, e_len)?
.try_to_target_usize(tcx)
.ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;

// SIMD vectors of zero length are not supported.
// Additionally, lengths are capped at 2^16 as a fixed maximum backends must
// support.
@ -559,16 +512,12 @@ fn layout_of_uncached<'tcx>(
};
let size = size.align_to(align.abi);

// Compute the placement of the vector fields:
let fields = if is_array {
FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
} else {
FieldsShape::Array { stride: e_ly.size, count: e_len }
};

tcx.mk_layout(LayoutData {
variants: Variants::Single { index: FIRST_VARIANT },
fields,
fields: FieldsShape::Arbitrary {
offsets: [Size::ZERO].into(),
memory_index: [0].into(),
},
backend_repr: abi,
largest_niche: e_ly.largest_niche,
uninhabited: false,

@ -493,7 +493,7 @@ macro_rules! shl_impl_all {
)*)
}

shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 }
shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }

/// The right shift operator `>>`. Note that because this trait is implemented
/// for all integer types with multiple right-hand-side types, Rust's type

@ -994,7 +994,9 @@ impl Step for Rustc {
fn make_run(run: RunConfig<'_>) {
let crates = run.cargo_crates_in_set();
run.builder.ensure(Rustc {
compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
compiler: run
.builder
.compiler(run.builder.top_stage.saturating_sub(1), run.build_triple()),
target: run.target,
crates,
});
@ -1911,7 +1913,7 @@ impl Step for Assemble {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Assemble {
target_compiler: run.builder.compiler(run.builder.top_stage + 1, run.target),
target_compiler: run.builder.compiler(run.builder.top_stage, run.target),
});
}

@ -653,6 +653,20 @@ mod dist {
&["compiler/rustc".into(), "library".into()],
);

assert_eq!(builder.config.stage, 2);

// `compile::Rustc` includes one-stage-off compiler information as the target compiler
// artifacts get copied from there to the target stage sysroot.
// For example, `stage2/bin/rustc` gets copied from the `stage1-rustc` build directory.
assert_eq!(
first(builder.cache.all::<compile::Rustc>()),
&[
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 0),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 1),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_2, stage = 1),
]
);

assert_eq!(
first(builder.cache.all::<compile::Std>()),
&[
@ -664,15 +678,34 @@ mod dist {
std!(TEST_TRIPLE_1 => TEST_TRIPLE_3, stage = 2),
]
);
assert_eq!(builder.cache.all::<compile::Assemble>().len(), 5);

assert_eq!(
first(builder.cache.all::<compile::Rustc>()),
first(builder.cache.all::<compile::Assemble>()),
&[
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 0),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 1),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_1, stage = 2),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_2, stage = 1),
rustc!(TEST_TRIPLE_1 => TEST_TRIPLE_2, stage = 2),
compile::Assemble {
target_compiler: Compiler {
host: TargetSelection::from_user(TEST_TRIPLE_1),
stage: 0
}
},
compile::Assemble {
target_compiler: Compiler {
host: TargetSelection::from_user(TEST_TRIPLE_1),
stage: 1
}
},
compile::Assemble {
target_compiler: Compiler {
host: TargetSelection::from_user(TEST_TRIPLE_1),
stage: 2
}
},
compile::Assemble {
target_compiler: Compiler {
host: TargetSelection::from_user(TEST_TRIPLE_2),
stage: 2
}
},
]
);
}

@ -11,12 +11,12 @@ the [`jobs.yml`](../github-actions/jobs.yml) file.
To run a specific CI job locally, you can use the `citool` Rust crate:

```
cargo --manifest-path src/ci/citool/Cargo.toml run run-local <job-name>
cargo run --manifest-path src/ci/citool/Cargo.toml run-local <job-name>
```

For example, to run the `x86_64-gnu-llvm-18-1` job:
```
cargo --manifest-path src/ci/citool/Cargo.toml run run-local x86_64-gnu-llvm-18-1
cargo run --manifest-path src/ci/citool/Cargo.toml run-local x86_64-gnu-llvm-18-1
```

The job will output artifacts in an `obj/<image-name>` dir at the root of a repository. Note

@ -46,10 +46,8 @@ are implemented differently depending on whether `parallel-compiler` is true.

| data structure | parallel | non-parallel |
| -------------------------------- | --------------------------------------------------- | ------------ |
| OnceCell | std::sync::OnceLock | std::cell::OnceCell |
| Lock\<T> | (parking_lot::Mutex\<T>) | (std::cell::RefCell) |
| RwLock\<T> | (parking_lot::RwLock\<T>) | (std::cell::RefCell) |
| MTRef<'a, T> | &'a T | &'a mut T |
| MTLock\<T> | (Lock\<T>) | (T) |
| ReadGuard | parking_lot::RwLockReadGuard | std::cell::Ref |
| MappedReadGuard | parking_lot::MappedRwLockReadGuard | std::cell::Ref |

@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw};
use rustc_ast::token::{self, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::PrintState;
use rustc_ast_pretty::pprust::state::State as Printer;
@ -137,15 +137,10 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(Dollar, token::Ident(..)) => (false, DollarIdent),
(DollarIdent, token::Colon) => (false, DollarIdentColon),
(DollarIdentColon, token::Ident(..)) => (false, Other),
(
DollarParen,
token::BinOp(BinOpToken::Plus | BinOpToken::Star) | token::Question,
) => (false, Other),
(DollarParen, token::Plus | token::Star | token::Question) => (false, Other),
(DollarParen, _) => (false, DollarParenSep),
(DollarParenSep, token::BinOp(BinOpToken::Plus | BinOpToken::Star)) => {
(false, Other)
}
(Pound, token::Not) => (false, PoundBang),
(DollarParenSep, token::Plus | token::Star) => (false, Other),
(Pound, token::Bang) => (false, PoundBang),
(_, token::Ident(symbol, IdentIsRaw::No))
if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
{

@ -1,6 +1,6 @@
#![allow(clippy::similar_names)] // `expr` and `expn`

use std::sync::Arc;
use std::sync::{Arc, OnceLock};

use crate::get_unique_attr;
use crate::visitors::{Descend, for_each_expr_without_closures};
@ -8,7 +8,6 @@ use crate::visitors::{Descend, for_each_expr_without_closures};
use arrayvec::ArrayVec;
use rustc_ast::{FormatArgs, FormatArgument, FormatPlaceholder};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::OnceLock;
use rustc_hir::{self as hir, Expr, ExprKind, HirId, Node, QPath};
use rustc_lint::{LateContext, LintContext};
use rustc_span::def_id::DefId;

@ -11,7 +11,7 @@ on:

jobs:
publish:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:

@ -174,7 +174,7 @@ jobs:
- name: Install Nodejs
uses: actions/setup-node@v4
with:
node-version: 18
node-version: 22
if: needs.changes.outputs.typescript == 'true'

- name: Install xvfb

@ -19,7 +19,7 @@ env:

jobs:
rust:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: Rust
runs-on: ubuntu-latest
env:

@ -9,7 +9,7 @@ on:

jobs:
publish-libs:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:

@ -17,12 +17,12 @@ env:
RUSTUP_MAX_RETRIES: 10
FETCH_DEPTH: 0 # pull in the tags for the version string
MACOSX_DEPLOYMENT_TARGET: 13.0
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
ZIG_VERSION: 0.13.0
ZIGBUILD_VERSION: 0.19.8

jobs:
dist:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
strategy:
matrix:
include:
@ -36,13 +36,15 @@ jobs:
code-target: win32-arm64
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
zig_target: x86_64-unknown-linux-gnu.2.28
code-target: linux-x64
container: rockylinux:8
- os: ubuntu-latest
target: aarch64-unknown-linux-gnu
zig_target: aarch64-unknown-linux-gnu.2.28
code-target: linux-arm64
- os: ubuntu-latest
target: arm-unknown-linux-gnueabihf
zig_target: arm-unknown-linux-gnueabihf.2.28
code-target: linux-armhf
- os: macos-13
target: x86_64-apple-darwin
@ -64,40 +66,33 @@ jobs:
with:
fetch-depth: ${{ env.FETCH_DEPTH }}

- name: Install toolchain dependencies
if: matrix.container == 'rockylinux:8'
shell: bash
run: |
dnf install -y gcc
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
- name: Install Node.js toolchain
uses: actions/setup-node@v4
with:
node-version: 22

- name: Install Rust toolchain
run: |
rustup update --no-self-update stable
rustup target add ${{ matrix.target }}
rustup component add rust-src
rustup target add ${{ matrix.target }}

- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Install Zig toolchain
if: ${{ matrix.zig_target }}
run: |
which cargo
curl -L "https://ziglang.org/download/${ZIG_VERSION}/zig-linux-$(uname -m)-${ZIG_VERSION}.tar.xz" | sudo tar JxC /usr/local
sudo ln -s "/usr/local/zig-linux-$(uname -m)-${ZIG_VERSION}/zig" /usr/local/bin/zig
curl -L "https://github.com/rust-cross/cargo-zigbuild/releases/download/v${ZIGBUILD_VERSION}/cargo-zigbuild-v${ZIGBUILD_VERSION}.x86_64-unknown-linux-musl.tar.gz" | tar zxC ~/.cargo/bin

- name: Update apt repositories
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get update

- name: Install AArch64 target toolchain
if: matrix.target == 'aarch64-unknown-linux-gnu'
run: sudo apt-get install gcc-aarch64-linux-gnu

- name: Install ARM target toolchain
if: matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get install gcc-arm-linux-gnueabihf

- name: Dist
- name: Dist (plain)
if: ${{ !matrix.zig_target }}
run: cargo xtask dist --client-patch-version ${{ github.run_number }}

- name: Dist (using zigbuild)
if: ${{ matrix.zig_target }}
run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig

- run: npm ci
working-directory: editors/code

@ -139,7 +134,7 @@ jobs:
path: ./dist

dist-x86_64-unknown-linux-musl:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: dist (x86_64-unknown-linux-musl)
runs-on: ubuntu-latest
env:
@ -185,7 +180,7 @@ jobs:
path: ./dist

publish:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
needs: ["dist", "dist-x86_64-unknown-linux-musl"]
@ -193,7 +188,7 @@ jobs:
- name: Install Nodejs
uses: actions/setup-node@v4
with:
node-version: 20
node-version: 22

- run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV
if: github.ref == 'refs/heads/release'

@ -18,6 +18,8 @@ extend-ignore-re = [
"INOUT",
"optin",
"=Pn",
# ignore `// spellchecker:off` until `// spellchecker:on`
"(?s)(#|//)\\s*spellchecker:off.*?\\n\\s*(#|//)\\s*spellchecker:on",
]

[default.extend-words]

@ -22,9 +22,6 @@ name = "always-assert"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
dependencies = [
 "tracing",
]

[[package]]
name = "anyhow"
@ -1507,9 +1504,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_abi"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3829c3355d1681ffeaf1450ec71edcdace6820fe2e86469d8fc1ad45e2c96460"
checksum = "4b42cccfff8091a4c3397736518774dbad619e82f8def6f70d8e46dbbe396007"
dependencies = [
 "bitflags 2.7.0",
 "ra-ap-rustc_hashes",
@ -1519,18 +1516,18 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_hashes"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bd4d6d4c434bec08e02370a4f64a4985312097215a62e82d0f757f3a98e502e"
checksum = "46d8bd34ed6552c8cac1764106ef5adbeef3e5c7700e0ceb4c83a47a631894fe"
dependencies = [
 "rustc-stable-hash",
]

[[package]]
name = "ra-ap-rustc_index"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad6fc4bd7522e31096e2de5b0351144fe0684b608791ee26c842bf2da1b19ae"
checksum = "93799e4dccbbd47f8b66bc0aa42effc1b7077aaee09d8a40b86b8d659b80c7b7"
dependencies = [
 "ra-ap-rustc_index_macros",
 "smallvec",
@ -1538,9 +1535,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfb234e1f84b92be45276c3025bee18789e9bc95bec8789bec961e78edb01c52"
checksum = "30baa5d00f94ba437a9dcaf7ae074ebe4f367bb05a4c2835e0aa2e7af3463aac"
dependencies = [
 "proc-macro2",
 "quote",
@ -1549,9 +1546,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_lexer"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a3a40bd11dc43d1cb110e730b80620cf8102f4cca8920a02b65954da0ed931f"
checksum = "3004d1d1b50afe3e1f9cdd428a282da7ffbf5f26dd8bf04af0d651d44e4873d8"
dependencies = [
 "memchr",
 "unicode-properties",
@ -1560,9 +1557,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5feb877478994cb4c0c0c7a5116a352eefc0634aefc8636feb00a893fa5b7135"
checksum = "cb57e5124a64aaaf92c06130fbc1b8e1d547b5a2a96081f1f848e31c211df5d2"
dependencies = [
 "ra-ap-rustc_index",
 "ra-ap-rustc_lexer",
@ -1570,9 +1567,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a76774d35934d464c4115908cde16f76a4f7e540fe1eea6b79336c556e37bdd3"
checksum = "e427c3d30e4bdff28abd6b0ef3e6f4dfab44acd9468a4954eeff8717d8df8819"
dependencies = [
 "ra-ap-rustc_index",
 "rustc-hash 2.0.0",
@ -1940,13 +1937,13 @@ dependencies = [
name = "stdx"
version = "0.0.0"
dependencies = [
 "always-assert",
 "backtrace",
 "crossbeam-channel",
 "itertools",
 "jod-thread",
 "libc",
 "miow",
 "tracing",
 "windows-sys 0.59.0",
]

@ -25,7 +25,6 @@ salsa.opt-level = 3
miniz_oxide.opt-level = 3

[profile.release]
incremental = true
# Set this to 1 or 2 to get more useful backtraces in debugger.
debug = 0

@ -86,12 +85,12 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }

ra-ap-rustc_hashes = { version = "0.97", default-features = false }
ra-ap-rustc_lexer = { version = "0.97", default-features = false }
ra-ap-rustc_parse_format = { version = "0.97", default-features = false }
ra-ap-rustc_index = { version = "0.97", default-features = false }
ra-ap-rustc_abi = { version = "0.97", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.97", default-features = false }
ra-ap-rustc_hashes = { version = "0.98", default-features = false }
ra-ap-rustc_lexer = { version = "0.98", default-features = false }
ra-ap-rustc_parse_format = { version = "0.98", default-features = false }
ra-ap-rustc_index = { version = "0.98", default-features = false }
ra-ap-rustc_abi = { version = "0.98", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.98", default-features = false }

# local crates that aren't published to crates.io. These should not have versions.

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
lz4_flex = { version = "0.11", default-features = false }

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
rustc-hash.workspace = true

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
arrayvec.workspace = true

@ -5,7 +5,9 @@
//!
//! It is used like this:
//!
//! ```
//! ```ignore
//! # use hir_def::dyn_map::DynMap;
//! # use hir_def::dyn_map::Key;
//! // keys define submaps of a `DynMap`
//! const STRING_TO_U32: Key<String, u32> = Key::new();
//! const U32_TO_VEC: Key<u32, Vec<bool>> = Key::new();

@ -883,20 +883,20 @@ pub struct UseTree {

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum UseTreeKind {
    /// ```
    /// ```ignore
    /// use path::to::Item;
    /// use path::to::Item as Renamed;
    /// use path::to::Trait as _;
    /// ```
    Single { path: Interned<ModPath>, alias: Option<ImportAlias> },

    /// ```
    /// ```ignore
    /// use *; // (invalid, but can occur in nested tree)
    /// use path::*;
    /// ```
    Glob { path: Option<Interned<ModPath>> },

    /// ```
    /// ```ignore
    /// use prefix::{self, Item, ...};
    /// ```
    Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },

@ -22,7 +22,7 @@ use hir_expand::{
    db::ExpandDatabase,
    proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
    span_map::SpanMapRef,
    InFile, MacroCallKind, MacroFileId, MacroFileIdExt,
    InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
};
use intern::Symbol;
use itertools::Itertools;
@ -211,7 +211,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream

    if let Some(src) = src {
        if let Some(file_id) = src.file_id.macro_file() {
            if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
            if let MacroKind::Derive
            | MacroKind::DeriveBuiltIn
            | MacroKind::Attr
            | MacroKind::AttrBuiltIn = file_id.kind(&db)
            {
                let call = file_id.call_node(&db);
                let mut show_spans = false;
                let mut show_ctxt = false;
@ -236,7 +240,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
    for impl_id in def_map[local_id].scope.impls() {
        let src = impl_id.lookup(&db).source(&db);
        if let Some(macro_file) = src.file_id.macro_file() {
            if macro_file.is_builtin_derive(&db) {
            if let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) {
                let pp = pretty_print_macro_expansion(
                    src.value.syntax().clone(),
                    db.span_map(macro_file.into()).as_ref(),

@ -134,7 +134,7 @@ impl DirPath {
    /// So this is the case which doesn't really work I think if we try to be
    /// 100% platform agnostic:
    ///
    /// ```
    /// ```ignore
    /// mod a {
    ///     #[path="C://sad/face"]
    ///     mod b { mod c; }

@ -532,16 +532,17 @@ impl Resolver {
    /// Note that in Rust one name can be bound to several items:
    ///
    /// ```
    /// # #![allow(non_camel_case_types)]
    /// macro_rules! t { () => (()) }
    /// type t = t!();
    /// const t: t = t!()
    /// const t: t = t!();
    /// ```
    ///
    /// That's why we return a multimap.
    ///
    /// The shadowing is accounted for: in
    ///
    /// ```
    /// ```ignore
    /// let it = 92;
    /// {
    ///     let it = 92;

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"

@ -101,7 +101,7 @@ fn dummy_gate_test_expand(
/// somewhat inconsistently resolve derive attributes.
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
/// ```ignore
/// #![Foo]
/// #![bar::Bar]
/// ```

@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,

use crate::{
    db::{self, ExpandDatabase},
    map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt,
    map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt, MacroKind,
};

/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
@ -276,7 +276,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
            HirFileIdRepr::FileId(file_id) => {
                return Some(InRealFile { file_id, value: self.value.borrow().clone() })
            }
            HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
            HirFileIdRepr::MacroFile(m)
                if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
            {
                m
            }
            _ => return None,
        };

@ -453,7 +457,7 @@ impl<N: AstNode> InFile<N> {
            }
            HirFileIdRepr::MacroFile(m) => m,
        };
        if !file_id.is_attr_macro(db) {
        if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
            return None;
        }

@ -416,6 +416,24 @@ impl HirFileIdExt for HirFileId {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroKind {
    /// `macro_rules!` or Macros 2.0 macro.
    Declarative,
    /// A built-in function-like macro.
    DeclarativeBuiltIn,
    /// A custom derive.
    Derive,
    /// A built-in derive.
    DeriveBuiltIn,
    /// A procedural attribute macro.
    Attr,
    /// A built-in attribute macro.
    AttrBuiltIn,
    /// A function-like procedural macro.
    ProcMacro,
}

pub trait MacroFileIdExt {
    fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
    fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
@ -427,15 +445,12 @@ pub trait MacroFileIdExt {

    fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;

    fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool;
    fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool;
    fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;

    /// Return whether this file is an include macro
    fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;

    fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
    /// Return whether this file is an attr macro
    fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool;

    /// Return whether this file is the pseudo expansion of the derive attribute.
    /// See [`crate::builtin_attr_macro::derive_attr_expand`].
@ -468,18 +483,18 @@ impl MacroFileIdExt for MacroFileId {
        ExpansionInfo::new(db, self)
    }

    fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool {
        matches!(
            db.lookup_intern_macro_call(self.macro_call_id).def.kind,
            MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
        )
    }

    fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool {
        matches!(
            db.lookup_intern_macro_call(self.macro_call_id).def.kind,
            MacroDefKind::BuiltInDerive(..)
        )
    fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
        match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
            MacroDefKind::Declarative(..) => MacroKind::Declarative,
            MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
                MacroKind::DeclarativeBuiltIn
            }
            MacroDefKind::BuiltInDerive(..) => MacroKind::DeriveBuiltIn,
            MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => MacroKind::Derive,
            MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr) => MacroKind::Attr,
            MacroDefKind::ProcMacro(_, _, ProcMacroKind::Bang) => MacroKind::ProcMacro,
            MacroDefKind::BuiltInAttr(..) => MacroKind::AttrBuiltIn,
        }
    }

    fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {
@ -507,13 +522,6 @@ impl MacroFileIdExt for MacroFileId {
        }
    }

    fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
        matches!(
            db.lookup_intern_macro_call(self.macro_call_id).def.kind,
            MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
        )
    }

    fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
        let loc = db.lookup_intern_macro_call(self.macro_call_id);
        loc.def.is_attribute_derive()

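Note: the new `MacroKind` returned by `MacroFileIdExt::kind` replaces the old boolean queries (`is_custom_derive`, `is_builtin_derive`, `is_attr_macro`), so call sites now match on groups of variants instead. A minimal, self-contained sketch of that call-site pattern (the enum here is a copy for illustration only; the real one lives in `hir-expand` as shown above):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MacroKind {
    Declarative,
    DeclarativeBuiltIn,
    Derive,
    DeriveBuiltIn,
    Attr,
    AttrBuiltIn,
    ProcMacro,
}

// What used to be `is_attr_macro(..)` becomes a two-variant match: built-in
// and proc-macro attributes stay distinguishable but can still be grouped.
fn is_attr_macro(kind: MacroKind) -> bool {
    matches!(kind, MacroKind::Attr | MacroKind::AttrBuiltIn)
}

fn main() {
    assert!(is_attr_macro(MacroKind::AttrBuiltIn));
    assert!(!is_attr_macro(MacroKind::DeriveBuiltIn));
}
```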
@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"

@ -354,12 +354,43 @@ fn overflowing_add() {
fn needs_drop() {
    check_number(
        r#"
//- minicore: copy, sized
//- minicore: drop, manually_drop, copy, sized
use core::mem::ManuallyDrop;
extern "rust-intrinsic" {
    pub fn needs_drop<T: ?Sized>() -> bool;
}
struct X;
const GOAL: bool = !needs_drop::<i32>() && needs_drop::<X>();
struct NeedsDrop;
impl Drop for NeedsDrop {
    fn drop(&mut self) {}
}
enum Enum<T> {
    A(T),
    B(X),
}
const fn val_needs_drop<T>(_v: T) -> bool { needs_drop::<T>() }
const fn closure_needs_drop() -> bool {
    let a = NeedsDrop;
    let b = X;
    !val_needs_drop(|| &a) && val_needs_drop(move || &a) && !val_needs_drop(move || &b)
}
const fn opaque() -> impl Sized {
    || {}
}
const fn opaque_copy() -> impl Sized + Copy {
    || {}
}
trait Everything {}
impl<T> Everything for T {}
const GOAL: bool = !needs_drop::<i32>() && !needs_drop::<X>()
    && needs_drop::<NeedsDrop>() && !needs_drop::<ManuallyDrop<NeedsDrop>>()
    && needs_drop::<[NeedsDrop; 1]>() && !needs_drop::<[NeedsDrop; 0]>()
    && needs_drop::<(X, NeedsDrop)>()
    && needs_drop::<Enum<NeedsDrop>>() && !needs_drop::<Enum<X>>()
    && closure_needs_drop()
    && !val_needs_drop(opaque()) && !val_needs_drop(opaque_copy())
    && needs_drop::<[NeedsDrop]>() && needs_drop::<dyn Everything>()
    && !needs_drop::<&dyn Everything>() && !needs_drop::<str>();
"#,
        1,
    );

@ -13,6 +13,7 @@ use hir_def::{
    ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
    LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId,
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use triomphe::Arc;
@ -20,6 +21,7 @@ use triomphe::Arc;
use crate::{
    chalk_db,
    consteval::ConstEvalError,
    drop::DropGlue,
    dyn_compatibility::DynCompatibilityViolation,
    layout::{Layout, LayoutError},
    lower::{Diagnostics, GenericDefaults, GenericPredicates},
@ -28,7 +30,6 @@ use crate::{
    Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
    PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;

#[ra_salsa::query_group(HirDatabaseStorage)]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
@ -305,6 +306,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
        block: Option<BlockId>,
        env: chalk_ir::Environment<Interner>,
    ) -> chalk_ir::ProgramClauses<Interner>;

    #[ra_salsa::invoke(crate::drop::has_drop_glue)]
    #[ra_salsa::cycle(crate::drop::has_drop_glue_recover)]
    fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
}

#[test]

@ -95,7 +95,7 @@ pub struct HirFormatter<'a> {
enum BoundsFormattingCtx {
    Entered {
        /// We can have recursive bounds like the following case:
        /// ```rust
        /// ```ignore
        /// where
        ///     T: Foo,
        ///     T::FooAssoc: Baz<<T::FooAssoc as Bar>::BarAssoc> + Bar

src/tools/rust-analyzer/crates/hir-ty/src/drop.rs (new file, 209 lines)
@ -0,0 +1,209 @@
//! Utilities for computing drop info about types.

use base_db::ra_salsa;
use chalk_ir::cast::Cast;
use hir_def::data::adt::StructFlags;
use hir_def::lang_item::LangItem;
use hir_def::AdtId;
use stdx::never;
use triomphe::Arc;

use crate::{
    db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds,
    InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind,
};
use crate::{ConcreteConst, ConstScalar, ConstValue};

fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
    let module = match adt {
        AdtId::EnumId(id) => db.lookup_intern_enum(id).container,
        AdtId::StructId(id) => db.lookup_intern_struct(id).container,
        AdtId::UnionId(id) => db.lookup_intern_union(id).container,
    };
    let Some(drop_trait) =
        db.lang_item(module.krate(), LangItem::Drop).and_then(|it| it.as_trait())
    else {
        return false;
    };
    let impls = match module.containing_block() {
        Some(block) => match db.trait_impls_in_block(block) {
            Some(it) => it,
            None => return false,
        },
        None => db.trait_impls_in_crate(module.krate()),
    };
    let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some();
    result
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum DropGlue {
    // Order of variants is important.
    None,
    /// May have a drop glue if some type parameter has it.
    ///
    /// For the compiler this is considered as a positive result, IDE distinguishes this from "yes".
    DependOnParams,
    HasDropGlue,
}

pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue {
    match ty.kind(Interner) {
        TyKind::Adt(adt, subst) => {
            if has_destructor(db, adt.0) {
                return DropGlue::HasDropGlue;
            }
            match adt.0 {
                AdtId::StructId(id) => {
                    if db.struct_data(id).flags.contains(StructFlags::IS_MANUALLY_DROP) {
                        return DropGlue::None;
                    }
                    db.field_types(id.into())
                        .iter()
                        .map(|(_, field_ty)| {
                            db.has_drop_glue(
                                field_ty.clone().substitute(Interner, subst),
                                env.clone(),
                            )
                        })
                        .max()
                        .unwrap_or(DropGlue::None)
                }
                // Unions cannot have fields with destructors.
                AdtId::UnionId(_) => DropGlue::None,
                AdtId::EnumId(id) => db
                    .enum_data(id)
                    .variants
                    .iter()
                    .map(|&(variant, _)| {
                        db.field_types(variant.into())
                            .iter()
                            .map(|(_, field_ty)| {
                                db.has_drop_glue(
                                    field_ty.clone().substitute(Interner, subst),
                                    env.clone(),
                                )
                            })
                            .max()
                            .unwrap_or(DropGlue::None)
                    })
                    .max()
                    .unwrap_or(DropGlue::None),
            }
        }
        TyKind::Tuple(_, subst) => subst
            .iter(Interner)
            .map(|ty| ty.assert_ty_ref(Interner))
            .map(|ty| db.has_drop_glue(ty.clone(), env.clone()))
            .max()
            .unwrap_or(DropGlue::None),
        TyKind::Array(ty, len) => {
            if let ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Bytes(len, _) }) =
                &len.data(Interner).value
            {
                match (&**len).try_into() {
                    Ok(len) => {
                        let len = usize::from_le_bytes(len);
                        if len == 0 {
                            // Arrays of size 0 don't have drop glue.
                            return DropGlue::None;
                        }
                    }
                    Err(_) => {
                        never!("const array size with non-usize len");
                    }
                }
            }
            db.has_drop_glue(ty.clone(), env)
        }
        TyKind::Slice(ty) => db.has_drop_glue(ty.clone(), env),
        TyKind::Closure(closure_id, subst) => {
            let owner = db.lookup_intern_closure((*closure_id).into()).0;
            let infer = db.infer(owner);
            let (captures, _) = infer.closure_info(closure_id);
            let env = db.trait_environment_for_body(owner);
            captures
                .iter()
                .map(|capture| db.has_drop_glue(capture.ty(subst), env.clone()))
                .max()
                .unwrap_or(DropGlue::None)
        }
        // FIXME: Handle coroutines.
        TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) => DropGlue::None,
        TyKind::Ref(..)
        | TyKind::Raw(..)
        | TyKind::FnDef(..)
        | TyKind::Str
        | TyKind::Never
        | TyKind::Scalar(_)
        | TyKind::Function(_)
        | TyKind::Foreign(_)
        | TyKind::Error => DropGlue::None,
        TyKind::Dyn(_) => DropGlue::HasDropGlue,
        TyKind::AssociatedType(assoc_type_id, subst) => projection_has_drop_glue(
            db,
            env,
            ProjectionTy { associated_ty_id: *assoc_type_id, substitution: subst.clone() },
            ty,
        ),
        TyKind::Alias(AliasTy::Projection(projection)) => {
            projection_has_drop_glue(db, env, projection.clone(), ty)
        }
        TyKind::OpaqueType(..) | TyKind::Alias(AliasTy::Opaque(_)) => {
            if is_copy(db, ty, env) {
                DropGlue::None
            } else {
                DropGlue::HasDropGlue
            }
        }
        TyKind::Placeholder(_) | TyKind::BoundVar(_) => {
            if is_copy(db, ty, env) {
                DropGlue::None
            } else {
                DropGlue::DependOnParams
            }
        }
        TyKind::InferenceVar(..) => unreachable!("inference vars shouldn't exist out of inference"),
    }
}

fn projection_has_drop_glue(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    projection: ProjectionTy,
    ty: Ty,
) -> DropGlue {
    let normalized = db.normalize_projection(projection, env.clone());
    match normalized.kind(Interner) {
        TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(..) => {
            if is_copy(db, ty, env) {
                DropGlue::None
            } else {
                DropGlue::DependOnParams
            }
        }
        _ => db.has_drop_glue(normalized, env),
    }
}

fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
    let Some(copy_trait) = db.lang_item(env.krate, LangItem::Copy).and_then(|it| it.as_trait())
    else {
        return false;
    };
    let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build();
    let goal = Canonical {
        value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
        binders: CanonicalVarKinds::empty(Interner),
    };
    db.trait_solve(env.krate, env.block, goal).is_some()
}

pub(crate) fn has_drop_glue_recover(
    _db: &dyn HirDatabase,
    _cycle: &ra_salsa::Cycle,
    _ty: &Ty,
    _env: &Arc<TraitEnvironment>,
) -> DropGlue {
    DropGlue::None
}

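Note: `DropGlue` derives `Ord` and the query folds field results with `.max()`, so the declared variant order (`None < DependOnParams < HasDropGlue`) is exactly what makes "any field has glue" win. A minimal, self-contained sketch of that folding behavior (a standalone copy of the enum, not the real query):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum DropGlue {
    None,
    DependOnParams,
    HasDropGlue,
}

// Mirrors the `.max().unwrap_or(DropGlue::None)` pattern used above for
// struct fields, enum variants, tuple elements and closure captures.
fn combine(fields: impl IntoIterator<Item = DropGlue>) -> DropGlue {
    fields.into_iter().max().unwrap_or(DropGlue::None)
}

fn main() {
    assert_eq!(combine([]), DropGlue::None);
    assert_eq!(combine([DropGlue::None, DropGlue::DependOnParams]), DropGlue::DependOnParams);
    assert_eq!(combine([DropGlue::DependOnParams, DropGlue::HasDropGlue]), DropGlue::HasDropGlue);
}
```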
@ -335,7 +335,7 @@ impl Default for InternedStandardTypes {
/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
/// represented by:
///
/// ```
/// ```ignore
/// Deref(None) -> [i32; 4],
/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
/// Unsize -> &[i32],

@ -481,9 +481,10 @@ pub struct InferenceResult {
/// or pattern can have multiple binding modes. For example:
/// ```
/// fn foo(mut slice: &[u32]) -> usize {
///     slice = match slice {
///         [0, rest @ ..] | rest => rest,
///     };
///     slice = match slice {
///         [0, rest @ ..] | rest => rest,
///     };
///     0
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.

@ -133,40 +133,22 @@ fn layout_of_simd_ty(
    env: Arc<TraitEnvironment>,
    dl: &TargetDataLayout,
) -> Result<Arc<Layout>, LayoutError> {
    let fields = db.field_types(id.into());

    // Supported SIMD vectors are homogeneous ADTs with at least one field:
    // Supported SIMD vectors are homogeneous ADTs with exactly one array field:
    //
    // * #[repr(simd)] struct S(T, T, T, T);
    // * #[repr(simd)] struct S { it: T, y: T, z: T, w: T }
    // * #[repr(simd)] struct S([T; 4])
    //
    // where T is a primitive scalar (integer/float/pointer).

    let f0_ty = match fields.iter().next() {
        Some(it) => it.1.clone().substitute(Interner, subst),
        None => return Err(LayoutError::InvalidSimdType),
    let fields = db.field_types(id.into());
    let mut fields = fields.iter();
    let Some(TyKind::Array(e_ty, e_len)) = fields
        .next()
        .filter(|_| fields.next().is_none())
        .map(|f| f.1.clone().substitute(Interner, subst).kind(Interner).clone())
    else {
        return Err(LayoutError::InvalidSimdType);
    };

    // The element type and number of elements of the SIMD vector
    // are obtained from:
    //
    // * the element type and length of the single array field, if
    //   the first field is of array type, or
    //
    // * the homogeneous field type and the number of fields.
    let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) {
        // Extract the number of elements from the layout of the array field:
        let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields
        else {
            return Err(LayoutError::Unknown);
        };

        (e_ty.clone(), count, true)
    } else {
        // First ADT field is not an array:
        (f0_ty, fields.iter().count() as u64, false)
    };
    let e_len = try_const_usize(db, &e_len).ok_or(LayoutError::HasErrorConst)? as u64;

    // Compute the ABI of the element type:
    let e_ly = db.layout_of_ty(e_ty, env)?;
@ -182,16 +164,9 @@ fn layout_of_simd_ty(
    let align = dl.llvmlike_vector_align(size);
    let size = size.align_to(align.abi);

    // Compute the placement of the vector fields:
    let fields = if is_array {
        FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
    } else {
        FieldsShape::Array { stride: e_ly.size, count: e_len }
    };

    Ok(Arc::new(Layout {
        variants: Variants::Single { index: struct_variant_idx() },
        fields,
        fields: FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() },
        backend_repr: BackendRepr::SimdVector { element: e_abi, count: e_len },
        largest_niche: e_ly.largest_niche,
        uninhabited: false,

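Note: after this change only the array form of `#[repr(simd)]` is laid out; the element type and lane count are read from the single array field, and anything else is `LayoutError::InvalidSimdType`. A minimal model of that validation rule (toy `Ty`, not the real hir-ty types):

```rust
#[derive(Debug, PartialEq)]
enum Ty {
    Scalar(&'static str),
    Array(Box<Ty>, u64),
}

// Accept exactly one field, and only if it is an array; return its element
// type and length. Everything else mirrors `LayoutError::InvalidSimdType`.
fn simd_element_and_len(fields: &[Ty]) -> Option<(&Ty, u64)> {
    match fields {
        [Ty::Array(elem, len)] => Some((&**elem, *len)),
        _ => None,
    }
}

fn main() {
    // #[repr(simd)] struct S([f32; 4]); -- accepted
    let ok = [Ty::Array(Box::new(Ty::Scalar("f32")), 4)];
    assert_eq!(simd_element_and_len(&ok), Some((&Ty::Scalar("f32"), 4)));
    // #[repr(simd)] struct T(f32, f32, f32, f32); -- no longer handled
    let bad = [Ty::Scalar("f32"), Ty::Scalar("f32"), Ty::Scalar("f32"), Ty::Scalar("f32")];
    assert_eq!(simd_element_and_len(&bad), None);
}
```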
@ -30,11 +30,13 @@ extern crate ra_ap_rustc_hashes as rustc_hashes;
mod builder;
mod chalk_db;
mod chalk_ext;
mod drop;
mod infer;
mod inhabitedness;
mod interner;
mod lower;
mod mapping;
mod target_feature;
mod tls;
mod utils;

@ -87,6 +89,7 @@ use crate::{
pub use autoderef::autoderef;
pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use drop::DropGlue;
pub use infer::{
    cast::CastError,
    closure::{CaptureKind, CapturedItem},
@ -105,10 +108,9 @@ pub use mapping::{
    to_foreign_def_id, to_placeholder_idx,
};
pub use method_resolution::check_orphan_rules;
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use utils::{
    all_super_traits, direct_super_traits, is_fn_unsafe_to_call, TargetFeatures, Unsafety,
};
pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call, Unsafety};
pub use variance::Variance;

pub use chalk_ir::{

@ -11,6 +11,7 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::{sym, Symbol};
use stdx::never;

use crate::{
    error_lifetime,
@ -20,6 +21,7 @@ use crate::{
        LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
        Ty, TyBuilder, TyExt,
    },
    DropGlue,
};

mod simd;
@ -853,7 +855,14 @@ impl Evaluator<'_> {
                        "size_of generic arg is not provided".into(),
                    ));
                };
                let result = !ty.clone().is_copy(self.db, locals.body.owner);
                let result = match self.db.has_drop_glue(ty.clone(), self.trait_env.clone()) {
                    DropGlue::HasDropGlue => true,
                    DropGlue::None => false,
                    DropGlue::DependOnParams => {
                        never!("should be fully monomorphized now");
                        true
                    }
                };
                destination.write_from_bytes(self, &[u8::from(result)])
            }
            "ptr_guaranteed_cmp" => {

src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs (new file, 261 lines)
@ -0,0 +1,261 @@
//! Stuff for handling `#[target_feature]` (needed for unsafe check).

use std::sync::LazyLock;

use hir_def::attr::Attrs;
use hir_def::tt;
use intern::{sym, Symbol};
use rustc_hash::{FxHashMap, FxHashSet};

#[derive(Debug, Default)]
pub struct TargetFeatures {
    pub(crate) enabled: FxHashSet<Symbol>,
}

impl TargetFeatures {
    pub fn from_attrs(attrs: &Attrs) -> Self {
        let mut result = TargetFeatures::from_attrs_no_implications(attrs);
        result.expand_implications();
        result
    }

    fn expand_implications(&mut self) {
        let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
        let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
        while let Some(feature) = queue.pop() {
            if let Some(implications) = all_implications.get(&feature) {
                for implication in implications {
                    if self.enabled.insert(implication.clone()) {
                        queue.push(implication.clone());
                    }
                }
            }
        }
    }

    /// Retrieves the target features from the attributes, and does not expand the target features implied by them.
    pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
        let enabled = attrs
            .by_key(&sym::target_feature)
            .tt_values()
            .filter_map(|tt| {
                match tt.token_trees().flat_tokens() {
                    [
                        tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
                        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
                        tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
                    ] if enable_ident.sym == sym::enable => Some(features),
                    _ => None,
                }
            })
            .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
            .collect();
        Self { enabled }
    }
}

// List of the target features each target feature implies.
// Ideally we'd depend on rustc for this, but rustc_target doesn't compile on stable,
// and t-compiler prefers for it to stay this way.

static TARGET_FEATURE_IMPLICATIONS: LazyLock<FxHashMap<Symbol, Box<[Symbol]>>> =
    LazyLock::new(|| {
        let mut result = FxHashMap::<Symbol, FxHashSet<Symbol>>::default();
        for &(feature_str, implications) in TARGET_FEATURE_IMPLICATIONS_RAW {
            let feature = Symbol::intern(feature_str);
            let implications = implications.iter().copied().map(Symbol::intern);
            // Some target features appear in two archs, e.g. Arm and x86.
            // Sometimes they contain different implications, e.g. `aes`.
            // We should probably choose by the active arch, but for now just merge them.
            result.entry(feature).or_default().extend(implications);
        }
        let mut result = result
            .into_iter()
            .map(|(feature, implications)| (feature, Box::from_iter(implications)))
            .collect::<FxHashMap<_, _>>();
        result.shrink_to_fit();
        result
    });

// spellchecker:off
const TARGET_FEATURE_IMPLICATIONS_RAW: &[(&str, &[&str])] = &[
    // Arm
    ("aes", &["neon"]),
    ("dotprod", &["neon"]),
    ("fp-armv8", &["vfp4"]),
    ("fp16", &["neon"]),
    ("i8mm", &["neon"]),
    ("neon", &["vfp3"]),
    ("sha2", &["neon"]),
    ("v6", &["v5te"]),
    ("v6k", &["v6"]),
    ("v6t2", &["v6k", "thumb2"]),
    ("v7", &["v6t2"]),
    ("v8", &["v7"]),
    ("vfp3", &["vfp2", "d32"]),
    ("vfp4", &["vfp3"]),
    // Aarch64
    ("aes", &["neon"]),
    ("dotprod", &["neon"]),
    ("dpb2", &["dpb"]),
    ("f32mm", &["sve"]),
    ("f64mm", &["sve"]),
    ("fcma", &["neon"]),
    ("fhm", &["fp16"]),
    ("fp16", &["neon"]),
    ("fp8", &["faminmax", "lut", "bf16"]),
    ("fp8dot2", &["fp8dot4"]),
    ("fp8dot4", &["fp8fma"]),
    ("fp8fma", &["fp8"]),
    ("jsconv", &["neon"]),
    ("lse128", &["lse"]),
    ("rcpc2", &["rcpc"]),
    ("rcpc3", &["rcpc2"]),
    ("rdm", &["neon"]),
    ("sha2", &["neon"]),
    ("sha3", &["sha2"]),
    ("sm4", &["neon"]),
    ("sme", &["bf16"]),
    ("sme-b16b16", &["bf16", "sme2", "sve-b16b16"]),
    ("sme-f16f16", &["sme2"]),
    ("sme-f64f64", &["sme"]),
    ("sme-f8f16", &["sme-f8f32"]),
    ("sme-f8f32", &["sme2", "fp8"]),
    ("sme-fa64", &["sme", "sve2"]),
    ("sme-i16i64", &["sme"]),
    ("sme2", &["sme"]),
    ("sme2p1", &["sme2"]),
    ("ssve-fp8dot2", &["ssve-fp8dot4"]),
    ("ssve-fp8dot4", &["ssve-fp8fma"]),
    ("ssve-fp8fma", &["sme2", "fp8"]),
    ("sve", &["neon"]),
    ("sve-b16b16", &["bf16"]),
    ("sve2", &["sve"]),
    ("sve2-aes", &["sve2", "aes"]),
    ("sve2-bitperm", &["sve2"]),
    ("sve2-sha3", &["sve2", "sha3"]),
    ("sve2-sm4", &["sve2", "sm4"]),
    ("sve2p1", &["sve2"]),
    ("v8.1a", &["crc", "lse", "rdm", "pan", "lor", "vh"]),
    ("v8.2a", &["v8.1a", "ras", "dpb"]),
    ("v8.3a", &["v8.2a", "rcpc", "paca", "pacg", "jsconv"]),
    ("v8.4a", &["v8.3a", "dotprod", "dit", "flagm"]),
    ("v8.5a", &["v8.4a", "ssbs", "sb", "dpb2", "bti"]),
    ("v8.6a", &["v8.5a", "bf16", "i8mm"]),
    ("v8.7a", &["v8.6a", "wfxt"]),
    ("v8.8a", &["v8.7a", "hbc", "mops"]),
    ("v8.9a", &["v8.8a", "cssc"]),
    ("v9.1a", &["v9a", "v8.6a"]),
    ("v9.2a", &["v9.1a", "v8.7a"]),
    ("v9.3a", &["v9.2a", "v8.8a"]),
    ("v9.4a", &["v9.3a", "v8.9a"]),
    ("v9.5a", &["v9.4a"]),
    ("v9a", &["v8.5a", "sve2"]),
    // x86
    ("aes", &["sse2"]),
    ("amx-bf16", &["amx-tile"]),
    ("amx-complex", &["amx-tile"]),
    ("amx-fp16", &["amx-tile"]),
    ("amx-int8", &["amx-tile"]),
    ("avx", &["sse4.2"]),
    ("avx2", &["avx"]),
    ("avx512bf16", &["avx512bw"]),
    ("avx512bitalg", &["avx512bw"]),
    ("avx512bw", &["avx512f"]),
    ("avx512cd", &["avx512f"]),
    ("avx512dq", &["avx512f"]),
    ("avx512f", &["avx2", "fma", "f16c"]),
    ("avx512fp16", &["avx512bw", "avx512vl", "avx512dq"]),
    ("avx512ifma", &["avx512f"]),
    ("avx512vbmi", &["avx512bw"]),
    ("avx512vbmi2", &["avx512bw"]),
    ("avx512vl", &["avx512f"]),
    ("avx512vnni", &["avx512f"]),
    ("avx512vp2intersect", &["avx512f"]),
    ("avx512vpopcntdq", &["avx512f"]),
    ("avxifma", &["avx2"]),
    ("avxneconvert", &["avx2"]),
    ("avxvnni", &["avx2"]),
    ("avxvnniint16", &["avx2"]),
    ("avxvnniint8", &["avx2"]),
    ("f16c", &["avx"]),
    ("fma", &["avx"]),
    ("gfni", &["sse2"]),
    ("kl", &["sse2"]),
    ("pclmulqdq", &["sse2"]),
    ("sha", &["sse2"]),
    ("sha512", &["avx2"]),
    ("sm3", &["avx"]),
    ("sm4", &["avx2"]),
    ("sse2", &["sse"]),
    ("sse3", &["sse2"]),
    ("sse4.1", &["ssse3"]),
    ("sse4.2", &["sse4.1"]),
    ("sse4a", &["sse3"]),
    ("ssse3", &["sse3"]),
    ("vaes", &["avx2", "aes"]),
    ("vpclmulqdq", &["avx", "pclmulqdq"]),
    ("widekl", &["kl"]),
    ("xop", &[/*"fma4", */ "avx", "sse4a"]),
    ("xsavec", &["xsave"]),
    ("xsaveopt", &["xsave"]),
    ("xsaves", &["xsave"]),
    // Hexagon
    ("hvx-length128b", &["hvx"]),
    // PowerPC
    ("power10-vector", &["power9-vector"]),
    ("power8-altivec", &["altivec"]),
    ("power8-crypto", &["power8-altivec"]),
    ("power8-vector", &["vsx", "power8-altivec"]),
    ("power9-altivec", &["power8-altivec"]),
    ("power9-vector", &["power8-vector", "power9-altivec"]),
    ("vsx", &["altivec"]),
    // MIPS
    // RISC-V
    ("a", &["zaamo", "zalrsc"]),
    ("d", &["f"]),
    ("zabha", &["zaamo"]),
    ("zdinx", &["zfinx"]),
    ("zfh", &["zfhmin"]),
    ("zfhmin", &["f"]),
    ("zhinx", &["zhinxmin"]),
    ("zhinxmin", &["zfinx"]),
    ("zk", &["zkn", "zkr", "zkt"]),
    ("zkn", &["zbkb", "zbkc", "zbkx", "zkne", "zknd", "zknh"]),
    ("zks", &["zbkb", "zbkc", "zbkx", "zksed", "zksh"]),
    // WASM
    ("relaxed-simd", &["simd128"]),
    // BPF
    ("alu32", &[]),
    // CSKY
    ("10e60", &["7e10"]),
    ("2e3", &["e2"]),
    ("3e3r2", &["3e3r1", "doloop"]),
    ("3e3r3", &["doloop"]),
    ("3e7", &["2e3"]),
    ("7e10", &["3e7"]),
    ("e1", &["elrw"]),
    ("e2", &["e2"]),
    ("mp", &["2e3"]),
    ("mp1e2", &["3e7"]),
    // LoongArch
    ("d", &["f"]),
    ("lasx", &["lsx"]),
    ("lsx", &["d"]),
    // IBM Z
    ("nnp-assist", &["vector"]),
    ("vector-enhancements-1", &["vector"]),
    ("vector-enhancements-2", &["vector-enhancements-1"]),
    ("vector-packed-decimal", &["vector"]),
    ("vector-packed-decimal-enhancement", &["vector-packed-decimal"]),
    ("vector-packed-decimal-enhancement-2", &["vector-packed-decimal-enhancement"]),
    // SPARC
    // m68k
    ("isa-68010", &["isa-68000"]),
    ("isa-68020", &["isa-68010"]),
    ("isa-68030", &["isa-68020"]),
    ("isa-68040", &["isa-68030", "isa-68882"]),
    ("isa-68060", &["isa-68040"]),
    ("isa-68882", &["isa-68881"]),
];
// spellchecker:on

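Note: `expand_implications` takes the transitive closure of the raw table with a worklist, and the unsafe check in `is_fn_unsafe_to_call` (below) then compares caller features (closed under implications) against the callee's literal `#[target_feature]` list via `is_superset`, per RFC 2396. A minimal, self-contained sketch of the closure step (std collections instead of the interned `Symbol` sets used above):

```rust
use std::collections::{HashMap, HashSet};

fn expand(enabled: &mut HashSet<&'static str>, table: &HashMap<&'static str, Vec<&'static str>>) {
    let mut queue: Vec<_> = enabled.iter().copied().collect();
    while let Some(feature) = queue.pop() {
        if let Some(implied) = table.get(feature) {
            for &f in implied {
                // Only enqueue features seen for the first time, so the
                // loop terminates even on a cyclic table.
                if enabled.insert(f) {
                    queue.push(f);
                }
            }
        }
    }
}

fn main() {
    let table = HashMap::from([
        ("avx2", vec!["avx"]),
        ("avx", vec!["sse4.2"]),
        ("sse4.2", vec!["sse4.1"]),
    ]);
    let mut enabled = HashSet::from(["avx2"]);
    expand(&mut enabled, &table);
    assert!(["avx2", "avx", "sse4.2", "sse4.1"].iter().all(|f| enabled.contains(f)));
}
```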
@ -9,18 +9,16 @@ use chalk_ir::{
    DebruijnIndex,
};
use hir_def::{
    attr::Attrs,
    db::DefDatabase,
    generics::{WherePredicate, WherePredicateTypeTarget},
    lang_item::LangItem,
    resolver::{HasResolver, TypeNs},
    tt,
    type_ref::{TraitBoundModifier, TypeRef},
    EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
    TypeOrConstParamId,
};
use hir_expand::name::Name;
use intern::{sym, Symbol};
use intern::sym;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
@ -32,8 +30,8 @@ use crate::{
    db::HirDatabase,
    layout::{Layout, TagEncoding},
    mir::pad16,
    ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
    Ty, WhereClause,
    ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef,
    TraitRefExt, Ty, WhereClause,
};

pub(crate) fn fn_traits(
@ -267,32 +265,6 @@ impl<'a> ClosureSubst<'a> {
    }
}

#[derive(Debug, Default)]
pub struct TargetFeatures {
    enabled: FxHashSet<Symbol>,
}

impl TargetFeatures {
    pub fn from_attrs(attrs: &Attrs) -> Self {
        let enabled = attrs
            .by_key(&sym::target_feature)
            .tt_values()
            .filter_map(|tt| {
                match tt.token_trees().flat_tokens() {
                    [
                        tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
                        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
                        tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
                    ] if enable_ident.sym == sym::enable => Some(features),
                    _ => None,
                }
            })
            .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
            .collect();
        Self { enabled }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Unsafety {
    Safe,
@ -314,7 +286,8 @@ pub fn is_fn_unsafe_to_call(

    if data.has_target_feature() {
        // RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
        let callee_target_features = TargetFeatures::from_attrs(&db.attrs(func.into()));
        let callee_target_features =
            TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
        if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
            return Unsafety::Unsafe;
        }

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
rustc-hash.workspace = true

@ -142,7 +142,7 @@ pub use {
    name::Name,
    prettify_macro_expansion,
    proc_macro::{ProcMacros, ProcMacrosBuilder},
    tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
    tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
},
hir_ty::{
    consteval::ConstEvalError,
@ -152,7 +152,7 @@ pub use {
    layout::LayoutError,
    method_resolution::TyFingerprint,
    mir::{MirEvalError, MirLowerError},
    CastError, FnAbi, PointerCast, Safety, Variance,
    CastError, DropGlue, FnAbi, PointerCast, Safety, Variance,
},
// FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner},
@ -699,7 +699,10 @@ impl Module {
    let source_map = tree_source_maps.impl_(loc.id.value).item();
    let node = &tree[loc.id.value];
    let file_id = loc.id.file_id();
    if file_id.macro_file().is_some_and(|it| it.is_builtin_derive(db.upcast())) {
    if file_id
        .macro_file()
        .is_some_and(|it| it.kind(db.upcast()) == MacroKind::DeriveBuiltIn)
    {
        // these expansions come from us, diagnosing them is a waste of resources
        // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
        continue;
@ -1391,6 +1394,10 @@ impl Struct {
        Type::from_def(db, self.id)
    }

    pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
        Type::from_def_placeholders(db, self.id)
    }

    pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
        Type::from_value_def(db, self.id)
    }
@ -1436,6 +1443,10 @@ impl Union {
        Type::from_def(db, self.id)
    }

    pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
        Type::from_def_placeholders(db, self.id)
    }

    pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
        Type::from_value_def(db, self.id)
    }
@ -1490,6 +1501,10 @@ impl Enum {
        Type::from_def(db, self.id)
    }

    pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
        Type::from_def_placeholders(db, self.id)
    }

    /// The type of the enum variant bodies.
    pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
        Type::new_for_crate(
@ -2929,6 +2944,10 @@ impl TypeAlias {
        Type::from_def(db, self.id)
    }

    pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
        Type::from_def_placeholders(db, self.id)
    }

    pub fn name(self, db: &dyn HirDatabase) -> Name {
        db.type_alias_data(self.id).name.clone()
    }
@ -3033,20 +3052,6 @@ impl BuiltinType {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroKind {
    /// `macro_rules!` or Macros 2.0 macro.
    Declarative,
    /// A built-in or custom derive.
    Derive,
    /// A built-in function-like macro.
    BuiltIn,
    /// A procedural attribute macro.
    Attr,
    /// A function-like procedural macro.
    ProcMacro,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro {
    pub(crate) id: MacroId,
@ -3077,15 +3082,19 @@ impl Macro {
        match self.id {
            MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
                MacroExpander::Declarative => MacroKind::Declarative,
                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
                    MacroKind::DeclarativeBuiltIn
                }
                MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
                MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
            },
            MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
                MacroExpander::Declarative => MacroKind::Declarative,
                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
                    MacroKind::DeclarativeBuiltIn
                }
                MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
                MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
            },
            MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
                ProcMacroKind::CustomDerive => MacroKind::Derive,
@ -3096,10 +3105,10 @@ impl Macro {
    }

    pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
        match self.kind(db) {
            MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
            MacroKind::Attr | MacroKind::Derive => false,
        }
        matches!(
            self.kind(db),
            MacroKind::Declarative | MacroKind::DeclarativeBuiltIn | MacroKind::ProcMacro
        )
    }

    pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
@ -3139,11 +3148,11 @@ impl Macro {
    }

    pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
        matches!(self.kind(db), MacroKind::Attr)
        matches!(self.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn)
    }

    pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
        matches!(self.kind(db), MacroKind::Derive)
        matches!(self.kind(db), MacroKind::Derive | MacroKind::DeriveBuiltIn)
    }
}

@ -4708,6 +4717,19 @@ impl Type {
        Type::new(db, def, ty.substitute(Interner, &substs))
    }

    fn from_def_placeholders(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
        let ty = db.ty(def.into());
        let substs = TyBuilder::placeholder_subst(
            db,
            match def.into() {
                TyDefId::AdtId(it) => GenericDefId::AdtId(it),
                TyDefId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
                TyDefId::BuiltinType(_) => return Type::new(db, def, ty.skip_binders().clone()),
            },
        );
        Type::new(db, def, ty.substitute(Interner, &substs))
    }

    fn from_value_def(db: &dyn HirDatabase, def: impl Into<ValueTyDefId> + HasResolver) -> Type {
        let Some(ty) = db.value_ty(def.into()) else {
            return Type::new(db, def, TyKind::Error.intern(Interner));
@ -5737,6 +5759,10 @@ impl Type {
        db.layout_of_ty(self.ty.clone(), self.env.clone())
            .map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
    }

    pub fn drop_glue(&self, db: &dyn HirDatabase) -> DropGlue {
        db.has_drop_glue(self.ty.clone(), self.env.clone())
    }
}

#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]

@ -508,9 +508,7 @@ impl<'db> SemanticsImpl<'db> {
        })
    }

    pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
        let file_id = self.find_file(adt.syntax()).file_id;
        let adt = InFile::new(file_id, adt);
    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
        self.with_ctx(|ctx| ctx.has_derives(adt))
    }

@ -551,10 +549,8 @@ impl<'db> SemanticsImpl<'db> {
        res.is_empty().not().then_some(res)
    }

    pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
        let file_id = self.find_file(item.syntax()).file_id;
        let src = InFile::new(file_id, item);
        self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
    }

    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
@ -1526,8 +1522,13 @@ impl<'db> SemanticsImpl<'db> {
        self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
    }

    // FIXME: Replace this with `resolve_macro_call2`
    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
        self.resolve_macro_call2(macro_call)
    }

    pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
        self.with_ctx(|ctx| {
            ctx.macro_call_to_macro_call(macro_call)
                .and_then(|call| macro_call_to_macro_id(ctx, call))
@ -1538,8 +1539,8 @@ impl<'db> SemanticsImpl<'db> {
        })
    }

    pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
        self.resolve_macro_call(macro_call)
    pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
        self.resolve_macro_call2(macro_call)
            .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
    }

@ -5,7 +5,7 @@
//!
//! This module solves the following problem:
//!
//! Given a piece of syntax, find the corresponding semantic definition (def).
//! > Given a piece of syntax, find the corresponding semantic definition (def).
//!
//! This problem is a part of more-or-less every IDE feature implemented. Every
//! IDE functionality (like goto definition) conceptually starts with a

@ -40,7 +40,7 @@ fn mod_item_path_str(
/// Type tree shows how we can get from a set of types to some type.
///
/// Consider the following code as an example
/// ```
/// ```ignore
/// fn foo(x: i32, y: bool) -> Option<i32> { None }
/// fn bar() {
///     let a = 1;

@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"

@ -52,6 +52,10 @@ pub(crate) struct AssistContext<'a> {
    frange: FileRange,
    trimmed_range: TextRange,
    source_file: SourceFile,
    // We cache this here to speed up things slightly
    token_at_offset: TokenAtOffset<SyntaxToken>,
    // We cache this here to speed up things slightly
    covering_element: SyntaxElement,
}

impl<'a> AssistContext<'a> {
@ -78,8 +82,18 @@ impl<'a> AssistContext<'a> {
            // Selection solely consists of whitespace so just fall back to the original
            _ => frange.range,
        };
        let token_at_offset = source_file.syntax().token_at_offset(frange.range.start());
        let covering_element = source_file.syntax().covering_element(trimmed_range);

        AssistContext { config, sema, frange, source_file, trimmed_range }
        AssistContext {
            config,
            sema,
            frange,
            source_file,
            trimmed_range,
            token_at_offset,
            covering_element,
        }
    }

    pub(crate) fn db(&self) -> &RootDatabase {
@ -114,7 +128,7 @@ impl<'a> AssistContext<'a> {
    }

    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
        self.source_file.syntax().token_at_offset(self.offset())
        self.token_at_offset.clone()
    }
    pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
        self.token_at_offset().find(|it| it.kind() == kind)
@ -136,7 +150,7 @@ impl<'a> AssistContext<'a> {
    }
    /// Returns the element covered by the selection range, this excludes trailing whitespace in the selection.
    pub(crate) fn covering_element(&self) -> SyntaxElement {
        self.source_file.syntax().covering_element(self.selection_trimmed())
        self.covering_element.clone()
    }
}

@ -76,6 +76,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)

    let cfg = ctx.config.import_path_config();

    let make = SyntaxFactory::new();

    let module = ctx.sema.scope(expr.syntax())?.module();
    let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
        Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
@ -93,7 +95,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
            .into_iter()
            .filter_map(|variant| {
                Some((
                    build_pat(ctx, module, variant, cfg)?,
                    build_pat(ctx, &make, module, variant, cfg)?,
                    variant.should_be_hidden(ctx.db(), module.krate()),
                ))
            })
@ -144,10 +146,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
                let is_hidden = variants
                    .iter()
                    .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
                let patterns =
                    variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
                let patterns = variants
                    .into_iter()
                    .filter_map(|variant| build_pat(ctx, &make, module, variant, cfg));

                (ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
                (ast::Pat::from(make.tuple_pat(patterns)), is_hidden)
            })
            .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
        (
@ -176,9 +179,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
                let is_hidden = variants
                    .iter()
                    .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
                let patterns =
                    variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
                (ast::Pat::from(make::slice_pat(patterns)), is_hidden)
                let patterns = variants
                    .into_iter()
                    .filter_map(|variant| build_pat(ctx, &make, module, variant, cfg));

                (ast::Pat::from(make.slice_pat(patterns)), is_hidden)
            })
            .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
        (
@ -203,8 +208,6 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
        "Fill match arms",
        ctx.sema.original_range(match_expr.syntax()).range,
        |builder| {
            let make = SyntaxFactory::new();

            // having any hidden variants means that we need a catch-all arm
            needs_catch_all_arm |= has_hidden_variants;

@ -243,7 +246,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)

            if needs_catch_all_arm && !has_catch_all_arm {
                cov_mark::hit!(added_wildcard_pattern);
                let arm = make.match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo());
                let arm = make.match_arm(make.wildcard_pat().into(), None, make::ext::expr_todo());
                arms.push(arm);
            }

@ -290,7 +293,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
                }
            }

            editor.add_mappings(make.finish_with_mappings());
            editor.add_mappings(make.take());
            builder.add_file_edits(ctx.file_id(), editor);
        },
    )
@ -445,6 +448,7 @@ fn resolve_array_of_enum_def(

fn build_pat(
    ctx: &AssistContext<'_>,
    make: &SyntaxFactory,
    module: hir::Module,
    var: ExtendedVariant,
    cfg: ImportPathConfig,
@ -455,31 +459,32 @@ fn build_pat(
            let edition = module.krate().edition(db);
            let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
            let fields = var.fields(db);
            let pat = match var.kind(db) {
            let pat: ast::Pat = match var.kind(db) {
                hir::StructKind::Tuple => {
                    let mut name_generator = suggest_name::NameGenerator::new();
                    let pats = fields.into_iter().map(|f| {
                        let name = name_generator.for_type(&f.ty(db), db, edition);
                        match name {
                            Some(name) => make::ext::simple_ident_pat(make::name(&name)).into(),
                            None => make::wildcard_pat().into(),
                            Some(name) => make::ext::simple_ident_pat(make.name(&name)).into(),
                            None => make.wildcard_pat().into(),
                        }
                    });
                    make::tuple_struct_pat(path, pats).into()
                    make.tuple_struct_pat(path, pats).into()
                }
                hir::StructKind::Record => {
                    let pats = fields
                    let fields = fields
                        .into_iter()
                        .map(|f| make::name(f.name(db).as_str()))
                        .map(|name| make::ext::simple_ident_pat(name).into());
                    make::record_pat(path, pats).into()
                        .map(|f| make.name_ref(f.name(db).as_str()))
                        .map(|name_ref| make.record_pat_field_shorthand(name_ref));
                    let fields = make.record_pat_field_list(fields, None);
                    make.record_pat_with_fields(path, fields).into()
                }
                hir::StructKind::Unit => make::path_pat(path),
                hir::StructKind::Unit => make.path_pat(path),
            };
            Some(pat)
        }
        ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
        ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
        ExtendedVariant::True => Some(ast::Pat::from(make.literal_pat("true"))),
        ExtendedVariant::False => Some(ast::Pat::from(make.literal_pat("false"))),
    }
}

@ -3,12 +3,19 @@ use std::collections::VecDeque;
use ide_db::{
assists::GroupLabel,
famous_defs::FamousDefs,
source_change::SourceChangeBuilder,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use syntax::{
ast::{self, make, AstNode, Expr::BinExpr, HasArgList},
ted, SyntaxKind, T,
ast::{
self,
prec::{precedence, ExprPrecedence},
syntax_factory::SyntaxFactory,
AstNode,
Expr::BinExpr,
HasArgList,
},
syntax_editor::{Position, SyntaxEditor},
SyntaxKind, T,
};

use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
@ -52,53 +59,60 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}

let op = bin_expr.op_kind()?;
let inv_token = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => SyntaxKind::PIPE2,
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => SyntaxKind::AMP2,
let (inv_token, prec) = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => (SyntaxKind::PIPE2, ExprPrecedence::LOr),
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => (SyntaxKind::AMP2, ExprPrecedence::LAnd),
_ => return None,
};

let demorganed = bin_expr.clone_subtree().clone_for_update();
let make = SyntaxFactory::new();

let demorganed = bin_expr.clone_subtree();
let mut editor = SyntaxEditor::new(demorganed.syntax().clone());
editor.replace(demorganed.op_token()?, make.token(inv_token));

ted::replace(demorganed.op_token()?, ast::make::token(inv_token));
let mut exprs = VecDeque::from([
(bin_expr.lhs()?, demorganed.lhs()?),
(bin_expr.rhs()?, demorganed.rhs()?),
(bin_expr.lhs()?, demorganed.lhs()?, prec),
(bin_expr.rhs()?, demorganed.rhs()?, prec),
]);

while let Some((expr, dm)) = exprs.pop_front() {
while let Some((expr, demorganed, prec)) = exprs.pop_front() {
if let BinExpr(bin_expr) = &expr {
if let BinExpr(cbin_expr) = &dm {
if let BinExpr(cbin_expr) = &demorganed {
if op == bin_expr.op_kind()? {
ted::replace(cbin_expr.op_token()?, ast::make::token(inv_token));
exprs.push_back((bin_expr.lhs()?, cbin_expr.lhs()?));
exprs.push_back((bin_expr.rhs()?, cbin_expr.rhs()?));
editor.replace(cbin_expr.op_token()?, make.token(inv_token));
exprs.push_back((bin_expr.lhs()?, cbin_expr.lhs()?, prec));
exprs.push_back((bin_expr.rhs()?, cbin_expr.rhs()?, prec));
} else {
let mut inv = invert_boolean_expression(expr);
if inv.needs_parens_in(dm.syntax().parent()?) {
inv = ast::make::expr_paren(inv).clone_for_update();
let mut inv = invert_boolean_expression(&make, expr);
if precedence(&inv).needs_parentheses_in(prec) {
inv = make.expr_paren(inv).into();
}
ted::replace(dm.syntax(), inv.syntax());
editor.replace(demorganed.syntax(), inv.syntax());
}
} else {
return None;
}
} else {
let mut inv = invert_boolean_expression(dm.clone_subtree()).clone_for_update();
if inv.needs_parens_in(dm.syntax().parent()?) {
inv = ast::make::expr_paren(inv).clone_for_update();
let mut inv = invert_boolean_expression(&make, demorganed.clone());
if precedence(&inv).needs_parentheses_in(prec) {
inv = make.expr_paren(inv).into();
}
ted::replace(dm.syntax(), inv.syntax());
editor.replace(demorganed.syntax(), inv.syntax());
}
}

editor.add_mappings(make.finish_with_mappings());
let edit = editor.finish();
let demorganed = ast::Expr::cast(edit.new_root().clone())?;

acc.add_group(
&GroupLabel("Apply De Morgan's law".to_owned()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
|edit| {
let demorganed = ast::Expr::BinExpr(demorganed);
|builder| {
let make = SyntaxFactory::new();
let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast);
let neg_expr = paren_expr
.clone()
@ -107,24 +121,32 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
.filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
.map(ast::Expr::PrefixExpr);

let mut editor;
if let Some(paren_expr) = paren_expr {
if let Some(neg_expr) = neg_expr {
cov_mark::hit!(demorgan_double_negation);
let parent = neg_expr.syntax().parent();
editor = builder.make_editor(neg_expr.syntax());

if parent.is_some_and(|parent| demorganed.needs_parens_in(parent)) {
if parent.is_some_and(|parent| demorganed.needs_parens_in(&parent)) {
cov_mark::hit!(demorgan_keep_parens_for_op_precedence2);
edit.replace_ast(neg_expr, make::expr_paren(demorganed));
editor.replace(neg_expr.syntax(), make.expr_paren(demorganed).syntax());
} else {
edit.replace_ast(neg_expr, demorganed);
editor.replace(neg_expr.syntax(), demorganed.syntax());
};
} else {
cov_mark::hit!(demorgan_double_parens);
edit.replace_ast(paren_expr.into(), add_bang_paren(demorganed));
editor = builder.make_editor(paren_expr.syntax());

editor.replace(paren_expr.syntax(), add_bang_paren(&make, demorganed).syntax());
}
} else {
edit.replace_ast(bin_expr.into(), add_bang_paren(demorganed));
editor = builder.make_editor(bin_expr.syntax());
editor.replace(bin_expr.syntax(), add_bang_paren(&make, demorganed).syntax());
}

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

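As a quick sketch of the rewrite apply_demorgan performs (made-up expressions):

    // Cursor on the `&&` / `||`:
    //   !(x && y)       becomes   !x || !y       (the enclosing negation is consumed)
    //   x && !y         becomes   !(!x || y)     (a negated, parenthesized copy is inserted)
    //   !(a || b && c)  becomes   !a && !(b && c)
    // The ExprPrecedence pair chosen above (LOr for `&&`, LAnd for `||`)
    // drives the needs_parentheses_in check that keeps inverted operands
    // correctly parenthesized after the operator flips.
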
@ -161,7 +183,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
let (name, arg_expr) = validate_method_call_expr(ctx, &method_call)?;

let ast::Expr::ClosureExpr(closure_expr) = arg_expr else { return None };
let closure_body = closure_expr.body()?;
let closure_body = closure_expr.body()?.clone_for_update();

let op_range = method_call.syntax().text_range();
let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
@ -170,18 +192,19 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
label,
op_range,
|edit| {
|builder| {
let make = SyntaxFactory::new();
let mut editor = builder.make_editor(method_call.syntax());
// replace the method name
let new_name = match name.text().as_str() {
"all" => make::name_ref("any"),
"any" => make::name_ref("all"),
"all" => make.name_ref("any"),
"any" => make.name_ref("all"),
_ => unreachable!(),
}
.clone_for_update();
edit.replace_ast(name, new_name);
};
editor.replace(name.syntax(), new_name.syntax());

// negate all tail expressions in the closure body
let tail_cb = &mut |e: &_| tail_cb_impl(edit, e);
let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e);
walk_expr(&closure_body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {
@ -198,15 +221,15 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
.and_then(ast::PrefixExpr::cast)
.filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
{
edit.delete(
prefix_expr
.op_token()
.expect("prefix expression always has an operator")
.text_range(),
editor.delete(
prefix_expr.op_token().expect("prefix expression always has an operator"),
);
} else {
edit.insert(method_call.syntax().text_range().start(), "!");
editor.insert(Position::before(method_call.syntax()), make.token(SyntaxKind::BANG));
}

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
@ -233,26 +256,26 @@ fn validate_method_call_expr(
it_type.impls_trait(sema.db, iter_trait, &[]).then_some((name_ref, arg_expr))
}

fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
fn tail_cb_impl(editor: &mut SyntaxEditor, make: &SyntaxFactory, e: &ast::Expr) {
match e {
ast::Expr::BreakExpr(break_expr) => {
if let Some(break_expr_arg) = break_expr.expr() {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(edit, e))
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(editor, make, e))
}
}
ast::Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => {
let inverted_body = invert_boolean_expression(e.clone());
edit.replace(e.syntax().text_range(), inverted_body.syntax().text());
let inverted_body = invert_boolean_expression(make, e.clone());
editor.replace(e.syntax(), inverted_body.syntax());
}
}
}

/// Add bang and parentheses to the expression.
fn add_bang_paren(expr: ast::Expr) -> ast::Expr {
make::expr_prefix(T![!], make::expr_paren(expr)).into()
fn add_bang_paren(make: &SyntaxFactory, expr: ast::Expr) -> ast::Expr {
make.expr_prefix(T![!], make.expr_paren(expr).into()).into()
}

#[cfg(test)]

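The Iterator variant flips all/any and negates the closure's tail expressions; roughly, with made-up predicates:

    //   it.all(|x| !p(x))    becomes   !it.any(|x| p(x))
    //   !it.any(|x| x > 0)   becomes   it.all(|x| x <= 0)
    // An existing leading `!` on the method call is deleted rather than
    // doubled, which is what the editor.delete branch above handles;
    // otherwise a BANG token is inserted before the call.
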
@ -8,8 +8,9 @@ use ide_db::{
};
use itertools::Itertools;
use syntax::{
ast::{self, edit::AstNodeEdit, make, HasArgList},
ted, AstNode, SyntaxNode,
ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory, HasArgList},
syntax_editor::SyntaxEditor,
AstNode, SyntaxNode,
};

use crate::{
@ -76,9 +77,9 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
"Convert `if` expression to `bool::then` call",
target,
|builder| {
let closure_body = closure_body.clone_for_update();
let closure_body = closure_body.clone_subtree();
let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
// Rewrite all `Some(e)` in tail position to `e`
let mut replacements = Vec::new();
for_each_tail_expr(&closure_body, &mut |e| {
let e = match e {
ast::Expr::BreakExpr(e) => e.expr(),
@ -88,12 +89,16 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
if let Some(ast::Expr::CallExpr(call)) = e {
if let Some(arg_list) = call.arg_list() {
if let Some(arg) = arg_list.args().next() {
replacements.push((call.syntax().clone(), arg.syntax().clone()));
editor.replace(call.syntax(), arg.syntax());
}
}
}
});
replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
let edit = editor.finish();
let closure_body = ast::Expr::cast(edit.new_root().clone()).unwrap();

let mut editor = builder.make_editor(expr.syntax());
let make = SyntaxFactory::new();
let closure_body = match closure_body {
ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
e => e,
@ -119,11 +124,18 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
| ast::Expr::WhileExpr(_)
| ast::Expr::YieldExpr(_)
);
let cond = if invert_cond { invert_boolean_expression(cond) } else { cond };
let cond = if parenthesize { make::expr_paren(cond) } else { cond };
let arg_list = make::arg_list(Some(make::expr_closure(None, closure_body)));
let mcall = make::expr_method_call(cond, make::name_ref("then"), arg_list);
builder.replace(target, mcall.to_string());
let cond = if invert_cond {
invert_boolean_expression(&make, cond)
} else {
cond.clone_for_update()
};
let cond = if parenthesize { make.expr_paren(cond).into() } else { cond };
let arg_list = make.arg_list(Some(make.expr_closure(None, closure_body).into()));
let mcall = make.expr_method_call(cond, make.name_ref("then"), arg_list);
editor.replace(expr.syntax(), mcall.syntax());

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

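For reference, the edit this assist makes, sketched with a made-up condition and value:

    //   if cond { Some(val) } else { None }   becomes   cond.then(|| val)
    // The temporary SyntaxEditor pass above unwraps tail `Some(e)` values
    // to `e` before the closure is built, and the condition is inverted
    // or parenthesized as computed above when needed.
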
@ -173,16 +185,17 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
"Convert `bool::then` call to `if`",
target,
|builder| {
let closure_body = match closure_body {
let mapless_make = SyntaxFactory::without_mappings();
let closure_body = match closure_body.reset_indent() {
ast::Expr::BlockExpr(block) => block,
e => make::block_expr(None, Some(e)),
e => mapless_make.block_expr(None, Some(e)),
};

let closure_body = closure_body.clone_for_update();
let closure_body = closure_body.clone_subtree();
let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
// Wrap all tails in `Some(...)`
let none_path = make::expr_path(make::ext::ident_path("None"));
let some_path = make::expr_path(make::ext::ident_path("Some"));
let mut replacements = Vec::new();
let none_path = mapless_make.expr_path(mapless_make.ident_path("None"));
let some_path = mapless_make.expr_path(mapless_make.ident_path("Some"));
for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
let e = match e {
ast::Expr::BreakExpr(e) => e.expr(),
@ -190,28 +203,37 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
_ => Some(e.clone()),
};
if let Some(expr) = e {
replacements.push((
editor.replace(
expr.syntax().clone(),
make::expr_call(some_path.clone(), make::arg_list(Some(expr)))
mapless_make
.expr_call(some_path.clone(), mapless_make.arg_list(Some(expr)))
.syntax()
.clone_for_update(),
));
.clone(),
);
}
});
replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
let edit = editor.finish();
let closure_body = ast::BlockExpr::cast(edit.new_root().clone()).unwrap();

let mut editor = builder.make_editor(mcall.syntax());
let make = SyntaxFactory::new();

let cond = match &receiver {
ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
_ => receiver,
};
let if_expr = make::expr_if(
cond,
closure_body.reset_indent(),
Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))),
)
.indent(mcall.indent_level());
let if_expr = make
.expr_if(
cond,
closure_body,
Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))),
)
.indent(mcall.indent_level())
.clone_for_update();
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());

builder.replace(target, if_expr.to_string());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

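And the inverse assist, sketched the same way:

    //   cond.then(|| val)   becomes   if cond { Some(val) } else { None }
    // Every tail expression of the closure body is wrapped in Some(...),
    // and the else branch is a block holding the None path built above.
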
@ -136,7 +136,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem

/// Not all comments are valid candidates for conversion into doc comments. For example, the
/// comments in the code:
/// ```rust
/// ```ignore
/// // Brilliant module right here
///
/// // Really good right
@ -148,7 +148,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem
/// mod nice_module {}
/// ```
/// can be converted to doc comments. However, the comments in this example:
/// ```rust
/// ```ignore
/// fn foo_bar(foo: Foo /* not bar yet */) -> Bar {
/// foo.into_bar()
/// // Nicely done
@ -162,7 +162,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem
/// are not allowed to become doc comments. Moreover, some comments _are_ allowed, but aren't common
/// style in Rust. For example, the following comments are allowed to be doc comments, but it is not
/// common style for them to be:
/// ```rust
/// ```ignore
/// fn foo_bar(foo: Foo) -> Bar {
/// // this could be an inner comment with //!
/// foo.into_bar()

@ -17,7 +17,7 @@ use syntax::{

use crate::{
assist_context::{AssistContext, Assists},
utils::invert_boolean_expression,
utils::invert_boolean_expression_legacy,
AssistId, AssistKind,
};

@ -139,7 +139,7 @@ fn if_expr_to_guarded_return(
let new_expr = {
let then_branch =
make::block_expr(once(make::expr_stmt(early_expression).into()), None);
let cond = invert_boolean_expression(cond_expr);
let cond = invert_boolean_expression_legacy(cond_expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone_for_update()

@ -13,7 +13,7 @@ use syntax::{

use crate::{
assist_context::{AssistContext, Assists},
utils::invert_boolean_expression,
utils::invert_boolean_expression_legacy,
AssistId, AssistKind,
};

@ -63,7 +63,7 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
let stmts = iter::once(make::expr_stmt(if_expr.into()).into());
make::block_expr(stmts, None)
} else {
let if_cond = invert_boolean_expression(while_cond);
let if_cond = invert_boolean_expression_legacy(while_cond);
let if_expr = make::expr_if(if_cond, break_block, None).syntax().clone().into();
let elements = while_body.stmt_list().map_or_else(
|| Either::Left(iter::empty()),

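These two hunks only rename the helper: the old ted-based inverter is kept as invert_boolean_expression_legacy for assists that have not been migrated yet, while the migrated assists above call a SyntaxFactory-based invert_boolean_expression. Judging from the call sites in this diff (the exact definitions are not shown here), the two shapes are roughly:

    // invert_boolean_expression(make: &SyntaxFactory, expr: ast::Expr) -> ast::Expr
    // invert_boolean_expression_legacy(expr: ast::Expr) -> ast::Expr
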
@ -272,7 +272,7 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
/// * We want whole node, like `loop {}`, `2 + 2`, `{ let n = 1; }` exprs.
/// Then we can use `ast::Expr`
/// * We want a few statements for a block. E.g.
/// ```rust,no_run
/// ```ignore
/// fn foo() -> i32 {
/// let m = 1;
/// $0
@ -386,7 +386,7 @@ struct ContainerInfo {
/// Control flow that is exported from extracted function
///
/// E.g.:
/// ```rust,no_run
/// ```ignore
/// loop {
/// $0
/// if 42 == 42 {

@ -1,8 +1,8 @@
use syntax::{
algo::non_trivia_sibling,
ast::{self, syntax_factory::SyntaxFactory},
syntax_editor::{Element, SyntaxMapping},
AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxToken, T,
syntax_editor::SyntaxMapping,
AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T,
};

use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -39,37 +39,24 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
return None;
}

let prev = match prev {
SyntaxElement::Node(node) => node.syntax_element(),
_ => prev,
};
let next = match next {
SyntaxElement::Node(node) => node.syntax_element(),
_ => next,
};
let target = comma.text_range();
acc.add(AssistId("flip_comma", AssistKind::RefactorRewrite), "Flip comma", target, |builder| {
let parent = comma.parent().unwrap();
let mut editor = builder.make_editor(&parent);

acc.add(
AssistId("flip_comma", AssistKind::RefactorRewrite),
"Flip comma",
comma.text_range(),
|builder| {
let parent = comma.parent().unwrap();
let mut editor = builder.make_editor(&parent);
if let Some(parent) = ast::TokenTree::cast(parent) {
// An attribute. It often contains a path followed by a
// token tree (e.g. `align(2)`), so we have to be smarter.
let (new_tree, mapping) = flip_tree(parent.clone(), comma);
editor.replace(parent.syntax(), new_tree.syntax());
editor.add_mappings(mapping);
} else {
editor.replace(prev.clone(), next.clone());
editor.replace(next.clone(), prev.clone());
}

if let Some(parent) = ast::TokenTree::cast(parent) {
// An attribute. It often contains a path followed by a
// token tree (e.g. `align(2)`), so we have to be smarter.
let (new_tree, mapping) = flip_tree(parent.clone(), comma);
editor.replace(parent.syntax(), new_tree.syntax());
editor.add_mappings(mapping);
} else {
editor.replace(prev.clone(), next.clone());
editor.replace(next.clone(), prev.clone());
}

builder.add_file_edits(ctx.file_id(), editor);
},
)
builder.add_file_edits(ctx.file_id(), editor);
})
}

fn flip_tree(tree: ast::TokenTree, comma: SyntaxToken) -> (ast::TokenTree, SyntaxMapping) {

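Why the TokenTree branch matters, using the attribute from the comment above as an example:

    //   #[repr(C, align(2))]
    // Flipping at the comma must treat `align(2)` as one unit, producing
    //   #[repr(align(2), C)]
    // rather than swapping only the two tokens adjacent to the comma;
    // that grouping is what flip_tree computes, along with the
    // SyntaxMapping it hands back to the editor.
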
@ -0,0 +1,80 @@
use syntax::{
algo::non_trivia_sibling,
ast::{self, AstNode},
Direction, T,
};

use crate::{AssistContext, AssistId, AssistKind, Assists};

// Assist: flip_or_pattern
//
// Flips two patterns in an or-pattern.
//
// ```
// fn foo() {
// let (a |$0 b) = 1;
// }
// ```
// ->
// ```
// fn foo() {
// let (b | a) = 1;
// }
// ```
pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
// Only flip on the `|` token
let pipe = ctx.find_token_syntax_at_offset(T![|])?;

let parent = ast::OrPat::cast(pipe.parent()?)?;

let before = non_trivia_sibling(pipe.clone().into(), Direction::Prev)?.into_node()?;
let after = non_trivia_sibling(pipe.clone().into(), Direction::Next)?.into_node()?;

let target = pipe.text_range();
acc.add(
AssistId("flip_or_pattern", AssistKind::RefactorRewrite),
"Flip patterns",
target,
|builder| {
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

#[cfg(test)]
mod tests {
use super::*;

use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};

#[test]
fn flip_or_pattern_assist_available() {
check_assist_target(flip_or_pattern, "fn main(a |$0 b: ()) {}", "|")
}

#[test]
fn flip_or_pattern_not_applicable_for_leading_pipe() {
check_assist_not_applicable(flip_or_pattern, "fn main(|$0 b: ()) {}")
}

#[test]
fn flip_or_pattern_works() {
check_assist(
flip_or_pattern,
"fn foo() { let (a | b |$0 c | d) = 1; }",
"fn foo() { let (a | c | b | d) = 1; }",
)
}

#[test]
fn flip_or_pattern_works_match_guard() {
check_assist(
flip_or_pattern,
"fn foo() { match() { a |$0 b if true => () }}",
"fn foo() { match() { b | a if true => () }}",
)
}
}

@ -18,17 +18,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// fn foo<T: Copy + Clone>() { }
// ```
pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
// We want to replicate the behavior of `flip_binexpr` by only suggesting
// the assist when the cursor is on a `+`
// Only flip on the `+` token
let plus = ctx.find_token_syntax_at_offset(T![+])?;

// Make sure we're in a `TypeBoundList`
let parent = ast::TypeBoundList::cast(plus.parent()?)?;

let (before, after) = (
non_trivia_sibling(plus.clone().into(), Direction::Prev)?.into_node()?,
non_trivia_sibling(plus.clone().into(), Direction::Next)?.into_node()?,
);
let before = non_trivia_sibling(plus.clone().into(), Direction::Prev)?.into_node()?;
let after = non_trivia_sibling(plus.clone().into(), Direction::Next)?.into_node()?;

let target = plus.text_range();
acc.add(

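The assist itself is a plain sibling swap around the `+` (cursor shown as $0), e.g.:

    //   fn foo<T: Copy +$0 Clone>() {}   becomes   fn foo<T: Clone + Copy>() {}
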
@ -1037,7 +1037,7 @@ fn filter_bounds_in_scope(

/// Makes duplicate argument names unique by appending incrementing numbers.
///
/// ```
/// ```ignore
/// let mut names: Vec<String> =
/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
/// deduplicate_arg_names(&mut names);

@ -78,7 +78,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
// data: T,
// }
//
// impl<T: Clone> ${0:_} for Ctx<T> {}
// impl<T: Clone> ${1:_} for Ctx<T> {$0}
// ```
pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
@ -102,6 +102,10 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
if let Some(trait_) = impl_.trait_() {
edit.add_placeholder_snippet(cap, trait_);
}

if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
edit.add_tabstop_after_token(cap, l_curly);
}
}

insert_impl(impl_, &edit.make_mut(nominal));
@ -278,7 +282,7 @@ mod tests {
r#"
struct Foo {}

impl ${0:_} for Foo {}
impl ${1:_} for Foo {$0}
"#,
);
}
@ -293,7 +297,7 @@ mod tests {
r#"
struct Foo<T: Clone> {}

impl<T: Clone> ${0:_} for Foo<T> {}
impl<T: Clone> ${1:_} for Foo<T> {$0}
"#,
);
}
@ -308,7 +312,7 @@ mod tests {
r#"
struct Foo<'a, T: Foo<'a>> {}

impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {}
impl<'a, T: Foo<'a>> ${1:_} for Foo<'a, T> {$0}
"#,
);
}
@ -326,7 +330,7 @@ mod tests {
struct Foo<'a, T: Foo<'a>> {}

#[cfg(feature = "foo")]
impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {}
impl<'a, T: Foo<'a>> ${1:_} for Foo<'a, T> {$0}
"#,
);
}
@ -341,7 +345,7 @@ mod tests {
r#"
struct Defaulted<T = i32> {}

impl<T> ${0:_} for Defaulted<T> {}
impl<T> ${1:_} for Defaulted<T> {$0}
"#,
);
}
@ -356,7 +360,7 @@ mod tests {
r#"
struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}

impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> ${0:_} for Defaulted<'a, 'b, T, S> {}
impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> ${1:_} for Defaulted<'a, 'b, T, S> {$0}
"#,
);
}
@ -371,7 +375,7 @@ mod tests {
r#"
struct Defaulted<const N: i32 = 0> {}

impl<const N: i32> ${0:_} for Defaulted<N> {}
impl<const N: i32> ${1:_} for Defaulted<N> {$0}
"#,
);
}
@ -398,10 +402,10 @@ mod tests {
inner: T,
}

impl<T> ${0:_} for Struct<T>
impl<T> ${1:_} for Struct<T>
where
T: Trait,
{
{$0
}
"#,
);
@ -476,7 +480,7 @@ mod tests {
mod foo {
struct Bar {}

impl ${0:_} for Bar {}
impl ${1:_} for Bar {$0}
}
"#,
);

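The ${0:_} to ${1:_} changes across these tests are the visible effect of the new tabstop: the trait name is now the first placeholder, and the added l_curly handling places the final $0 cursor inside the impl's braces, so accepting the assist leaves the cursor in the body. For example:

    // struct Foo {}
    //
    // impl ${1:_} for Foo {$0}
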