Rollup merge of #100018 - nnethercote:clean-up-LitKind, r=petrochenkov

Clean up `LitKind`

r? `@petrochenkov`
Authored by Matthias Krüger on 2022-08-17 12:32:49 +02:00; committed by GitHub
commit d5dca26a94
19 changed files with 73 additions and 66 deletions


@@ -1689,7 +1689,7 @@ pub enum StrStyle {
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
/// The original literal token as written in source code.
pub token: token::Lit,
pub token_lit: token::Lit,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
@@ -1717,7 +1717,7 @@ impl StrLit {
StrStyle::Raw(n) => token::StrRaw(n),
};
Lit {
token: token::Lit::new(token_kind, self.symbol, self.suffix),
token_lit: token::Lit::new(token_kind, self.symbol, self.suffix),
span: self.span,
kind: LitKind::Str(self.symbol_unescaped, self.style),
}
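
For context, the `token` to `token_lit` rename above is mechanical for downstream consumers (lints, clippy, rustfmt): only the field name changes. A minimal sketch, assuming the usual rustc-internal crates are in scope; `literal_suffix` is a hypothetical helper, not part of this PR:

```rust
use rustc_ast as ast;
use rustc_span::Symbol;

// Hypothetical helper showing the renamed field; before this PR the body
// would have read `lit.token.suffix`.
fn literal_suffix(lit: &ast::Lit) -> Option<Symbol> {
    lit.token_lit.suffix
}
```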


@@ -184,13 +184,7 @@ impl MetaItem {
}
pub fn value_str(&self) -> Option<Symbol> {
match self.kind {
MetaItemKind::NameValue(ref v) => match v.kind {
LitKind::Str(ref s, _) => Some(*s),
_ => None,
},
_ => None,
}
self.kind.value_str()
}
pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
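
For context, a sketch of what callers observe: `value_str` keeps its signature and now delegates to `MetaItemKind::value_str`. The attribute and bindings below are illustrative assumptions, not taken from this PR:

```rust
// Illustrative only (rustc-internal types): `meta_item` is assumed to be an
// &ast::MetaItem parsed from `#[doc = "hello"]`.
let payload: Option<Symbol> = meta_item.value_str();
assert_eq!(payload, Some(Symbol::intern("hello")));
```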


@@ -23,7 +23,7 @@ pub enum LitError {
impl LitKind {
/// Converts literal token into a semantic literal.
pub fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
let token::Lit { kind, symbol, suffix } = lit;
if suffix.is_some() && !kind.may_have_suffix() {
return Err(LitError::InvalidSuffix);
@@ -153,7 +153,7 @@ impl LitKind {
/// Attempts to recover a token from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn to_lit_token(&self) -> token::Lit {
pub fn to_token_lit(&self) -> token::Lit {
let (kind, symbol, suffix) = match *self {
LitKind::Str(symbol, ast::StrStyle::Cooked) => {
// Don't re-intern unless the escaped string is different.
@@ -208,8 +208,8 @@ impl LitKind {
impl Lit {
/// Converts literal token into an AST literal.
pub fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
}
/// Converts arbitrary token into an AST literal.
@@ -232,21 +232,21 @@ impl Lit {
_ => return Err(LitError::NotLiteral),
};
Lit::from_lit_token(lit, token.span)
Lit::from_token_lit(lit, token.span)
}
/// Attempts to recover an AST literal from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
Lit { token: kind.to_lit_token(), kind, span }
Lit { token_lit: kind.to_token_lit(), kind, span }
}
/// Losslessly convert an AST literal into a token.
pub fn to_token(&self) -> Token {
let kind = match self.token.kind {
token::Bool => token::Ident(self.token.symbol, false),
_ => token::Literal(self.token),
let kind = match self.token_lit.kind {
token::Bool => token::Ident(self.token_lit.symbol, false),
_ => token::Literal(self.token_lit),
};
Token::new(kind, self.span)
}
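
For context, a sketch of the renamed conversion surface. The function names match this diff; the literal contents, `span`, and error handling are assumed for illustration, and none of this compiles outside the compiler tree:

```rust
// Token form -> semantic form (unescaping, suffix validation).
let token_lit = token::Lit::new(token::Str, Symbol::intern("hi"), None);
let kind = LitKind::from_token_lit(token_lit)?;   // LitKind::Str(..)
let lit = Lit::from_token_lit(token_lit, span)?;  // ast::Lit { token_lit, kind, span }

// Semantic form -> token form, when the original token is gone
// (AST-built macros, HIR pretty-printing).
let recovered: token::Lit = kind.to_token_lit();
let rebuilt = Lit::from_lit_kind(kind, span);     // re-synthesizes `token_lit`
```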


@@ -927,7 +927,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
lit.clone()
} else {
Lit {
token: token::Lit::new(token::LitKind::Err, kw::Empty, None),
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err(kw::Empty),
span: DUMMY_SP,
}


@@ -372,7 +372,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_literal(&mut self, lit: &ast::Lit) {
self.maybe_print_comment(lit.span.lo());
self.word(lit.token.to_string())
self.word(lit.token_lit.to_string())
}
fn print_string(&mut self, st: &str, style: ast::StrStyle) {


@@ -1,6 +1,5 @@
use rustc_ast as ast;
use rustc_ast::{ptr::P, tokenstream::TokenStream};
use rustc_data_structures::sync::Lrc;
use rustc_errors::Applicability;
use rustc_expand::base::{self, DummyResult};
@@ -185,5 +184,5 @@ pub fn expand_concat_bytes(
return base::MacEager::expr(DummyResult::raw_expr(sp, true));
}
let sp = cx.with_def_site_ctxt(sp);
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::from(accumulator))))
base::MacEager::expr(cx.expr_byte_str(sp, accumulator))
}


@@ -126,9 +126,9 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
}
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
let help_msg = match lit.token.kind {
token::Str if rustc_lexer::is_ident(lit.token.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token.symbol)
let help_msg = match lit.token_lit.kind {
token::Str if rustc_lexer::is_ident(lit.token_lit.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token_lit.symbol)
}
_ => "for example, write `#[derive(Debug)]` for `Debug`".to_string(),
};


@@ -52,7 +52,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
// We want to make sure we have the ctxt set so that we can use unstable methods
let span = cx.with_def_site_ctxt(span);
let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
let name = cx.expr_str(span, ident.name);
let fmt = substr.nonselflike_args[0].clone();
// Struct and tuples are similar enough that we use the same code for both,
@@ -89,10 +89,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
for i in 0..fields.len() {
let field = &fields[i];
if is_struct {
let name = cx.expr_lit(
field.span,
ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked),
);
let name = cx.expr_str(field.span, field.name.unwrap().name);
args.push(name);
}
// Use an extra indirection to make sure this works for unsized types.
@@ -108,10 +105,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
for field in fields {
if is_struct {
name_exprs.push(cx.expr_lit(
field.span,
ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked),
));
name_exprs.push(cx.expr_str(field.span, field.name.unwrap().name));
}
// Use an extra indirection to make sure this works for unsized types.


@@ -923,7 +923,7 @@ impl<'a, 'b> Context<'a, 'b> {
}
// Build the format
let fill = self.ecx.expr_lit(sp, ast::LitKind::Char(fill));
let fill = self.ecx.expr_char(sp, fill);
let align = |name| {
let mut p = Context::rtpath(self.ecx, sym::Alignment);
p.push(Ident::new(name, sp));


@@ -216,7 +216,7 @@ pub fn expand_include_bytes(
}
};
match cx.source_map().load_binary_file(&file) {
Ok(bytes) => base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(bytes.into()))),
Ok(bytes) => base::MacEager::expr(cx.expr_byte_str(sp, bytes)),
Err(e) => {
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp)


@@ -3,6 +3,7 @@ use crate::base::ExtCtxt;
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AttrVec, BlockCheckMode, Expr, LocalKind, PatKind, UnOp};
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -330,23 +331,38 @@ impl<'a> ExtCtxt<'a> {
self.expr_struct(span, self.path_ident(span, id), fields)
}
pub fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
let lit = ast::Lit::from_lit_kind(lit_kind, span);
self.expr(span, ast::ExprKind::Lit(lit))
}
pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
self.expr_lit(
span,
ast::LitKind::Int(i as u128, ast::LitIntType::Unsigned(ast::UintTy::Usize)),
)
}
pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U32)))
}
pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::Bool(value))
}
pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
}
pub fn expr_char(&self, sp: Span, ch: char) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::Char(ch))
}
pub fn expr_byte_str(&self, sp: Span, bytes: Vec<u8>) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::ByteStr(Lrc::from(bytes)))
}
/// `[expr1, expr2, ...]`
pub fn expr_array(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(sp, ast::ExprKind::Array(exprs))
@@ -357,10 +373,6 @@ impl<'a> ExtCtxt<'a> {
self.expr_addr_of(sp, self.expr_array(sp, exprs))
}
pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
}
pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
self.expr(sp, ast::ExprKind::Cast(expr, ty))
}
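
For context, the net effect on the built-in macros changed elsewhere in this diff (`concat_bytes!`, `#[derive(Debug)]`, `format_args!`, `include_bytes!`): they no longer build `LitKind` values by hand. A sketch only; the bindings `cx`, `sp`, `span`, `bytes`, and `ident` are assumed caller context:

```rust
// Before this PR (as in the removed lines; ByteStr also needed Lrc):
//     cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::from(bytes)))
//     cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked))
// After: expr_lit is private to ExtCtxt, so callers use the typed helpers.
let bytes_expr = cx.expr_byte_str(sp, bytes);  // byte-string literal expression
let str_expr = cx.expr_str(span, ident.name);  // cooked string literal
let char_expr = cx.expr_char(span, 'x');       // 'x' is only an example
```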


@@ -112,7 +112,7 @@ fn parse_depth<'sess>(
"meta-variable expression depth must be a literal"
));
};
if let Ok(lit_kind) = LitKind::from_lit_token(*lit)
if let Ok(lit_kind) = LitKind::from_token_lit(*lit)
&& let LitKind::Int(n_u128, LitIntType::Unsuffixed) = lit_kind
&& let Ok(n_usize) = usize::try_from(n_u128)
{


@@ -486,20 +486,26 @@ impl server::TokenStream for Rustc<'_, '_> {
// We don't use `TokenStream::from_ast` as the tokenstream currently cannot
// be recovered in the general case.
match &expr.kind {
ast::ExprKind::Lit(l) if l.token.kind == token::Bool => Ok(
tokenstream::TokenStream::token_alone(token::Ident(l.token.symbol, false), l.span),
),
ast::ExprKind::Lit(l) if l.token_lit.kind == token::Bool => {
Ok(tokenstream::TokenStream::token_alone(
token::Ident(l.token_lit.symbol, false),
l.span,
))
}
ast::ExprKind::Lit(l) => {
Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token), l.span))
Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token_lit), l.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
ast::ExprKind::Lit(l) => match l.token {
ast::ExprKind::Lit(l) => match l.token_lit {
token::Lit { kind: token::Integer | token::Float, .. } => {
Ok(Self::TokenStream::from_iter([
// FIXME: The span of the `-` token is lost when
// parsing, so we cannot faithfully recover it here.
tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
tokenstream::TokenTree::token_alone(token::Literal(l.token), l.span),
tokenstream::TokenTree::token_alone(
token::Literal(l.token_lit),
l.span,
),
]))
}
_ => Err(()),


@@ -1247,7 +1247,7 @@ impl<'a> State<'a> {
fn print_literal(&mut self, lit: &hir::Lit) {
self.maybe_print_comment(lit.span.lo());
self.word(lit.node.to_lit_token().to_string())
self.word(lit.node.to_token_lit().to_string())
}
fn print_inline_asm(&mut self, asm: &hir::InlineAsm<'_>) {


@@ -120,8 +120,8 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
// byte strings are already handled well enough by `EscapeError::NonAsciiCharInByteString`
let (text, span, padding) = match &expr.kind {
ast::ExprKind::Lit(ast::Lit { token, kind, span }) => {
let text = token.symbol;
ast::ExprKind::Lit(ast::Lit { token_lit, kind, span }) => {
let text = token_lit.symbol;
if !contains_text_flow_control_chars(text.as_str()) {
return;
}


@@ -1750,8 +1750,8 @@ impl<'a> Parser<'a> {
Some(lit) => match lit.kind {
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
style,
symbol: lit.token.symbol,
suffix: lit.token.suffix,
symbol: lit.token_lit.symbol,
suffix: lit.token_lit.suffix,
span: lit.span,
symbol_unescaped,
}),
@@ -1828,7 +1828,7 @@ impl<'a> Parser<'a> {
let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
let symbol = Symbol::intern(&suffixless_lit.to_string());
let lit = token::Lit::new(token::Err, symbol, lit.suffix);
Some(Lit::from_lit_token(lit, span).unwrap_or_else(|_| unreachable!()))
Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
}
}
}


@@ -57,10 +57,10 @@ impl EarlyLintPass for OctalEscapes {
}
if let ExprKind::Lit(lit) = &expr.kind {
if matches!(lit.token.kind, LitKind::Str) {
check_lit(cx, &lit.token, lit.span, true);
} else if matches!(lit.token.kind, LitKind::ByteStr) {
check_lit(cx, &lit.token, lit.span, false);
if matches!(lit.token_lit.kind, LitKind::Str) {
check_lit(cx, &lit.token_lit, lit.span, true);
} else if matches!(lit.token_lit.kind, LitKind::ByteStr) {
check_lit(cx, &lit.token_lit, lit.span, false);
}
}
}


@@ -589,12 +589,12 @@ impl Write {
},
};
let replacement: String = match lit.token.kind {
let replacement: String = match lit.token_lit.kind {
LitKind::StrRaw(_) | LitKind::ByteStrRaw(_) if matches!(fmtstr.style, StrStyle::Raw(_)) => {
lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
lit.token_lit.symbol.as_str().replace('{', "{{").replace('}', "}}")
},
LitKind::Str | LitKind::ByteStr if matches!(fmtstr.style, StrStyle::Cooked) => {
lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
lit.token_lit.symbol.as_str().replace('{', "{{").replace('}', "}}")
},
LitKind::StrRaw(_)
| LitKind::Str
@@ -603,7 +603,7 @@ impl Write {
| LitKind::Integer
| LitKind::Float
| LitKind::Err => continue,
LitKind::Byte | LitKind::Char => match lit.token.symbol.as_str() {
LitKind::Byte | LitKind::Char => match lit.token_lit.symbol.as_str() {
"\"" if matches!(fmtstr.style, StrStyle::Cooked) => "\\\"",
"\"" if matches!(fmtstr.style, StrStyle::Raw(0)) => continue,
"\\\\" if matches!(fmtstr.style, StrStyle::Raw(_)) => "\\",
@@ -614,7 +614,7 @@ impl Write {
x => x,
}
.into(),
LitKind::Bool => lit.token.symbol.as_str().deref().into(),
LitKind::Bool => lit.token_lit.symbol.as_str().deref().into(),
};
if !fmt_spans.is_empty() {


@@ -79,7 +79,7 @@ pub(crate) fn format_expr(
if let Some(expr_rw) = rewrite_literal(context, l, shape) {
Some(expr_rw)
} else {
if let LitKind::StrRaw(_) = l.token.kind {
if let LitKind::StrRaw(_) = l.token_lit.kind {
Some(context.snippet(l.span).trim().into())
} else {
None
@@ -1226,7 +1226,7 @@ fn rewrite_string_lit(context: &RewriteContext<'_>, span: Span, shape: Shape) ->
fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -> Option<String> {
let span = lit.span;
let symbol = lit.token.symbol.as_str();
let symbol = lit.token_lit.symbol.as_str();
if let Some(symbol_stripped) = symbol.strip_prefix("0x") {
let hex_lit = match context.config.hex_literal_case() {
@@ -1239,7 +1239,9 @@ fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -
format!(
"0x{}{}",
hex_lit,
lit.token.suffix.map_or(String::new(), |s| s.to_string())
lit.token_lit
.suffix
.map_or(String::new(), |s| s.to_string())
),
context.config.max_width(),
shape,