Rename some things related to literals.

- Rename `ast::Lit::token` as `ast::Lit::token_lit`, because its type is
  `token::Lit`, which is not a token. (This has been confusing me for a
  long time.) The new name is also reasonable because we have an
  `ast::token::Lit` inside an `ast::Lit`.
- Rename `LitKind::{from,to}_lit_token` as
  `LitKind::{from,to}_token_lit`, to match the above change and
  `token::Lit`.
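
For reference, here is a minimal, self-contained sketch of the shapes involved. The types are simplified stand-ins (`TokenLit`, `AstLit`, stubbed `Symbol`/`Span`), not the real `rustc_ast` definitions; it only illustrates why a field of type `token::Lit` reads better as `token_lit` than as `token`:

```rust
// Simplified stand-ins (not the real `rustc_ast` definitions); `Symbol` and
// `Span` are stubbed so the sketch compiles on its own.
type Symbol = String;
#[derive(Clone, Copy, Debug)]
struct Span;

// `token::Lit`: only the written form of a literal (kind + text + suffix).
// It carries no span, so it is not itself a token.
#[derive(Clone, Debug)]
struct TokenLit {
    kind: TokenLitKind,
    symbol: Symbol,
    suffix: Option<Symbol>, // e.g. the `u8` in `1u8`
}

#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum TokenLitKind { Str, Integer, Bool, Err }

// An actual token pairs a kind (which may *contain* a literal) with a span.
#[derive(Clone, Debug)]
#[allow(dead_code)]
enum TokenKind { Literal(TokenLit), Ident(Symbol, bool) }
#[derive(Clone, Debug)]
#[allow(dead_code)]
struct Token { kind: TokenKind, span: Span }

// `ast::Lit` keeps the written form next to the semantic form, so the
// `token::Lit`-typed field is better called `token_lit` than `token`.
#[derive(Clone, Debug)]
struct AstLit {
    token_lit: TokenLit, // was `token` before this rename
    kind: LitKind,       // semantic value (strings unescaped, etc.)
    span: Span,
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
enum LitKind { Str(Symbol), Int(u128), Bool(bool), Err }

fn main() {
    // Mirror what `Lit::from_token_lit` does: pair the written form with
    // its semantic value.
    let token_lit = TokenLit {
        kind: TokenLitKind::Integer,
        symbol: "1".into(),
        suffix: Some("u8".into()),
    };
    let lit = AstLit { token_lit, kind: LitKind::Int(1), span: Span };
    println!("{lit:?}");
}
```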
Nicholas Nethercote 2022-08-01 16:46:08 +10:00
parent d7a041f607
commit 5d3cc1713a
13 changed files with 49 additions and 41 deletions


@@ -1690,7 +1690,7 @@ pub enum StrStyle {
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
/// The original literal token as written in source code.
-pub token: token::Lit,
+pub token_lit: token::Lit,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
@@ -1718,7 +1718,7 @@ impl StrLit {
StrStyle::Raw(n) => token::StrRaw(n),
};
Lit {
-token: token::Lit::new(token_kind, self.symbol, self.suffix),
+token_lit: token::Lit::new(token_kind, self.symbol, self.suffix),
span: self.span,
kind: LitKind::Str(self.symbol_unescaped, self.style),
}


@@ -23,7 +23,7 @@ pub enum LitError {
impl LitKind {
/// Converts literal token into a semantic literal.
-pub fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
+pub fn from_token_lit(lit: token::Lit) -> Result<LitKind, LitError> {
let token::Lit { kind, symbol, suffix } = lit;
if suffix.is_some() && !kind.may_have_suffix() {
return Err(LitError::InvalidSuffix);
@@ -153,7 +153,7 @@ impl LitKind {
/// Attempts to recover a token from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
-pub fn to_lit_token(&self) -> token::Lit {
+pub fn to_token_lit(&self) -> token::Lit {
let (kind, symbol, suffix) = match *self {
LitKind::Str(symbol, ast::StrStyle::Cooked) => {
// Don't re-intern unless the escaped string is different.
@@ -208,8 +208,8 @@ impl LitKind {
impl Lit {
/// Converts literal token into an AST literal.
-pub fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
-Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
+pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
+Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
}
/// Converts arbitrary token into an AST literal.
@@ -232,21 +232,21 @@ impl Lit {
_ => return Err(LitError::NotLiteral),
};
-Lit::from_lit_token(lit, token.span)
+Lit::from_token_lit(lit, token.span)
}
/// Attempts to recover an AST literal from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
-Lit { token: kind.to_lit_token(), kind, span }
+Lit { token_lit: kind.to_token_lit(), kind, span }
}
/// Losslessly convert an AST literal into a token.
pub fn to_token(&self) -> Token {
-let kind = match self.token.kind {
-token::Bool => token::Ident(self.token.symbol, false),
-_ => token::Literal(self.token),
+let kind = match self.token_lit.kind {
+token::Bool => token::Ident(self.token_lit.symbol, false),
+_ => token::Literal(self.token_lit),
};
Token::new(kind, self.span)
}
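
For orientation, here is a hedged usage sketch (not code from this commit) of how a caller inside the compiler might use the renamed entry points, given only the signatures shown in the hunks above; the `use` paths, especially the `LitError` import, are assumptions:

```rust
use rustc_ast::ast::{Lit, LitKind};
use rustc_ast::token;
use rustc_ast::util::literal::LitError; // assumed import path
use rustc_span::Span;

fn demo(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
    // Written form -> semantic form (was `LitKind::from_lit_token`).
    let kind = LitKind::from_token_lit(token_lit)?;
    // Semantic form -> written form (was `to_lit_token`); used when the
    // original token is unavailable, e.g. in HIR pretty-printing.
    let _recovered: token::Lit = kind.to_token_lit();
    // Written form -> full AST literal in one step (was `Lit::from_lit_token`).
    Lit::from_token_lit(token_lit, span)
}
```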


@@ -928,7 +928,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
lit.clone()
} else {
Lit {
-token: token::Lit::new(token::LitKind::Err, kw::Empty, None),
+token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err(kw::Empty),
span: DUMMY_SP,
}


@@ -372,7 +372,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_literal(&mut self, lit: &ast::Lit) {
self.maybe_print_comment(lit.span.lo());
-self.word(lit.token.to_string())
+self.word(lit.token_lit.to_string())
}
fn print_string(&mut self, st: &str, style: ast::StrStyle) {


@@ -126,9 +126,9 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
}
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
-let help_msg = match lit.token.kind {
-token::Str if rustc_lexer::is_ident(lit.token.symbol.as_str()) => {
-format!("try using `#[derive({})]`", lit.token.symbol)
+let help_msg = match lit.token_lit.kind {
+token::Str if rustc_lexer::is_ident(lit.token_lit.symbol.as_str()) => {
+format!("try using `#[derive({})]`", lit.token_lit.symbol)
}
_ => "for example, write `#[derive(Debug)]` for `Debug`".to_string(),
};


@@ -112,7 +112,7 @@ fn parse_depth<'sess>(
"meta-variable expression depth must be a literal"
));
};
-if let Ok(lit_kind) = LitKind::from_lit_token(*lit)
+if let Ok(lit_kind) = LitKind::from_token_lit(*lit)
&& let LitKind::Int(n_u128, LitIntType::Unsuffixed) = lit_kind
&& let Ok(n_usize) = usize::try_from(n_u128)
{


@@ -486,20 +486,26 @@ impl server::TokenStream for Rustc<'_, '_> {
// We don't use `TokenStream::from_ast` as the tokenstream currently cannot
// be recovered in the general case.
match &expr.kind {
-ast::ExprKind::Lit(l) if l.token.kind == token::Bool => Ok(
-tokenstream::TokenStream::token_alone(token::Ident(l.token.symbol, false), l.span),
-),
+ast::ExprKind::Lit(l) if l.token_lit.kind == token::Bool => {
+Ok(tokenstream::TokenStream::token_alone(
+token::Ident(l.token_lit.symbol, false),
+l.span,
+))
+}
ast::ExprKind::Lit(l) => {
-Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token), l.span))
+Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token_lit), l.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
-ast::ExprKind::Lit(l) => match l.token {
+ast::ExprKind::Lit(l) => match l.token_lit {
token::Lit { kind: token::Integer | token::Float, .. } => {
Ok(Self::TokenStream::from_iter([
// FIXME: The span of the `-` token is lost when
// parsing, so we cannot faithfully recover it here.
tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
-tokenstream::TokenTree::token_alone(token::Literal(l.token), l.span),
+tokenstream::TokenTree::token_alone(
+token::Literal(l.token_lit),
+l.span,
+),
]))
}
_ => Err(()),


@@ -1247,7 +1247,7 @@ impl<'a> State<'a> {
fn print_literal(&mut self, lit: &hir::Lit) {
self.maybe_print_comment(lit.span.lo());
-self.word(lit.node.to_lit_token().to_string())
+self.word(lit.node.to_token_lit().to_string())
}
fn print_inline_asm(&mut self, asm: &hir::InlineAsm<'_>) {


@@ -120,8 +120,8 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
// byte strings are already handled well enough by `EscapeError::NonAsciiCharInByteString`
let (text, span, padding) = match &expr.kind {
-ast::ExprKind::Lit(ast::Lit { token, kind, span }) => {
-let text = token.symbol;
+ast::ExprKind::Lit(ast::Lit { token_lit, kind, span }) => {
+let text = token_lit.symbol;
if !contains_text_flow_control_chars(text.as_str()) {
return;
}


@@ -1750,8 +1750,8 @@ impl<'a> Parser<'a> {
Some(lit) => match lit.kind {
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
style,
-symbol: lit.token.symbol,
-suffix: lit.token.suffix,
+symbol: lit.token_lit.symbol,
+suffix: lit.token_lit.suffix,
span: lit.span,
symbol_unescaped,
}),
@@ -1828,7 +1828,7 @@ impl<'a> Parser<'a> {
let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
let symbol = Symbol::intern(&suffixless_lit.to_string());
let lit = token::Lit::new(token::Err, symbol, lit.suffix);
-Some(Lit::from_lit_token(lit, span).unwrap_or_else(|_| unreachable!()))
+Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
}
}
}


@@ -57,10 +57,10 @@ impl EarlyLintPass for OctalEscapes {
}
if let ExprKind::Lit(lit) = &expr.kind {
-if matches!(lit.token.kind, LitKind::Str) {
-check_lit(cx, &lit.token, lit.span, true);
-} else if matches!(lit.token.kind, LitKind::ByteStr) {
-check_lit(cx, &lit.token, lit.span, false);
+if matches!(lit.token_lit.kind, LitKind::Str) {
+check_lit(cx, &lit.token_lit, lit.span, true);
+} else if matches!(lit.token_lit.kind, LitKind::ByteStr) {
+check_lit(cx, &lit.token_lit, lit.span, false);
}
}
}


@@ -589,12 +589,12 @@ impl Write {
},
};
-let replacement: String = match lit.token.kind {
+let replacement: String = match lit.token_lit.kind {
LitKind::StrRaw(_) | LitKind::ByteStrRaw(_) if matches!(fmtstr.style, StrStyle::Raw(_)) => {
-lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
+lit.token_lit.symbol.as_str().replace('{', "{{").replace('}', "}}")
},
LitKind::Str | LitKind::ByteStr if matches!(fmtstr.style, StrStyle::Cooked) => {
-lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
+lit.token_lit.symbol.as_str().replace('{', "{{").replace('}', "}}")
},
LitKind::StrRaw(_)
| LitKind::Str
@@ -603,7 +603,7 @@ impl Write {
| LitKind::Integer
| LitKind::Float
| LitKind::Err => continue,
-LitKind::Byte | LitKind::Char => match lit.token.symbol.as_str() {
+LitKind::Byte | LitKind::Char => match lit.token_lit.symbol.as_str() {
"\"" if matches!(fmtstr.style, StrStyle::Cooked) => "\\\"",
"\"" if matches!(fmtstr.style, StrStyle::Raw(0)) => continue,
"\\\\" if matches!(fmtstr.style, StrStyle::Raw(_)) => "\\",
@@ -614,7 +614,7 @@ impl Write {
x => x,
}
.into(),
-LitKind::Bool => lit.token.symbol.as_str().deref().into(),
+LitKind::Bool => lit.token_lit.symbol.as_str().deref().into(),
};
if !fmt_spans.is_empty() {


@@ -79,7 +79,7 @@ pub(crate) fn format_expr(
if let Some(expr_rw) = rewrite_literal(context, l, shape) {
Some(expr_rw)
} else {
-if let LitKind::StrRaw(_) = l.token.kind {
+if let LitKind::StrRaw(_) = l.token_lit.kind {
Some(context.snippet(l.span).trim().into())
} else {
None
@@ -1226,7 +1226,7 @@ fn rewrite_string_lit(context: &RewriteContext<'_>, span: Span, shape: Shape) ->
fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -> Option<String> {
let span = lit.span;
-let symbol = lit.token.symbol.as_str();
+let symbol = lit.token_lit.symbol.as_str();
if let Some(symbol_stripped) = symbol.strip_prefix("0x") {
let hex_lit = match context.config.hex_literal_case() {
@@ -1239,7 +1239,9 @@ fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -
format!(
"0x{}{}",
hex_lit,
-lit.token.suffix.map_or(String::new(), |s| s.to_string())
+lit.token_lit
+.suffix
+.map_or(String::new(), |s| s.to_string())
),
context.config.max_width(),
shape,