Use `token::Lit` in `ast::ExprKind::Lit`.
Instead of `ast::Lit`. Literal lowering now happens at two different times: expression literals are lowered when the HIR is created, while attribute literals are lowered during parsing. This commit changes the language very slightly: some programs that previously failed to compile will now compile, because invalid literals that are removed by `cfg` or by attribute macros no longer trigger errors. See this comment for more details: https://github.com/rust-lang/rust/pull/102944#issuecomment-1277476773
parent 01760265cb
commit 358a603f11
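To make the behavioural note above concrete, here is a minimal, hypothetical sketch (not taken from this commit's diff) of the kind of program affected. Because expression literals now stay as `token::Lit` until HIR lowering, a malformed literal inside an item that is stripped by `cfg` is never converted to `ast::LitKind`, so it no longer reports an error:

// Hypothetical example illustrating the change described above; it is not part
// of this commit. The literal below is 2^128, one past u128::MAX, so turning
// it into `ast::LitKind` reports "integer literal is too large". Before this
// commit that conversion happened while parsing, so the error fired even though
// the function is cfg'd out; after this commit the conversion happens at HIR
// lowering, which cfg-stripped items never reach, so the program compiles.
#[cfg(any())] // `cfg(any())` is always false, so the item is removed
fn never_lowered() {
    let _too_big = 340282366920938463463374607431768211456;
}

fn main() {}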
@@ -1332,7 +1332,7 @@ pub enum ExprKind {
     /// A unary operation (e.g., `!x`, `*x`).
     Unary(UnOp, P<Expr>),
    /// A literal (e.g., `1`, `"foo"`).
-    Lit(Lit),
+    Lit(token::Lit),
    /// A cast (e.g., `foo as f64`).
    Cast(P<Expr>, P<Ty>),
    /// A type ascription (e.g., `42: usize`).
@@ -1698,16 +1698,12 @@ pub struct StrLit {
 }
 
 impl StrLit {
-    pub fn as_lit(&self) -> Lit {
+    pub fn as_token_lit(&self) -> token::Lit {
         let token_kind = match self.style {
             StrStyle::Cooked => token::Str,
             StrStyle::Raw(n) => token::StrRaw(n),
         };
-        Lit {
-            token_lit: token::Lit::new(token_kind, self.symbol, self.suffix),
-            span: self.span,
-            kind: LitKind::Str(self.symbol_unescaped, self.style),
-        }
+        token::Lit::new(token_kind, self.symbol, self.suffix)
     }
 }
 
@@ -1733,9 +1729,10 @@ pub enum LitFloatType {
     Unsuffixed,
 }
 
-/// Literal kind.
-///
-/// E.g., `"foo"`, `42`, `12.34`, or `bool`.
+/// Note that the entire literal (including the suffix) is considered when
+/// deciding the `LitKind`. This means that float literals like `1f32` are
+/// classified by this type as `Float`. This is different to `token::LitKind`
+/// which does *not* consider the suffix.
 #[derive(Clone, Encodable, Decodable, Debug, Hash, Eq, PartialEq, HashStable_Generic)]
 pub enum LitKind {
     /// A string literal (`"foo"`). The symbol is unescaped, and so may differ
@@ -1749,10 +1746,11 @@ pub enum LitKind {
     Char(char),
     /// An integer literal (`1`).
     Int(u128, LitIntType),
-    /// A float literal (`1f64` or `1E10f64`). Stored as a symbol rather than
-    /// `f64` so that `LitKind` can impl `Eq` and `Hash`.
+    /// A float literal (`1.0`, `1f64` or `1E10f64`). The pre-suffix part is
+    /// stored as a symbol rather than `f64` so that `LitKind` can impl `Eq`
+    /// and `Hash`.
     Float(Symbol, LitFloatType),
-    /// A boolean literal.
+    /// A boolean literal (`true`, `false`).
     Bool(bool),
     /// Placeholder for a literal that wasn't well-formed in some way.
     Err,
@@ -533,7 +533,7 @@ impl MetaItemKind {
             MetaItemKind::NameValue(lit) => {
                 let expr = P(ast::Expr {
                     id: ast::DUMMY_NODE_ID,
-                    kind: ast::ExprKind::Lit(lit.clone()),
+                    kind: ast::ExprKind::Lit(lit.token_lit.clone()),
                     span: lit.span,
                     attrs: ast::AttrVec::new(),
                     tokens: None,
@@ -605,7 +605,7 @@ impl MetaItemKind {
                 MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
             }
             Some(TokenTree::Token(token, _)) => {
-                Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
+                Lit::from_token(&token).map(MetaItemKind::NameValue)
             }
             _ => None,
         }
@@ -618,8 +618,10 @@ impl MetaItemKind {
                 MetaItemKind::list_from_tokens(tokens.clone())
             }
             MacArgs::Delimited(..) => None,
-            MacArgs::Eq(_, MacArgsEq::Ast(expr)) => match &expr.kind {
-                ast::ExprKind::Lit(lit) => Some(MetaItemKind::NameValue(lit.clone())),
+            MacArgs::Eq(_, MacArgsEq::Ast(expr)) => match expr.kind {
+                ast::ExprKind::Lit(token_lit) => Some(MetaItemKind::NameValue(
+                    Lit::from_token_lit(token_lit, expr.span).expect("token_lit in from_mac_args"),
+                )),
                 _ => None,
             },
             MacArgs::Eq(_, MacArgsEq::Hir(lit)) => Some(MetaItemKind::NameValue(lit.clone())),
@@ -668,7 +670,7 @@ impl NestedMetaItem {
     {
         match tokens.peek() {
             Some(TokenTree::Token(token, _))
-                if let Ok(lit) = Lit::from_token(token) =>
+                if let Some(lit) = Lit::from_token(token) =>
             {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
@@ -59,13 +59,17 @@ pub enum Delimiter {
     Invisible,
 }
 
+// Note that the suffix is *not* considered when deciding the `LitKind` in this
+// type. This means that float literals like `1f32` are classified by this type
+// as `Int`. Only upon conversion to `ast::LitKind` will such a literal be
+// given the `Float` kind.
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum LitKind {
     Bool, // AST only, must never appear in a `Token`
     Byte,
     Char,
-    Integer,
-    Float,
+    Integer, // e.g. `1`, `1u8`, `1f32`
+    Float,   // e.g. `1.`, `1.0`, `1e3f32`
     Str,
     StrRaw(u8), // raw string delimited by `n` hash symbols
     ByteStr,
@@ -81,6 +85,42 @@ pub struct Lit {
     pub suffix: Option<Symbol>,
 }
 
+impl Lit {
+    pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
+        Lit { kind, symbol, suffix }
+    }
+
+    /// Returns `true` if this is semantically a float literal. This includes
+    /// ones like `1f32` that have an `Integer` kind but a float suffix.
+    pub fn is_semantic_float(&self) -> bool {
+        match self.kind {
+            LitKind::Float => true,
+            LitKind::Integer => match self.suffix {
+                Some(sym) => sym == sym::f32 || sym == sym::f64,
+                None => false,
+            },
+            _ => false,
+        }
+    }
+
+    /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
+    pub fn from_token(token: &Token) -> Option<Lit> {
+        match token.uninterpolate().kind {
+            Ident(name, false) if name.is_bool_lit() => {
+                Some(Lit::new(Bool, name, None))
+            }
+            Literal(token_lit) => Some(token_lit),
+            Interpolated(ref nt)
+                if let NtExpr(expr) | NtLiteral(expr) = &**nt
+                    && let ast::ExprKind::Lit(token_lit) = expr.kind =>
+            {
+                Some(token_lit.clone())
+            }
+            _ => None,
+        }
+    }
+}
+
 impl fmt::Display for Lit {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let Lit { kind, symbol, suffix } = *self;
@@ -139,12 +179,6 @@ impl LitKind {
     }
 }
 
-impl Lit {
-    pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
-        Lit { kind, symbol, suffix }
-    }
-}
-
 pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
     let ident_token = Token::new(Ident(name, is_raw), span);
 
@@ -8,8 +8,8 @@ use rustc_span::symbol::{kw, sym, Symbol};
 use rustc_span::Span;
 use std::ascii;
 
+#[derive(Debug)]
 pub enum LitError {
-    NotLiteral,
     LexerError,
     InvalidSuffix,
     InvalidIntSuffix,
@@ -202,27 +202,10 @@ impl Lit {
         Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
     }
 
-    /// Converts arbitrary token into an AST literal.
-    ///
-    /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
-    pub fn from_token(token: &Token) -> Result<Lit, LitError> {
-        let lit = match token.uninterpolate().kind {
-            token::Ident(name, false) if name.is_bool_lit() => {
-                token::Lit::new(token::Bool, name, None)
-            }
-            token::Literal(lit) => lit,
-            token::Interpolated(ref nt) => {
-                if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt
-                    && let ast::ExprKind::Lit(lit) = &expr.kind
-                {
-                    return Ok(lit.clone());
-                }
-                return Err(LitError::NotLiteral);
-            }
-            _ => return Err(LitError::NotLiteral),
-        };
-
-        Lit::from_token_lit(lit, token.span)
+    /// Converts an arbitrary token into an AST literal.
+    pub fn from_token(token: &Token) -> Option<Lit> {
+        token::Lit::from_token(token)
+            .and_then(|token_lit| Lit::from_token_lit(token_lit, token.span).ok())
     }
 
     /// Attempts to recover an AST literal from semantic literal.
@@ -14,6 +14,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
 use rustc_hir::definitions::DefPathData;
+use rustc_session::errors::report_lit_error;
 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
 use rustc_span::symbol::{sym, Ident};
 use rustc_span::DUMMY_SP;
@@ -84,8 +85,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 let ohs = self.lower_expr(ohs);
                 hir::ExprKind::Unary(op, ohs)
             }
-            ExprKind::Lit(ref l) => {
-                hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
+            ExprKind::Lit(token_lit) => {
+                let lit_kind = match LitKind::from_token_lit(token_lit) {
+                    Ok(lit_kind) => lit_kind,
+                    Err(err) => {
+                        report_lit_error(&self.tcx.sess.parse_sess, err, token_lit, e.span);
+                        LitKind::Err
+                    }
+                };
+                hir::ExprKind::Lit(respan(self.lower_span(e.span), lit_kind))
             }
             ExprKind::IncludedBytes(ref bytes) => hir::ExprKind::Lit(respan(
                 self.lower_span(e.span),
@@ -959,8 +959,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             MacArgs::Eq(eq_span, MacArgsEq::Ast(ref expr)) => {
                 // In valid code the value always ends up as a single literal. Otherwise, a dummy
                 // literal suffices because the error is handled elsewhere.
-                let lit = if let ExprKind::Lit(lit) = &expr.kind {
-                    lit.clone()
+                let lit = if let ExprKind::Lit(token_lit) = expr.kind {
+                    match Lit::from_token_lit(token_lit, expr.span) {
+                        Ok(lit) => lit,
+                        Err(_err) => Lit {
+                            token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
+                            kind: LitKind::Err,
+                            span: DUMMY_SP,
+                        },
+                    }
                 } else {
                     Lit {
                         token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
@@ -373,8 +373,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     }
 
     fn print_literal(&mut self, lit: &ast::Lit) {
-        self.maybe_print_comment(lit.span.lo());
-        self.word(lit.token_lit.to_string())
+        self.print_token_literal(lit.token_lit, lit.span)
+    }
+
+    fn print_token_literal(&mut self, token_lit: token::Lit, span: Span) {
+        self.maybe_print_comment(span.lo());
+        self.word(token_lit.to_string())
     }
 
     fn print_string(&mut self, st: &str, style: ast::StrStyle) {
@@ -1735,7 +1739,7 @@ impl<'a> State<'a> {
             }
             ast::Extern::Explicit(abi, _) => {
                 self.word_nbsp("extern");
-                self.print_literal(&abi.as_lit());
+                self.print_token_literal(abi.as_token_lit(), abi.span);
                 self.nbsp();
             }
         }
@@ -319,8 +319,8 @@ impl<'a> State<'a> {
             ast::ExprKind::AddrOf(k, m, ref expr) => {
                 self.print_expr_addr_of(k, m, expr);
             }
-            ast::ExprKind::Lit(ref lit) => {
-                self.print_literal(lit);
+            ast::ExprKind::Lit(token_lit) => {
+                self.print_token_literal(token_lit, expr.span);
             }
             ast::ExprKind::IncludedBytes(ref bytes) => {
                 let lit = ast::Lit::from_included_bytes(bytes, expr.span);
@@ -207,7 +207,7 @@ impl<'a> State<'a> {
                     s.word("extern");
                 }));
                 if let Some(abi) = nmod.abi {
-                    self.print_literal(&abi.as_lit());
+                    self.print_token_literal(abi.as_token_lit(), abi.span);
                     self.nbsp();
                 }
                 self.bopen();
@@ -172,7 +172,11 @@ pub fn parse_asm_args<'a>(
         // If it can't possibly expand to a string, provide diagnostics here to include other
         // things it could have been.
         match template.kind {
-            ast::ExprKind::Lit(ast::Lit { kind: ast::LitKind::Str(..), .. }) => {}
+            ast::ExprKind::Lit(token_lit)
+                if matches!(
+                    token_lit.kind,
+                    token::LitKind::Str | token::LitKind::StrRaw(_)
+                ) => {}
             ast::ExprKind::MacCall(..) => {}
             _ => {
                 let errstr = if is_global_asm {
@@ -1,6 +1,7 @@
 use rustc_ast as ast;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_expand::base::{self, DummyResult};
+use rustc_session::errors::report_lit_error;
 use rustc_span::symbol::Symbol;
 
 use std::string::String;
@@ -18,28 +19,28 @@ pub fn expand_concat(
     let mut has_errors = false;
     for e in es {
         match e.kind {
-            ast::ExprKind::Lit(ref lit) => match lit.kind {
-                ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
+            ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+                Ok(ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _)) => {
                     accumulator.push_str(s.as_str());
                 }
-                ast::LitKind::Char(c) => {
+                Ok(ast::LitKind::Char(c)) => {
                     accumulator.push(c);
                 }
-                ast::LitKind::Int(
-                    i,
-                    ast::LitIntType::Unsigned(_)
-                    | ast::LitIntType::Signed(_)
-                    | ast::LitIntType::Unsuffixed,
-                ) => {
+                Ok(ast::LitKind::Int(i, _)) => {
                     accumulator.push_str(&i.to_string());
                 }
-                ast::LitKind::Bool(b) => {
+                Ok(ast::LitKind::Bool(b)) => {
                     accumulator.push_str(&b.to_string());
                 }
-                ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..) => {
+                Ok(ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..)) => {
                     cx.span_err(e.span, "cannot concatenate a byte string literal");
+                    has_errors = true;
                 }
-                ast::LitKind::Err => {
+                Ok(ast::LitKind::Err) => {
+                    has_errors = true;
+                }
+                Err(err) => {
+                    report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span);
                     has_errors = true;
                 }
             },
@@ -2,18 +2,21 @@ use rustc_ast as ast;
 use rustc_ast::{ptr::P, tokenstream::TokenStream};
 use rustc_errors::Applicability;
 use rustc_expand::base::{self, DummyResult};
+use rustc_span::Span;
 
 /// Emits errors for literal expressions that are invalid inside and outside of an array.
-fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_nested: bool) {
-    let ast::ExprKind::Lit(lit) = &expr.kind else {
-        unreachable!();
-    };
-    match lit.kind {
-        ast::LitKind::Char(_) => {
-            let mut err = cx.struct_span_err(expr.span, "cannot concatenate character literals");
-            if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+fn invalid_type_err(
+    cx: &mut base::ExtCtxt<'_>,
+    token_lit: ast::token::Lit,
+    span: Span,
+    is_nested: bool,
+) {
+    match ast::LitKind::from_token_lit(token_lit) {
+        Ok(ast::LitKind::Char(_)) => {
+            let mut err = cx.struct_span_err(span, "cannot concatenate character literals");
+            if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
                 err.span_suggestion(
-                    expr.span,
+                    span,
                     "try using a byte character",
                     format!("b{}", snippet),
                     Applicability::MachineApplicable,
@@ -21,13 +24,13 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
                 .emit();
             }
         }
-        ast::LitKind::Str(_, _) => {
-            let mut err = cx.struct_span_err(expr.span, "cannot concatenate string literals");
+        Ok(ast::LitKind::Str(_, _)) => {
+            let mut err = cx.struct_span_err(span, "cannot concatenate string literals");
             // suggestion would be invalid if we are nested
             if !is_nested {
-                if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+                if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
                     err.span_suggestion(
-                        expr.span,
+                        span,
                         "try using a byte string",
                         format!("b{}", snippet),
                         Applicability::MachineApplicable,
@@ -36,18 +39,18 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
             }
         err.emit();
         }
-        ast::LitKind::Float(_, _) => {
-            cx.span_err(expr.span, "cannot concatenate float literals");
+        Ok(ast::LitKind::Float(_, _)) => {
+            cx.span_err(span, "cannot concatenate float literals");
         }
-        ast::LitKind::Bool(_) => {
-            cx.span_err(expr.span, "cannot concatenate boolean literals");
+        Ok(ast::LitKind::Bool(_)) => {
+            cx.span_err(span, "cannot concatenate boolean literals");
         }
-        ast::LitKind::Err => {}
-        ast::LitKind::Int(_, _) if !is_nested => {
-            let mut err = cx.struct_span_err(expr.span, "cannot concatenate numeric literals");
-            if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+        Ok(ast::LitKind::Err) => {}
+        Ok(ast::LitKind::Int(_, _)) if !is_nested => {
+            let mut err = cx.struct_span_err(span, "cannot concatenate numeric literals");
+            if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
                 err.span_suggestion(
-                    expr.span,
+                    span,
                     "try wrapping the number in an array",
                     format!("[{}]", snippet),
                     Applicability::MachineApplicable,
@@ -55,15 +58,15 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
             }
        err.emit();
        }
-        ast::LitKind::Int(
+        Ok(ast::LitKind::Int(
             val,
             ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
-        ) => {
+        )) => {
             assert!(val > u8::MAX.into()); // must be an error
-            cx.span_err(expr.span, "numeric literal is out of bounds");
+            cx.span_err(span, "numeric literal is out of bounds");
         }
-        ast::LitKind::Int(_, _) => {
-            cx.span_err(expr.span, "numeric literal is not a `u8`");
+        Ok(ast::LitKind::Int(_, _)) => {
+            cx.span_err(span, "numeric literal is not a `u8`");
         }
         _ => unreachable!(),
     }
@@ -83,14 +86,14 @@ fn handle_array_element(
             *has_errors = true;
             None
         }
-        ast::ExprKind::Lit(ref lit) => match lit.kind {
-            ast::LitKind::Int(
+        ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+            Ok(ast::LitKind::Int(
                 val,
                 ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
-            ) if val <= u8::MAX.into() => Some(val as u8),
+            )) if val <= u8::MAX.into() => Some(val as u8),
 
-            ast::LitKind::Byte(val) => Some(val),
-            ast::LitKind::ByteStr(_) => {
+            Ok(ast::LitKind::Byte(val)) => Some(val),
+            Ok(ast::LitKind::ByteStr(_)) => {
                 if !*has_errors {
                     cx.struct_span_err(expr.span, "cannot concatenate doubly nested array")
                         .note("byte strings are treated as arrays of bytes")
@@ -102,7 +105,7 @@ fn handle_array_element(
             }
             _ => {
                 if !*has_errors {
-                    invalid_type_err(cx, expr, true);
+                    invalid_type_err(cx, token_lit, expr.span, true);
                 }
                 *has_errors = true;
                 None
@@ -148,9 +151,9 @@ pub fn expand_concat_bytes(
                 }
             }
             ast::ExprKind::Repeat(ref expr, ref count) => {
-                if let ast::ExprKind::Lit(ast::Lit {
-                    kind: ast::LitKind::Int(count_val, _), ..
-                }) = count.value.kind
+                if let ast::ExprKind::Lit(token_lit) = count.value.kind
+                    && let Ok(ast::LitKind::Int(count_val, _)) =
+                        ast::LitKind::from_token_lit(token_lit)
                 {
                     if let Some(elem) =
                         handle_array_element(cx, &mut has_errors, &mut missing_literals, expr)
@@ -163,16 +166,16 @@ pub fn expand_concat_bytes(
                     cx.span_err(count.value.span, "repeat count is not a positive number");
                 }
             }
-            ast::ExprKind::Lit(ref lit) => match lit.kind {
-                ast::LitKind::Byte(val) => {
+            ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+                Ok(ast::LitKind::Byte(val)) => {
                     accumulator.push(val);
                 }
-                ast::LitKind::ByteStr(ref bytes) => {
+                Ok(ast::LitKind::ByteStr(ref bytes)) => {
                     accumulator.extend_from_slice(&bytes);
                 }
                 _ => {
                     if !has_errors {
-                        invalid_type_err(cx, &e, false);
+                        invalid_type_err(cx, token_lit, e.span, false);
                     }
                     has_errors = true;
                 }
@@ -1226,10 +1226,10 @@ pub fn expr_to_spanned_string<'a>(
     let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
 
     Err(match expr.kind {
-        ast::ExprKind::Lit(ref l) => match l.kind {
-            ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)),
-            ast::LitKind::ByteStr(_) => {
-                let mut err = cx.struct_span_err(l.span, err_msg);
+        ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+            Ok(ast::LitKind::Str(s, style)) => return Ok((s, style, expr.span)),
+            Ok(ast::LitKind::ByteStr(_)) => {
+                let mut err = cx.struct_span_err(expr.span, err_msg);
                 let span = expr.span.shrink_to_lo();
                 err.span_suggestion(
                     span.with_hi(span.lo() + BytePos(1)),
@@ -1239,8 +1239,9 @@ pub fn expr_to_spanned_string<'a>(
                 );
                 Some((err, true))
             }
-            ast::LitKind::Err => None,
-            _ => Some((cx.struct_span_err(l.span, err_msg), false)),
+            Ok(ast::LitKind::Err) => None,
+            Err(_) => None,
+            _ => Some((cx.struct_span_err(expr.span, err_msg), false)),
         },
         ast::ExprKind::Err => None,
         _ => Some((cx.struct_span_err(expr.span, err_msg), false)),
@@ -334,8 +334,8 @@ impl<'a> ExtCtxt<'a> {
     }
 
     fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
-        let lit = ast::Lit::from_lit_kind(lit_kind, span);
-        self.expr(span, ast::ExprKind::Lit(lit))
+        let token_lit = lit_kind.to_token_lit();
+        self.expr(span, ast::ExprKind::Lit(token_lit))
     }
 
     pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
@@ -516,14 +516,14 @@ impl server::TokenStream for Rustc<'_, '_> {
         // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
         // be recovered in the general case.
         match &expr.kind {
-            ast::ExprKind::Lit(l) if l.token_lit.kind == token::Bool => {
+            ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
                 Ok(tokenstream::TokenStream::token_alone(
-                    token::Ident(l.token_lit.symbol, false),
-                    l.span,
+                    token::Ident(token_lit.symbol, false),
+                    expr.span,
                 ))
             }
-            ast::ExprKind::Lit(l) => {
-                Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token_lit), l.span))
+            ast::ExprKind::Lit(token_lit) => {
+                Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
             }
             ast::ExprKind::IncludedBytes(bytes) => {
                 let lit = ast::Lit::from_included_bytes(bytes, expr.span);
@@ -533,16 +533,13 @@ impl server::TokenStream for Rustc<'_, '_> {
                 ))
             }
             ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
-                ast::ExprKind::Lit(l) => match l.token_lit {
+                ast::ExprKind::Lit(token_lit) => match token_lit {
                     token::Lit { kind: token::Integer | token::Float, .. } => {
                         Ok(Self::TokenStream::from_iter([
                             // FIXME: The span of the `-` token is lost when
                             // parsing, so we cannot faithfully recover it here.
                             tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
-                            tokenstream::TokenTree::token_alone(
-                                token::Literal(l.token_lit),
-                                l.span,
-                            ),
+                            tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                         ]))
                     }
                     _ => Err(()),
@@ -167,11 +167,15 @@ pub enum DocStyle {
     Inner,
 }
 
+// Note that the suffix is *not* considered when deciding the `LiteralKind` in
+// this type. This means that float literals like `1f32` are classified by this
+// type as `Int`. (Compare against `rustc_ast::token::LitKind` and
+// `rustc_ast::ast::LitKind`.)
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum LiteralKind {
-    /// "12_u8", "0o100", "0b120i99"
+    /// "12_u8", "0o100", "0b120i99", "1f32".
     Int { base: Base, empty_int: bool },
-    /// "12.34f32", "0b100.100"
+    /// "12.34f32", "1e3", but not "1f32".
     Float { base: Base, empty_exponent: bool },
     /// "'a'", "'\\'", "'''", "';"
     Char { terminated: bool },
@@ -98,9 +98,10 @@ fn pierce_parens(mut expr: &ast::Expr) -> &ast::Expr {
 impl EarlyLintPass for WhileTrue {
     fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
         if let ast::ExprKind::While(cond, _, label) = &e.kind
-            && let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).kind
-            && let ast::LitKind::Bool(true) = lit.kind
-            && !lit.span.from_expansion()
+            && let cond = pierce_parens(cond)
+            && let ast::ExprKind::Lit(token_lit) = cond.kind
+            && let token::Lit { kind: token::Bool, symbol: kw::True, .. } = token_lit
+            && !cond.span.from_expansion()
         {
             let condition_span = e.span.with_hi(cond.span.hi());
             cx.struct_span_lint(
@@ -123,23 +123,22 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
 
     fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
         // byte strings are already handled well enough by `EscapeError::NonAsciiCharInByteString`
-        let (text, span, padding) = match &expr.kind {
-            ast::ExprKind::Lit(ast::Lit { token_lit, kind, span }) => {
+        match &expr.kind {
+            ast::ExprKind::Lit(token_lit) => {
                 let text = token_lit.symbol;
                 if !contains_text_flow_control_chars(text.as_str()) {
                     return;
                 }
-                let padding = match kind {
+                let padding = match token_lit.kind {
                     // account for `"` or `'`
-                    ast::LitKind::Str(_, ast::StrStyle::Cooked) | ast::LitKind::Char(_) => 1,
+                    ast::token::LitKind::Str | ast::token::LitKind::Char => 1,
                     // account for `r###"`
-                    ast::LitKind::Str(_, ast::StrStyle::Raw(val)) => *val as u32 + 2,
+                    ast::token::LitKind::StrRaw(n) => n as u32 + 2,
                     _ => return,
                 };
-                (text, span, padding)
+                self.lint_text_direction_codepoint(cx, text, expr.span, padding, true, "literal");
             }
-            _ => return,
+            _ => {}
         };
-        self.lint_text_direction_codepoint(cx, text, *span, padding, true, "literal");
     }
 }
@@ -304,61 +304,6 @@ pub(crate) struct FloatLiteralRequiresIntegerPart {
     pub correct: String,
 }
 
-#[derive(Diagnostic)]
-#[diag(parser_invalid_int_literal_width)]
-#[help]
-pub(crate) struct InvalidIntLiteralWidth {
-    #[primary_span]
-    pub span: Span,
-    pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_base_prefix)]
-#[note]
-pub(crate) struct InvalidNumLiteralBasePrefix {
-    #[primary_span]
-    #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
-    pub span: Span,
-    pub fixed: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_suffix)]
-#[help]
-pub(crate) struct InvalidNumLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_width)]
-#[help]
-pub(crate) struct InvalidFloatLiteralWidth {
-    #[primary_span]
-    pub span: Span,
-    pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_suffix)]
-#[help]
-pub(crate) struct InvalidFloatLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_int_literal_too_large)]
-pub(crate) struct IntLiteralTooLarge {
-    #[primary_span]
-    pub span: Span,
-}
-
 #[derive(Diagnostic)]
 #[diag(parser_missing_semicolon_before_array)]
 pub(crate) struct MissingSemicolonBeforeArray {
@@ -740,41 +685,6 @@ pub(crate) struct InvalidInterpolatedExpression {
     pub span: Span,
 }
 
-#[derive(Diagnostic)]
-#[diag(parser_hexadecimal_float_literal_not_supported)]
-pub(crate) struct HexadecimalFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_octal_float_literal_not_supported)]
-pub(crate) struct OctalFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_binary_float_literal_not_supported)]
-pub(crate) struct BinaryFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_literal_suffix)]
-pub(crate) struct InvalidLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    // FIXME(#100717)
-    pub kind: String,
-    pub suffix: Symbol,
-}
-
 #[derive(Diagnostic)]
 #[diag(parser_invalid_literal_suffix_on_tuple_index)]
 pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
@@ -661,6 +661,7 @@ impl<'a> StringReader<'a> {
         prefix_len: u32,
         postfix_len: u32,
     ) -> (token::LitKind, Symbol) {
+        let mut has_fatal_err = false;
         let content_start = start + BytePos(prefix_len);
         let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
@@ -672,6 +673,9 @@ impl<'a> StringReader<'a> {
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
                 let span = self.mk_sp(lo, hi);
+                if err.is_fatal() {
+                    has_fatal_err = true;
+                }
                 emit_unescape_error(
                     &self.sess.span_diagnostic,
                     lit_content,
@@ -683,7 +687,14 @@ impl<'a> StringReader<'a> {
                 );
             }
         });
+
+        // We normally exclude the quotes for the symbol, but for errors we
+        // include it because it results in clearer error messages.
+        if !has_fatal_err {
             (kind, Symbol::intern(lit_content))
+        } else {
+            (token::Err, self.symbol_from_to(start, end))
+        }
     }
 }
 
@@ -316,8 +316,8 @@ impl<'a> Parser<'a> {
     }
 
     pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
-        let lit = self.parse_lit()?;
-        debug!("checking if {:?} is unusuffixed", lit);
+        let lit = self.parse_ast_lit()?;
+        debug!("checking if {:?} is unsuffixed", lit);
 
         if !lit.kind.is_unsuffixed() {
             self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
@@ -7,35 +7,30 @@ use super::{
 };
 use crate::errors::{
     ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
-    BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
-    ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
-    DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedEqForLetExpr,
-    ExpectedExpressionFoundLet, FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart,
-    FoundExprWouldBeStmt, HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
-    IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
+    BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct, ComparisonInterpretedAsGeneric,
+    ComparisonOrShiftInterpretedAsGenericSugg, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
+    ExpectedElseBlock, ExpectedEqForLetExpr, ExpectedExpressionFoundLet,
+    FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
+    IfExpressionMissingCondition, IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub,
     InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
-    InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
-    InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
-    InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
-    InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
+    InvalidInterpolatedExpression, InvalidLiteralSuffixOnTupleIndex, InvalidLogicalOperator,
+    InvalidLogicalOperatorSub, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
     LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
     MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
     MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
     NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
-    OctalFloatLiteralNotSupported, OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
+    OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
     RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
     StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
     UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
 };
 use crate::maybe_recover_from_interpolated_ty_qpath;
 
 use core::mem;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::Spacing;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
-use rustc_ast::util::literal::LitError;
 use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
 use rustc_ast::visit::Visitor;
 use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
@@ -47,7 +42,7 @@ use rustc_errors::{
     Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
     StashKey,
 };
-use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_span::source_map::{self, Span, Spanned};
@@ -1415,9 +1410,9 @@ impl<'a> Parser<'a> {
 
     fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        match self.parse_opt_lit() {
-            Some(literal) => {
-                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal));
+        match self.parse_opt_token_lit() {
+            Some((token_lit, _)) => {
+                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
                 self.maybe_recover_from_bad_qpath(expr)
             }
             None => self.try_macro_suggestion(),
@@ -1548,7 +1543,7 @@ impl<'a> Parser<'a> {
                 })
             });
             consume_colon = false;
-            Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
+            Ok(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
         } else if !ate_colon
             && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
         {
@@ -1625,9 +1620,9 @@ impl<'a> Parser<'a> {
 
     /// Emit an error when a char is parsed as a lifetime because of a missing quote
     pub(super) fn recover_unclosed_char(
-        &mut self,
+        &self,
         lifetime: Ident,
-        err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
+        err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
     ) -> ast::Lit {
         if let Some(mut diag) =
             self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
@@ -1649,9 +1644,10 @@ impl<'a> Parser<'a> {
                 )
                 .emit();
         }
+        let name = lifetime.without_first_quote().name;
         ast::Lit {
-            token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None),
-            kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')),
+            token_lit: token::Lit::new(token::LitKind::Char, name, None),
+            kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
             span: lifetime.span,
         }
     }
@@ -1765,7 +1761,7 @@ impl<'a> Parser<'a> {
     /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
     /// and returns `None` if the next token is not literal at all.
     pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
-        match self.parse_opt_lit() {
+        match self.parse_opt_ast_lit() {
             Some(lit) => match lit.kind {
                 ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
                     style,
@@ -1780,8 +1776,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        self.parse_opt_lit().ok_or(()).or_else(|()| {
+    fn handle_missing_lit(&mut self) -> PResult<'a, Lit> {
         if let token::Interpolated(inner) = &self.token.kind {
             let expr = match inner.as_ref() {
                 token::NtExpr(expr) => Some(expr),
@@ -1798,7 +1793,7 @@ impl<'a> Parser<'a> {
             }
         }
         let token = self.token.clone();
-        let err = |self_: &mut Self| {
+        let err = |self_: &Self| {
             let msg = format!("unexpected token: {}", super::token_descr(&token));
             self_.struct_span_err(token.span, &msg)
         };
@@ -1809,12 +1804,19 @@ impl<'a> Parser<'a> {
         } else {
             Err(err(self))
         }
-        })
     }
 
-    /// Matches `lit = true | false | token_lit`.
-    /// Returns `None` if the next token is not a literal.
-    pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
+    pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+        self.parse_opt_token_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit().map(|lit| (lit.token_lit, lit.span)))
+    }
+
+    pub(super) fn parse_ast_lit(&mut self) -> PResult<'a, Lit> {
+        self.parse_opt_ast_lit().ok_or(()).or_else(|()| self.handle_missing_lit())
+    }
+
+    fn recover_after_dot(&mut self) -> Option<Token> {
         let mut recovered = None;
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
@@ -1840,20 +1842,40 @@ impl<'a> Parser<'a> {
             }
         }
 
+        recovered
+    }
+
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+        let recovered = self.recover_after_dot();
         let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token) {
+        let span = token.span;
+        token::Lit::from_token(token).map(|token_lit| {
+            self.bump();
+            (token_lit, span)
+        })
+    }
+
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_ast_lit(&mut self) -> Option<Lit> {
+        let recovered = self.recover_after_dot();
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match token::Lit::from_token(token) {
+            Some(token_lit) => {
+                match Lit::from_token_lit(token_lit, token.span) {
                     Ok(lit) => {
                         self.bump();
                         Some(lit)
                     }
-            Err(LitError::NotLiteral) => None,
                     Err(err) => {
                         let span = token.span;
                         let token::Literal(lit) = token.kind else {
                             unreachable!();
                         };
                         self.bump();
-                self.report_lit_error(err, lit, span);
+                        report_lit_error(&self.sess, err, lit, span);
                         // Pack possible quotes and prefixes from the original literal into
                         // the error literal's symbol so they can be pretty-printed faithfully.
                         let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
@@ -1863,77 +1885,7 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-    fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
-        // Checks if `s` looks like i32 or u1234 etc.
-        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-            s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-        }
-
-        // Try to lowercase the prefix if it's a valid base prefix.
-        fn fix_base_capitalisation(s: &str) -> Option<String> {
-            if let Some(stripped) = s.strip_prefix('B') {
-                Some(format!("0b{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('O') {
-                Some(format!("0o{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('X') {
-                Some(format!("0x{stripped}"))
-            } else {
-                None
-            }
-        }
-
-        let token::Lit { kind, suffix, .. } = lit;
-        match err {
-            // `NotLiteral` is not an error by itself, so we don't report
-            // it and give the parser opportunity to try something else.
-            LitError::NotLiteral => {}
-            // `LexerError` *is* an error, but it was already reported
-            // by lexer, so here we don't report it the second time.
-            LitError::LexerError => {}
-            LitError::InvalidSuffix => {
-                if let Some(suffix) = suffix {
-                    self.sess.emit_err(InvalidLiteralSuffix {
-                        span,
-                        kind: format!("{}", kind.descr()),
-                        suffix,
-                    });
-                }
-            }
-            LitError::InvalidIntSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['i', 'u'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    self.sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
-                } else if let Some(fixed) = fix_base_capitalisation(suf) {
-                    self.sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
-                } else {
-                    self.sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
-                }
-            }
-            LitError::InvalidFloatSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['f'], suf) {
-                    // If it looks like a width, try to be helpful.
-                    self.sess
-                        .emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
-                } else {
-                    self.sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
-                }
-            }
-            LitError::NonDecimalFloat(base) => {
-                match base {
-                    16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
-                    8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
-                    2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
-                    _ => unreachable!(),
-                };
-            }
-            LitError::IntTooLarge => {
-                self.sess.emit_err(IntLiteralTooLarge { span });
-            }
+            None => None,
         }
     }
 
@ -1958,8 +1910,8 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
let minus_present = self.eat(&token::BinOp(token::Minus));
|
let minus_present = self.eat(&token::BinOp(token::Minus));
|
||||||
let lit = self.parse_lit()?;
|
let (token_lit, span) = self.parse_token_lit()?;
|
||||||
let expr = self.mk_expr(lit.span, ExprKind::Lit(lit));
|
let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
|
||||||
|
|
||||||
if minus_present {
|
if minus_present {
|
||||||
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
|
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
|
||||||
|
@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
err.span_label(self_.token.span, format!("expected {}", expected));
err
});
PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {

@ -359,7 +359,7 @@ impl<'a> Parser<'a> {
/// report error for `let 1x = 123`
pub fn report_invalid_identifier_error(&mut self) -> PResult<'a, ()> {
if let token::Literal(lit) = self.token.uninterpolate().kind &&
let Err(_) = rustc_ast::Lit::from_token(&self.token) &&
rustc_ast::Lit::from_token(&self.token).is_none() &&
(lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
self.look_ahead(1, |t| matches!(t.kind, token::Eq) || matches!(t.kind, token::Colon ) ) {
return Err(self.sess.create_err(InvalidIdentiferStartsWithNumber { span: self.token.span }));

@ -49,10 +49,12 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
MetaItemKind::List(nmis)
}
MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
if let ast::ExprKind::Lit(lit) = &expr.kind {
if let ast::ExprKind::Lit(token_lit) = expr.kind
if !lit.kind.is_unsuffixed() {
&& let Ok(lit) = ast::Lit::from_token_lit(token_lit, expr.span)
{
if token_lit.suffix.is_some() {
let mut err = sess.span_diagnostic.struct_span_err(
lit.span,
expr.span,
"suffixed literals are not allowed in attributes",
);
err.help(

@ -61,7 +63,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
);
return Err(err);
} else {
MetaItemKind::NameValue(lit.clone())
MetaItemKind::NameValue(lit)
}
} else {
// The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
@ -1,6 +1,9 @@
use std::num::NonZeroU32;

use crate::cgu_reuse_tracker::CguReuse;
use crate::parse::ParseSess;
use rustc_ast::token;
use rustc_ast::util::literal::LitError;
use rustc_errors::MultiSpan;
use rustc_macros::Diagnostic;
use rustc_span::{Span, Symbol};

@ -191,3 +194,162 @@ pub enum UnleashedFeatureHelp {
span: Span,
},
}

#[derive(Diagnostic)]
#[diag(parser_invalid_literal_suffix)]
pub(crate) struct InvalidLiteralSuffix {
#[primary_span]
#[label]
pub span: Span,
// FIXME(#100717)
pub kind: String,
pub suffix: Symbol,
}

#[derive(Diagnostic)]
#[diag(parser_invalid_int_literal_width)]
#[help]
pub(crate) struct InvalidIntLiteralWidth {
#[primary_span]
pub span: Span,
pub width: String,
}

#[derive(Diagnostic)]
#[diag(parser_invalid_num_literal_base_prefix)]
#[note]
pub(crate) struct InvalidNumLiteralBasePrefix {
#[primary_span]
#[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
pub span: Span,
pub fixed: String,
}

#[derive(Diagnostic)]
#[diag(parser_invalid_num_literal_suffix)]
#[help]
pub(crate) struct InvalidNumLiteralSuffix {
#[primary_span]
#[label]
pub span: Span,
pub suffix: String,
}

#[derive(Diagnostic)]
#[diag(parser_invalid_float_literal_width)]
#[help]
pub(crate) struct InvalidFloatLiteralWidth {
#[primary_span]
pub span: Span,
pub width: String,
}

#[derive(Diagnostic)]
#[diag(parser_invalid_float_literal_suffix)]
#[help]
pub(crate) struct InvalidFloatLiteralSuffix {
#[primary_span]
#[label]
pub span: Span,
pub suffix: String,
}

#[derive(Diagnostic)]
#[diag(parser_int_literal_too_large)]
pub(crate) struct IntLiteralTooLarge {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parser_hexadecimal_float_literal_not_supported)]
pub(crate) struct HexadecimalFloatLiteralNotSupported {
#[primary_span]
#[label(parser_not_supported)]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parser_octal_float_literal_not_supported)]
pub(crate) struct OctalFloatLiteralNotSupported {
#[primary_span]
#[label(parser_not_supported)]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parser_binary_float_literal_not_supported)]
pub(crate) struct BinaryFloatLiteralNotSupported {
#[primary_span]
#[label(parser_not_supported)]
pub span: Span,
}

pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span: Span) {
// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
}

// Try to lowercase the prefix if it's a valid base prefix.
fn fix_base_capitalisation(s: &str) -> Option<String> {
if let Some(stripped) = s.strip_prefix('B') {
Some(format!("0b{stripped}"))
} else if let Some(stripped) = s.strip_prefix('O') {
Some(format!("0o{stripped}"))
} else if let Some(stripped) = s.strip_prefix('X') {
Some(format!("0x{stripped}"))
} else {
None
}
}

let token::Lit { kind, suffix, .. } = lit;
match err {
// `LexerError` is an error, but it was already reported
// by lexer, so here we don't report it the second time.
LitError::LexerError => {}
LitError::InvalidSuffix => {
if let Some(suffix) = suffix {
sess.emit_err(InvalidLiteralSuffix {
span,
kind: format!("{}", kind.descr()),
suffix,
});
}
}
LitError::InvalidIntSuffix => {
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['i', 'u'], &suf) {
// If it looks like a width, try to be helpful.
sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
} else if let Some(fixed) = fix_base_capitalisation(suf) {
sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
} else {
sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
}
}
LitError::InvalidFloatSuffix => {
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['f'], suf) {
// If it looks like a width, try to be helpful.
sess.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
} else {
sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
}
}
LitError::NonDecimalFloat(base) => {
match base {
16 => sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
8 => sess.emit_err(OctalFloatLiteralNotSupported { span }),
2 => sess.emit_err(BinaryFloatLiteralNotSupported { span }),
_ => unreachable!(),
};
}
LitError::IntTooLarge => {
sess.emit_err(IntLiteralTooLarge { span });
}
}
}
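Editor's note: the diagnostic structs added above all follow the same derive pattern. The following is an illustrative sketch only, not part of this commit; the struct name and slug are invented, and a real slug would need a matching entry in the parser's Fluent resources.

use rustc_macros::Diagnostic;
use rustc_span::Span;

// Hypothetical example of the pattern used by the structs above.
#[derive(Diagnostic)]
#[diag(parser_example_bad_literal)] // slug invented for illustration
pub(crate) struct ExampleBadLiteral {
    #[primary_span]
    #[label]
    pub span: Span,
    pub suffix: String,
}

// Emitted the same way as the real diagnostics in `report_lit_error`, e.g.
// sess.emit_err(ExampleBadLiteral { span, suffix: suf.to_string() });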
@ -1,3 +1,9 @@
error[E0425]: cannot find value `a̐é` in this scope
--> $DIR/unicode_2.rs:4:13
|
LL | let _ = a̐é;
| ^^ not found in this scope

error: invalid width `7` for integer literal
--> $DIR/unicode_2.rs:2:25
|

@ -14,12 +20,6 @@ LL | let _ = ("아あ", 1i42);
|
= help: valid widths are 8, 16, 32, 64 and 128

error[E0425]: cannot find value `a̐é` in this scope
--> $DIR/unicode_2.rs:4:13
|
LL | let _ = a̐é;
| ^^ not found in this scope

error: aborting due to 3 previous errors

For more information about this error, try `rustc --explain E0425`.
@ -1,31 +1,80 @@
// This test is about the treatment of invalid literals. In particular, some
// literals are only considered invalid if they survive to HIR lowering.
//
// Literals with bad suffixes
// --------------------------
// Literals consist of a primary part and an optional suffix.
// https://doc.rust-lang.org/reference/tokens.html#suffixes says:
//
// Any kind of literal (string, integer, etc) with any suffix is valid as a
// token, and can be passed to a macro without producing an error. The macro
// itself will decide how to interpret such a token and whether to produce an
// error or not.
//
// ```
// macro_rules! blackhole { ($tt:tt) => () }
// blackhole!("string"suffix); // OK
// ```
//
// However, suffixes on literal tokens parsed as Rust code are restricted.
// Any suffixes are rejected on non-numeric literal tokens, and numeric
// literal tokens are accepted only with suffixes from the list below.
//
// Integer: u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize
// Floating-point: f32, f64
//
// This means that something like `"string"any_suffix` is a token accepted by
// the lexer, but rejected later for being an invalid combination of primary
// part and suffix.
//
// `0b10f32` is a similar case. `0b10` is a valid primary part that is a valid
// *integer* literal when no suffix is present. It only causes an error later
// when combined with the `f32` float suffix.
//
// However, `0b10.0f32` is different. It is rejected by the lexer because
// `0b10.0` is not a valid token even on its own.
//
// This difference is unfortunate, but it's baked into the language now.
//
// Too-large integer literals
// --------------------------
// https://doc.rust-lang.org/reference/tokens.html#integer-literals says that
// literals like `128_i8` and `256_u8` "are too big for their type, but are
// still valid tokens".

macro_rules! sink {
($($x:tt;)*) => {()}
}

// The invalid literals are ignored because the macro consumes them.
// The invalid literals are ignored because the macro consumes them. Except for
// `0b10.0f32` because it's a lexer error.
const _: () = sink! {
"string"any_suffix; // OK
10u123; // OK
10.0f123; // OK
0b10f32; // OK
0b10.0f32; //~ ERROR binary float literal is not supported
999340282366920938463463374607431768211455999; // OK
};

// The invalid literals cause errors.
// The invalid literals used to cause errors, but this was changed by #102944.
// Except for `0b010.0f32`, because it's a lexer error.
#[cfg(FALSE)]
fn configured_out() {
"string"any_suffix; //~ ERROR suffixes on string literals are invalid
"string"any_suffix; // OK
10u123; //~ ERROR invalid width `123` for integer literal
10u123; // OK
10.0f123; //~ ERROR invalid width `123` for float literal
10.0f123; // OK
0b10f32; //~ ERROR binary float literal is not supported
0b10f32; // OK
999340282366920938463463374607431768211455999; //~ ERROR integer literal is too large
0b10.0f32; //~ ERROR binary float literal is not supported
999340282366920938463463374607431768211455999; // OK
}

// The invalid literals cause errors.
// All the invalid literals cause errors.
fn main() {
"string"any_suffix; //~ ERROR suffixes on string literals are invalid
10u123; //~ ERROR invalid width `123` for integer literal
10.0f123; //~ ERROR invalid width `123` for float literal
0b10f32; //~ ERROR binary float literal is not supported
0b10.0f32; //~ ERROR binary float literal is not supported
999340282366920938463463374607431768211455999; //~ ERROR integer literal is too large
}
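Editor's note: the staging rules spelled out in the test's comment can be reproduced with a few lines of ordinary Rust. This is an illustrative sketch, not part of the commit; the macro name and the suffix are invented.

// A suffixed literal is a single, valid token, so a macro that discards its
// input accepts it; the same literal used as an expression is rejected when
// it is lowered.
macro_rules! discard {
    ($($tt:tt)*) => {};
}

discard!("hello"qux); // OK: the token never becomes an expression

fn main() {
    // Uncommenting this line produces "suffixes on string literals are invalid".
    // let _ = "hello"qux;
}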
@ -1,11 +1,29 @@
error: binary float literal is not supported
--> $DIR/error-stage.rs:56:5
|
LL | 0b10.0f32;
| ^^^^^^

error: binary float literal is not supported
--> $DIR/error-stage.rs:68:5
|
LL | 0b10.0f32;
| ^^^^^^

error: binary float literal is not supported
--> $DIR/error-stage.rs:78:5
|
LL | 0b10.0f32;
| ^^^^^^

error: suffixes on string literals are invalid
--> $DIR/error-stage.rs:17:5
--> $DIR/error-stage.rs:74:5
|
LL | "string"any_suffix;
| ^^^^^^^^^^^^^^^^^^ invalid suffix `any_suffix`

error: invalid width `123` for integer literal
--> $DIR/error-stage.rs:18:5
--> $DIR/error-stage.rs:75:5
|
LL | 10u123;
| ^^^^^^

@ -13,7 +31,7 @@ LL | 10u123;
= help: valid widths are 8, 16, 32, 64 and 128

error: invalid width `123` for float literal
--> $DIR/error-stage.rs:19:5
--> $DIR/error-stage.rs:76:5
|
LL | 10.0f123;
| ^^^^^^^^

@ -21,50 +39,16 @@ LL | 10.0f123;
= help: valid widths are 32 and 64

error: binary float literal is not supported
--> $DIR/error-stage.rs:20:5
--> $DIR/error-stage.rs:77:5
|
LL | 0b10f32;
| ^^^^^^^ not supported

error: integer literal is too large
--> $DIR/error-stage.rs:21:5
--> $DIR/error-stage.rs:79:5
|
LL | 999340282366920938463463374607431768211455999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: suffixes on string literals are invalid
error: aborting due to 8 previous errors
--> $DIR/error-stage.rs:26:5
|
LL | "string"any_suffix;
| ^^^^^^^^^^^^^^^^^^ invalid suffix `any_suffix`

error: invalid width `123` for integer literal
--> $DIR/error-stage.rs:27:5
|
LL | 10u123;
| ^^^^^^
|
= help: valid widths are 8, 16, 32, 64 and 128

error: invalid width `123` for float literal
--> $DIR/error-stage.rs:28:5
|
LL | 10.0f123;
| ^^^^^^^^
|
= help: valid widths are 32 and 64

error: binary float literal is not supported
--> $DIR/error-stage.rs:29:5
|
LL | 0b10f32;
| ^^^^^^^ not supported

error: integer literal is too large
--> $DIR/error-stage.rs:30:5
|
LL | 999340282366920938463463374607431768211455999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: aborting due to 10 previous errors
@ -28,12 +28,11 @@ fn main() {
}

#[rustc_dummy = "string"suffix]
//~^ ERROR suffixes on string literals are invalid
//~^ ERROR unexpected expression: `"string"suffix`
fn f() {}

#[must_use = "string"suffix]
//~^ ERROR suffixes on string literals are invalid
//~^ ERROR unexpected expression: `"string"suffix`
//~^^ ERROR malformed `must_use` attribute input
fn g() {}

#[link(name = "string"suffix)]

@ -10,6 +10,32 @@ error: suffixes on string literals are invalid
LL | "C"suffix
| ^^^^^^^^^ invalid suffix `suffix`

error: unexpected expression: `"string"suffix`
--> $DIR/bad-lit-suffixes.rs:30:17
|
LL | #[rustc_dummy = "string"suffix]
| ^^^^^^^^^^^^^^

error: unexpected expression: `"string"suffix`
--> $DIR/bad-lit-suffixes.rs:34:14
|
LL | #[must_use = "string"suffix]
| ^^^^^^^^^^^^^^

error: suffixes on string literals are invalid
--> $DIR/bad-lit-suffixes.rs:38:15
|
LL | #[link(name = "string"suffix)]
| ^^^^^^^^^^^^^^ invalid suffix `suffix`

error: invalid suffix `suffix` for number literal
--> $DIR/bad-lit-suffixes.rs:42:41
|
LL | #[rustc_layout_scalar_valid_range_start(0suffix)]
| ^^^^^^^ invalid suffix `suffix`
|
= help: the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)

error: suffixes on string literals are invalid
--> $DIR/bad-lit-suffixes.rs:12:5

@ -110,44 +136,5 @@ LL | 1.0e10suffix;
|
= help: valid suffixes are `f32` and `f64`

error: suffixes on string literals are invalid
error: aborting due to 20 previous errors
--> $DIR/bad-lit-suffixes.rs:30:17
|
LL | #[rustc_dummy = "string"suffix]
| ^^^^^^^^^^^^^^ invalid suffix `suffix`

error: suffixes on string literals are invalid
--> $DIR/bad-lit-suffixes.rs:34:14
|
LL | #[must_use = "string"suffix]
| ^^^^^^^^^^^^^^ invalid suffix `suffix`

error: malformed `must_use` attribute input
--> $DIR/bad-lit-suffixes.rs:34:1
|
LL | #[must_use = "string"suffix]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: the following are the possible correct uses
|
LL | #[must_use = "reason"]
|
LL | #[must_use]
|

error: suffixes on string literals are invalid
--> $DIR/bad-lit-suffixes.rs:39:15
|
LL | #[link(name = "string"suffix)]
| ^^^^^^^^^^^^^^ invalid suffix `suffix`

error: invalid suffix `suffix` for number literal
--> $DIR/bad-lit-suffixes.rs:43:41
|
LL | #[rustc_layout_scalar_valid_range_start(0suffix)]
| ^^^^^^^ invalid suffix `suffix`
|
= help: the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)

error: aborting due to 21 previous errors
@ -73,12 +73,21 @@ impl EarlyLintPass for AlmostCompleteLetterRange {
}

fn check_range(cx: &EarlyContext<'_>, span: Span, start: &Expr, end: &Expr, sugg: Option<(Span, &str)>) {
if let ExprKind::Lit(start_lit) = &start.peel_parens().kind
if let ExprKind::Lit(start_token_lit) = start.peel_parens().kind
&& let ExprKind::Lit(end_lit) = &end.peel_parens().kind
&& let ExprKind::Lit(end_token_lit) = end.peel_parens().kind
&& matches!(
(&start_lit.kind, &end_lit.kind),
(
(LitKind::Byte(b'a') | LitKind::Char('a'), LitKind::Byte(b'z') | LitKind::Char('z'))
LitKind::from_token_lit(start_token_lit),
| (LitKind::Byte(b'A') | LitKind::Char('A'), LitKind::Byte(b'Z') | LitKind::Char('Z'))
LitKind::from_token_lit(end_token_lit),
),
(
Ok(LitKind::Byte(b'a') | LitKind::Char('a')),
Ok(LitKind::Byte(b'z') | LitKind::Char('z'))
)
| (
Ok(LitKind::Byte(b'A') | LitKind::Char('A')),
Ok(LitKind::Byte(b'Z') | LitKind::Char('Z')),
)
)
&& !in_external_macro(cx.sess(), span)
{
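Editor's note: the clippy changes in this and the following files all use the same conversion: `ExprKind::Lit` now carries a `token::Lit`, and the semantic `LitKind` is recovered with the fallible `LitKind::from_token_lit`. A minimal sketch of that pattern, assuming the `rustc_ast` crate as a dependency; the helper name is invented.

use rustc_ast::ast::{Expr, ExprKind, LitKind};

// Extract an integer value from the new token-based literal representation.
// `from_token_lit` returns a `Result`, so invalid literals simply yield `None`.
fn int_value(expr: &Expr) -> Option<u128> {
    if let ExprKind::Lit(token_lit) = expr.kind {
        if let Ok(LitKind::Int(value, _)) = LitKind::from_token_lit(token_lit) {
            return Some(value);
        }
    }
    None
}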
@ -2,7 +2,8 @@

use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, Lit, LitKind};
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, LitKind};
use rustc_ast::token;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};

@ -52,8 +53,8 @@ enum Side {

impl IntPlusOne {
#[expect(clippy::cast_sign_loss)]
fn check_lit(lit: &Lit, target_value: i128) -> bool {
fn check_lit(token_lit: token::Lit, target_value: i128) -> bool {
if let LitKind::Int(value, ..) = lit.kind {
if let Ok(LitKind::Int(value, ..)) = LitKind::from_token_lit(token_lit) {
return value == (target_value as u128);
}
false

@ -65,11 +66,11 @@ impl IntPlusOne {
(BinOpKind::Ge, &ExprKind::Binary(ref lhskind, ref lhslhs, ref lhsrhs), _) => {
match (lhskind.node, &lhslhs.kind, &lhsrhs.kind) {
// `-1 + x`
(BinOpKind::Add, &ExprKind::Lit(ref lit), _) if Self::check_lit(lit, -1) => {
(BinOpKind::Add, &ExprKind::Lit(lit), _) if Self::check_lit(lit, -1) => {
Self::generate_recommendation(cx, binop, lhsrhs, rhs, Side::Lhs)
},
// `x - 1`
(BinOpKind::Sub, _, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
(BinOpKind::Sub, _, &ExprKind::Lit(lit)) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, lhslhs, rhs, Side::Lhs)
},
_ => None,

@ -81,10 +82,10 @@ impl IntPlusOne {
{
match (&rhslhs.kind, &rhsrhs.kind) {
// `y + 1` and `1 + y`
(&ExprKind::Lit(ref lit), _) if Self::check_lit(lit, 1) => {
(&ExprKind::Lit(lit), _) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, rhsrhs, lhs, Side::Rhs)
},
(_, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
(_, &ExprKind::Lit(lit)) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, rhslhs, lhs, Side::Rhs)
},
_ => None,

@ -96,10 +97,10 @@ impl IntPlusOne {
{
match (&lhslhs.kind, &lhsrhs.kind) {
// `1 + x` and `x + 1`
(&ExprKind::Lit(ref lit), _) if Self::check_lit(lit, 1) => {
(&ExprKind::Lit(lit), _) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, lhsrhs, rhs, Side::Lhs)
},
(_, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
(_, &ExprKind::Lit(lit)) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, lhslhs, rhs, Side::Lhs)
},
_ => None,

@ -109,11 +110,11 @@ impl IntPlusOne {
(BinOpKind::Le, _, &ExprKind::Binary(ref rhskind, ref rhslhs, ref rhsrhs)) => {
match (rhskind.node, &rhslhs.kind, &rhsrhs.kind) {
// `-1 + y`
(BinOpKind::Add, &ExprKind::Lit(ref lit), _) if Self::check_lit(lit, -1) => {
(BinOpKind::Add, &ExprKind::Lit(lit), _) if Self::check_lit(lit, -1) => {
Self::generate_recommendation(cx, binop, rhsrhs, lhs, Side::Rhs)
},
// `y - 1`
(BinOpKind::Sub, _, &ExprKind::Lit(ref lit)) if Self::check_lit(lit, 1) => {
(BinOpKind::Sub, _, &ExprKind::Lit(lit)) if Self::check_lit(lit, 1) => {
Self::generate_recommendation(cx, binop, rhslhs, lhs, Side::Rhs)
},
_ => None,
|
@ -5,11 +5,13 @@ use clippy_utils::diagnostics::span_lint_and_sugg;
|
|||||||
use clippy_utils::numeric_literal::{NumericLiteral, Radix};
|
use clippy_utils::numeric_literal::{NumericLiteral, Radix};
|
||||||
use clippy_utils::source::snippet_opt;
|
use clippy_utils::source::snippet_opt;
|
||||||
use if_chain::if_chain;
|
use if_chain::if_chain;
|
||||||
use rustc_ast::ast::{Expr, ExprKind, Lit, LitKind};
|
use rustc_ast::ast::{Expr, ExprKind, LitKind};
|
||||||
|
use rustc_ast::token;
|
||||||
use rustc_errors::Applicability;
|
use rustc_errors::Applicability;
|
||||||
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
|
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
|
||||||
use rustc_middle::lint::in_external_macro;
|
use rustc_middle::lint::in_external_macro;
|
||||||
use rustc_session::{declare_tool_lint, impl_lint_pass};
|
use rustc_session::{declare_tool_lint, impl_lint_pass};
|
||||||
|
use rustc_span::Span;
|
||||||
use std::iter;
|
use std::iter;
|
||||||
|
|
||||||
declare_clippy_lint! {
|
declare_clippy_lint! {
|
||||||
@ -236,8 +238,8 @@ impl EarlyLintPass for LiteralDigitGrouping {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let ExprKind::Lit(ref lit) = expr.kind {
|
if let ExprKind::Lit(lit) = expr.kind {
|
||||||
self.check_lit(cx, lit);
|
self.check_lit(cx, lit, expr.span);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -252,12 +254,13 @@ impl LiteralDigitGrouping {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_lit(self, cx: &EarlyContext<'_>, lit: &Lit) {
|
fn check_lit(self, cx: &EarlyContext<'_>, lit: token::Lit, span: Span) {
|
||||||
if_chain! {
|
if_chain! {
|
||||||
if let Some(src) = snippet_opt(cx, lit.span);
|
if let Some(src) = snippet_opt(cx, span);
|
||||||
if let Some(mut num_lit) = NumericLiteral::from_lit(&src, lit);
|
if let Ok(lit_kind) = LitKind::from_token_lit(lit);
|
||||||
|
if let Some(mut num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind);
|
||||||
then {
|
then {
|
||||||
if !Self::check_for_mistyped_suffix(cx, lit.span, &mut num_lit) {
|
if !Self::check_for_mistyped_suffix(cx, span, &mut num_lit) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -293,14 +296,14 @@ impl LiteralDigitGrouping {
|
|||||||
| WarningType::InconsistentDigitGrouping
|
| WarningType::InconsistentDigitGrouping
|
||||||
| WarningType::UnusualByteGroupings
|
| WarningType::UnusualByteGroupings
|
||||||
| WarningType::LargeDigitGroups => {
|
| WarningType::LargeDigitGroups => {
|
||||||
!lit.span.from_expansion()
|
!span.from_expansion()
|
||||||
}
|
}
|
||||||
WarningType::DecimalRepresentation | WarningType::MistypedLiteralSuffix => {
|
WarningType::DecimalRepresentation | WarningType::MistypedLiteralSuffix => {
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if should_warn {
|
if should_warn {
|
||||||
warning_type.display(num_lit.format(), cx, lit.span);
|
warning_type.display(num_lit.format(), cx, span);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -458,8 +461,8 @@ impl EarlyLintPass for DecimalLiteralRepresentation {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let ExprKind::Lit(ref lit) = expr.kind {
|
if let ExprKind::Lit(lit) = expr.kind {
|
||||||
self.check_lit(cx, lit);
|
self.check_lit(cx, lit, expr.span);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -469,19 +472,20 @@ impl DecimalLiteralRepresentation {
|
|||||||
pub fn new(threshold: u64) -> Self {
|
pub fn new(threshold: u64) -> Self {
|
||||||
Self { threshold }
|
Self { threshold }
|
||||||
}
|
}
|
||||||
fn check_lit(self, cx: &EarlyContext<'_>, lit: &Lit) {
|
fn check_lit(self, cx: &EarlyContext<'_>, lit: token::Lit, span: Span) {
|
||||||
// Lint integral literals.
|
// Lint integral literals.
|
||||||
if_chain! {
|
if_chain! {
|
||||||
if let LitKind::Int(val, _) = lit.kind;
|
if let Ok(lit_kind) = LitKind::from_token_lit(lit);
|
||||||
if let Some(src) = snippet_opt(cx, lit.span);
|
if let LitKind::Int(val, _) = lit_kind;
|
||||||
if let Some(num_lit) = NumericLiteral::from_lit(&src, lit);
|
if let Some(src) = snippet_opt(cx, span);
|
||||||
|
if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit_kind);
|
||||||
if num_lit.radix == Radix::Decimal;
|
if num_lit.radix == Radix::Decimal;
|
||||||
if val >= u128::from(self.threshold);
|
if val >= u128::from(self.threshold);
|
||||||
then {
|
then {
|
||||||
let hex = format!("{val:#X}");
|
let hex = format!("{val:#X}");
|
||||||
let num_lit = NumericLiteral::new(&hex, num_lit.suffix, false);
|
let num_lit = NumericLiteral::new(&hex, num_lit.suffix, false);
|
||||||
let _ = Self::do_lint(num_lit.integer).map_err(|warning_type| {
|
let _ = Self::do_lint(num_lit.integer).map_err(|warning_type| {
|
||||||
warning_type.display(num_lit.format(), cx, lit.span);
|
warning_type.display(num_lit.format(), cx, span);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,11 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use rustc_ast::ast::Lit;
use rustc_errors::Applicability;
use rustc_lint::EarlyContext;
use rustc_span::Span;

use super::{SEPARATED_LITERAL_SUFFIX, UNSEPARATED_LITERAL_SUFFIX};

pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str, suffix: &str, sugg_type: &str) {
pub(super) fn check(cx: &EarlyContext<'_>, lit_span: Span, lit_snip: &str, suffix: &str, sugg_type: &str) {
let Some(maybe_last_sep_idx) = lit_snip.len().checked_sub(suffix.len() + 1) else {
return; // It's useless so shouldn't lint.
};

@ -15,7 +15,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str, suffix: &s
span_lint_and_sugg(
cx,
SEPARATED_LITERAL_SUFFIX,
lit.span,
lit_span,
&format!("{sugg_type} type suffix should not be separated by an underscore"),
"remove the underscore",
format!("{}{suffix}", &lit_snip[..maybe_last_sep_idx]),

@ -25,7 +25,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str, suffix: &s
span_lint_and_sugg(
cx,
UNSEPARATED_LITERAL_SUFFIX,
lit.span,
lit_span,
&format!("{sugg_type} type suffix should be separated by an underscore"),
"add an underscore",
format!("{}_{suffix}", &lit_snip[..=maybe_last_sep_idx]),

@ -1,10 +1,10 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::Lit;
use rustc_lint::EarlyContext;
use rustc_span::Span;

use super::MIXED_CASE_HEX_LITERALS;

pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, suffix: &str, lit_snip: &str) {
pub(super) fn check(cx: &EarlyContext<'_>, lit_span: Span, suffix: &str, lit_snip: &str) {
let Some(maybe_last_sep_idx) = lit_snip.len().checked_sub(suffix.len() + 1) else {
return; // It's useless so shouldn't lint.
};

@ -23,7 +23,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, suffix: &str, lit_snip: &s
span_lint(
cx,
MIXED_CASE_HEX_LITERALS,
lit.span,
lit_span,
"inconsistent casing in hexadecimal literal",
);
break;
@ -9,7 +9,8 @@ mod zero_prefixed_literal;

use clippy_utils::diagnostics::span_lint;
use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{Expr, ExprKind, Generics, Lit, LitFloatType, LitIntType, LitKind, NodeId, Pat, PatKind};
use rustc_ast::ast::{Expr, ExprKind, Generics, LitFloatType, LitIntType, LitKind, NodeId, Pat, PatKind};
use rustc_ast::token;
use rustc_ast::visit::FnKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};

@ -374,42 +375,43 @@ impl EarlyLintPass for MiscEarlyLints {
return;
}

if let ExprKind::Lit(ref lit) = expr.kind {
if let ExprKind::Lit(lit) = expr.kind {
MiscEarlyLints::check_lit(cx, lit);
MiscEarlyLints::check_lit(cx, lit, expr.span);
}
double_neg::check(cx, expr);
}
}

impl MiscEarlyLints {
fn check_lit(cx: &EarlyContext<'_>, lit: &Lit) {
fn check_lit(cx: &EarlyContext<'_>, lit: token::Lit, span: Span) {
// We test if first character in snippet is a number, because the snippet could be an expansion
// from a built-in macro like `line!()` or a proc-macro like `#[wasm_bindgen]`.
// Note that this check also covers special case that `line!()` is eagerly expanded by compiler.
// See <https://github.com/rust-lang/rust-clippy/issues/4507> for a regression.
// FIXME: Find a better way to detect those cases.
let lit_snip = match snippet_opt(cx, lit.span) {
let lit_snip = match snippet_opt(cx, span) {
Some(snip) if snip.chars().next().map_or(false, |c| c.is_ascii_digit()) => snip,
_ => return,
};

if let LitKind::Int(value, lit_int_type) = lit.kind {
let lit_kind = LitKind::from_token_lit(lit);
if let Ok(LitKind::Int(value, lit_int_type)) = lit_kind {
let suffix = match lit_int_type {
LitIntType::Signed(ty) => ty.name_str(),
LitIntType::Unsigned(ty) => ty.name_str(),
LitIntType::Unsuffixed => "",
};
literal_suffix::check(cx, lit, &lit_snip, suffix, "integer");
literal_suffix::check(cx, span, &lit_snip, suffix, "integer");
if lit_snip.starts_with("0x") {
mixed_case_hex_literals::check(cx, lit, suffix, &lit_snip);
mixed_case_hex_literals::check(cx, span, suffix, &lit_snip);
} else if lit_snip.starts_with("0b") || lit_snip.starts_with("0o") {
// nothing to do
} else if value != 0 && lit_snip.starts_with('0') {
zero_prefixed_literal::check(cx, lit, &lit_snip);
zero_prefixed_literal::check(cx, span, &lit_snip);
}
} else if let LitKind::Float(_, LitFloatType::Suffixed(float_ty)) = lit.kind {
} else if let Ok(LitKind::Float(_, LitFloatType::Suffixed(float_ty))) = lit_kind {
let suffix = float_ty.name_str();
literal_suffix::check(cx, lit, &lit_snip, suffix, "float");
literal_suffix::check(cx, span, &lit_snip, suffix, "float");
}
}
}
@ -1,20 +1,20 @@
use clippy_utils::diagnostics::span_lint_and_then;
use rustc_ast::ast::Lit;
use rustc_errors::Applicability;
use rustc_lint::EarlyContext;
use rustc_span::Span;

use super::ZERO_PREFIXED_LITERAL;

pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str) {
pub(super) fn check(cx: &EarlyContext<'_>, lit_span: Span, lit_snip: &str) {
let trimmed_lit_snip = lit_snip.trim_start_matches(|c| c == '_' || c == '0');
span_lint_and_then(
cx,
ZERO_PREFIXED_LITERAL,
lit.span,
lit_span,
"this is a decimal constant",
|diag| {
diag.span_suggestion(
lit.span,
lit_span,
"if you mean to use a decimal constant, remove the `0` to avoid confusion",
trimmed_lit_snip.to_string(),
Applicability::MaybeIncorrect,

@ -22,7 +22,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str) {
// do not advise to use octal form if the literal cannot be expressed in base 8.
if !lit_snip.contains(|c| c == '8' || c == '9') {
diag.span_suggestion(
lit.span,
lit_span,
"if you mean to use an octal constant, use `0o`",
format!("0o{trimmed_lit_snip}"),
Applicability::MaybeIncorrect,

@ -56,11 +56,11 @@ impl EarlyLintPass for OctalEscapes {
return;
}

if let ExprKind::Lit(lit) = &expr.kind {
if let ExprKind::Lit(token_lit) = &expr.kind {
if matches!(lit.token_lit.kind, LitKind::Str) {
if matches!(token_lit.kind, LitKind::Str) {
check_lit(cx, &lit.token_lit, lit.span, true);
check_lit(cx, &token_lit, expr.span, true);
} else if matches!(lit.token_lit.kind, LitKind::ByteStr) {
} else if matches!(token_lit.kind, LitKind::ByteStr) {
check_lit(cx, &lit.token_lit, lit.span, false);
check_lit(cx, &token_lit, expr.span, false);
}
}
}
}
|
@ -1,7 +1,8 @@
|
|||||||
use clippy_utils::diagnostics::span_lint_and_sugg;
|
use clippy_utils::diagnostics::span_lint_and_sugg;
|
||||||
use clippy_utils::source::snippet_with_applicability;
|
use clippy_utils::source::snippet_with_applicability;
|
||||||
use if_chain::if_chain;
|
use if_chain::if_chain;
|
||||||
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, LitKind, UnOp};
|
use rustc_ast::ast::{BinOpKind, Expr, ExprKind, UnOp};
|
||||||
|
use rustc_ast::token;
|
||||||
use rustc_errors::Applicability;
|
use rustc_errors::Applicability;
|
||||||
use rustc_lint::{EarlyContext, EarlyLintPass};
|
use rustc_lint::{EarlyContext, EarlyLintPass};
|
||||||
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
||||||
@ -120,7 +121,7 @@ impl EarlyLintPass for Precedence {
|
|||||||
if_chain! {
|
if_chain! {
|
||||||
if !all_odd;
|
if !all_odd;
|
||||||
if let ExprKind::Lit(lit) = &arg.kind;
|
if let ExprKind::Lit(lit) = &arg.kind;
|
||||||
if let LitKind::Int(..) | LitKind::Float(..) = &lit.kind;
|
if let token::LitKind::Integer | token::LitKind::Float = &lit.kind;
|
||||||
then {
|
then {
|
||||||
let mut applicability = Applicability::MachineApplicable;
|
let mut applicability = Applicability::MachineApplicable;
|
||||||
span_lint_and_sugg(
|
span_lint_and_sugg(
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
use clippy_utils::diagnostics::span_lint_and_sugg;
|
use clippy_utils::diagnostics::span_lint_and_sugg;
|
||||||
use rustc_ast::ast::{Expr, ExprKind, LitFloatType, LitKind};
|
use rustc_ast::ast::{Expr, ExprKind};
|
||||||
use rustc_errors::Applicability;
|
use rustc_errors::Applicability;
|
||||||
use rustc_lint::{EarlyContext, EarlyLintPass};
|
use rustc_lint::{EarlyContext, EarlyLintPass};
|
||||||
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
||||||
@ -33,14 +33,14 @@ fn is_useless_rounding(expr: &Expr) -> Option<(&str, String)> {
|
|||||||
if let ExprKind::MethodCall(name_ident, receiver, _, _) = &expr.kind
|
if let ExprKind::MethodCall(name_ident, receiver, _, _) = &expr.kind
|
||||||
&& let method_name = name_ident.ident.name.as_str()
|
&& let method_name = name_ident.ident.name.as_str()
|
||||||
&& (method_name == "ceil" || method_name == "round" || method_name == "floor")
|
&& (method_name == "ceil" || method_name == "round" || method_name == "floor")
|
||||||
&& let ExprKind::Lit(spanned) = &receiver.kind
|
&& let ExprKind::Lit(token_lit) = &receiver.kind
|
||||||
&& let LitKind::Float(symbol, ty) = spanned.kind {
|
&& token_lit.is_semantic_float() {
|
||||||
let f = symbol.as_str().parse::<f64>().unwrap();
|
let f = token_lit.symbol.as_str().parse::<f64>().unwrap();
|
||||||
let f_str = symbol.to_string() + if let LitFloatType::Suffixed(ty) = ty {
|
let mut f_str = token_lit.symbol.to_string();
|
||||||
ty.name_str()
|
match token_lit.suffix {
|
||||||
} else {
|
Some(suffix) => f_str.push_str(suffix.as_str()),
|
||||||
""
|
None => {}
|
||||||
};
|
}
|
||||||
if f.fract() == 0.0 {
|
if f.fract() == 0.0 {
|
||||||
Some((method_name, f_str))
|
Some((method_name, f_str))
|
||||||
} else {
|
} else {
|
||||||
|
@ -152,7 +152,7 @@ pub fn eq_expr(l: &Expr, r: &Expr) -> bool {
|
|||||||
},
|
},
|
||||||
(Binary(lo, ll, lr), Binary(ro, rl, rr)) => lo.node == ro.node && eq_expr(ll, rl) && eq_expr(lr, rr),
|
(Binary(lo, ll, lr), Binary(ro, rl, rr)) => lo.node == ro.node && eq_expr(ll, rl) && eq_expr(lr, rr),
|
||||||
(Unary(lo, l), Unary(ro, r)) => mem::discriminant(lo) == mem::discriminant(ro) && eq_expr(l, r),
|
(Unary(lo, l), Unary(ro, r)) => mem::discriminant(lo) == mem::discriminant(ro) && eq_expr(l, r),
|
||||||
(Lit(l), Lit(r)) => l.kind == r.kind,
|
(Lit(l), Lit(r)) => l == r,
|
||||||
(Cast(l, lt), Cast(r, rt)) | (Type(l, lt), Type(r, rt)) => eq_expr(l, r) && eq_ty(lt, rt),
|
(Cast(l, lt), Cast(r, rt)) | (Type(l, lt), Type(r, rt)) => eq_expr(l, r) && eq_ty(lt, rt),
|
||||||
(Let(lp, le, _), Let(rp, re, _)) => eq_pat(lp, rp) && eq_expr(le, re),
|
(Let(lp, le, _), Let(rp, re, _)) => eq_pat(lp, rp) && eq_expr(le, re),
|
||||||
(If(lc, lt, le), If(rc, rt, re)) => eq_expr(lc, rc) && eq_block(lt, rt) && eq_expr_opt(le, re),
|
(If(lc, lt, le), If(rc, rt, re)) => eq_expr(lc, rc) && eq_block(lt, rt) && eq_expr_opt(le, re),
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
use rustc_ast::ast::{Lit, LitFloatType, LitIntType, LitKind};
|
use rustc_ast::ast::{LitFloatType, LitIntType, LitKind};
|
||||||
use std::iter;
|
use std::iter;
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||||
@ -46,10 +46,6 @@ pub struct NumericLiteral<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> NumericLiteral<'a> {
|
impl<'a> NumericLiteral<'a> {
|
||||||
pub fn from_lit(src: &'a str, lit: &Lit) -> Option<NumericLiteral<'a>> {
|
|
||||||
NumericLiteral::from_lit_kind(src, &lit.kind)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_lit_kind(src: &'a str, lit_kind: &LitKind) -> Option<NumericLiteral<'a>> {
|
pub fn from_lit_kind(src: &'a str, lit_kind: &LitKind) -> Option<NumericLiteral<'a>> {
|
||||||
let unsigned_src = src.strip_prefix('-').map_or(src, |s| s);
|
let unsigned_src = src.strip_prefix('-').map_or(src, |s| s);
|
||||||
if lit_kind.is_numeric()
|
if lit_kind.is_numeric()
|
||||||
|
@ -260,7 +260,9 @@ impl Rewrite for ast::NestedMetaItem {
fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
match self {
ast::NestedMetaItem::MetaItem(ref meta_item) => meta_item.rewrite(context, shape),
ast::NestedMetaItem::Literal(ref l) => rewrite_literal(context, l, shape),
ast::NestedMetaItem::Literal(ref l) => {
rewrite_literal(context, l.token_lit, l.span, shape)
}
}
}
}

@ -318,7 +320,7 @@ impl Rewrite for ast::MetaItem {
// we might be better off ignoring the fact that the attribute
// is longer than the max width and continue on formatting.
// See #2479 for example.
let value = rewrite_literal(context, literal, lit_shape)
let value = rewrite_literal(context, literal.token_lit, literal.span, lit_shape)
.unwrap_or_else(|| context.snippet(literal.span).to_owned());
format!("{} = {}", path, value)
}

@ -3,7 +3,7 @@ use std::cmp::min;

use itertools::Itertools;
use rustc_ast::token::{Delimiter, LitKind};
use rustc_ast::{ast, ptr};
use rustc_ast::{ast, ptr, token};
use rustc_span::{BytePos, Span};

use crate::chains::rewrite_chain;

@ -75,12 +75,12 @@ pub(crate) fn format_expr(
choose_separator_tactic(context, expr.span),
None,
),
ast::ExprKind::Lit(ref l) => {
ast::ExprKind::Lit(token_lit) => {
if let Some(expr_rw) = rewrite_literal(context, l, shape) {
if let Some(expr_rw) = rewrite_literal(context, token_lit, expr.span, shape) {
Some(expr_rw)
} else {
if let LitKind::StrRaw(_) = l.token_lit.kind {
if let LitKind::StrRaw(_) = token_lit.kind {
Some(context.snippet(l.span).trim().into())
Some(context.snippet(expr.span).trim().into())
} else {
None
}

@ -274,9 +274,9 @@ pub(crate) fn format_expr(

fn needs_space_before_range(context: &RewriteContext<'_>, lhs: &ast::Expr) -> bool {
match lhs.kind {
ast::ExprKind::Lit(ref lit) => match lit.kind {
ast::ExprKind::Lit(token_lit) => match token_lit.kind {
ast::LitKind::Float(_, ast::LitFloatType::Unsuffixed) => {
token::LitKind::Float if token_lit.suffix.is_none() => {
context.snippet(lit.span).ends_with('.')
context.snippet(lhs.span).ends_with('.')
}
_ => false,
},

@ -1185,14 +1185,15 @@ pub(crate) fn is_unsafe_block(block: &ast::Block) -> bool {

pub(crate) fn rewrite_literal(
context: &RewriteContext<'_>,
l: &ast::Lit,
token_lit: token::Lit,
span: Span,
shape: Shape,
) -> Option<String> {
match l.kind {
match token_lit.kind {
ast::LitKind::Str(_, ast::StrStyle::Cooked) => rewrite_string_lit(context, l.span, shape),
token::LitKind::Str => rewrite_string_lit(context, span, shape),
ast::LitKind::Int(..) => rewrite_int_lit(context, l, shape),
token::LitKind::Integer => rewrite_int_lit(context, token_lit, span, shape),
_ => wrap_str(
context.snippet(l.span).to_owned(),
context.snippet(span).to_owned(),
context.config.max_width(),
shape,
),

@ -1225,9 +1226,13 @@ fn rewrite_string_lit(context: &RewriteContext<'_>, span: Span, shape: Shape) ->
)
}

fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -> Option<String> {
fn rewrite_int_lit(
let span = lit.span;
context: &RewriteContext<'_>,
let symbol = lit.token_lit.symbol.as_str();
token_lit: token::Lit,
span: Span,
shape: Shape,
) -> Option<String> {
let symbol = token_lit.symbol.as_str();

if let Some(symbol_stripped) = symbol.strip_prefix("0x") {
let hex_lit = match context.config.hex_literal_case() {

@ -1240,9 +1245,7 @@ fn rewrite_int_lit(context: &RewriteContext<'_>, lit: &ast::Lit, shape: Shape) -
format!(
"0x{}{}",
hex_lit,
lit.token_lit
token_lit.suffix.map_or(String::new(), |s| s.to_string())
.suffix
.map_or(String::new(), |s| s.to_string())
),
context.config.max_width(),
shape,