2022-08-30 11:19:17 +00:00
|
|
|
use super::diagnostics::SnapshotParser;
|
2022-01-12 20:43:24 +00:00
|
|
|
use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
|
2020-10-15 19:21:45 +00:00
|
|
|
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
2021-08-16 13:22:36 +00:00
|
|
|
use super::{
|
2022-03-18 07:56:43 +00:00
|
|
|
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
|
|
|
|
SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
|
2021-08-16 13:22:36 +00:00
|
|
|
};
|
2022-08-30 11:19:17 +00:00
|
|
|
use crate::errors::{
|
2022-09-08 16:23:31 +00:00
|
|
|
ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
|
2022-10-10 02:40:56 +00:00
|
|
|
BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct, ComparisonInterpretedAsGeneric,
|
|
|
|
ComparisonOrShiftInterpretedAsGenericSugg, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
|
|
|
|
ExpectedElseBlock, ExpectedEqForLetExpr, ExpectedExpressionFoundLet,
|
|
|
|
FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
|
|
|
|
IfExpressionMissingCondition, IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub,
|
2022-09-08 16:23:31 +00:00
|
|
|
InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
|
2022-10-10 02:40:56 +00:00
|
|
|
InvalidInterpolatedExpression, InvalidLiteralSuffixOnTupleIndex, InvalidLogicalOperator,
|
|
|
|
InvalidLogicalOperatorSub, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
|
2022-09-08 16:23:31 +00:00
|
|
|
LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
|
|
|
|
MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
|
2022-10-13 15:27:17 +00:00
|
|
|
MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
|
|
|
|
NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
|
2022-10-10 02:40:56 +00:00
|
|
|
OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
|
2022-09-08 16:23:31 +00:00
|
|
|
RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
|
|
|
|
StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
|
|
|
|
UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
|
2022-08-17 17:05:49 +00:00
|
|
|
};
|
2022-08-30 11:19:17 +00:00
|
|
|
use crate::maybe_recover_from_interpolated_ty_qpath;
|
2022-06-25 11:08:38 +00:00
|
|
|
use core::mem;
|
2020-02-29 17:37:32 +00:00
|
|
|
use rustc_ast::ptr::P;
|
2022-04-26 12:40:14 +00:00
|
|
|
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go along, pushing open/close delimiters to our buffer
just like regular tokens. Once capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens at an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be negligible (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There is a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
use rustc_ast::tokenstream::Spacing;
|
2022-09-15 16:27:23 +00:00
|
|
|
use rustc_ast::util::case::Case;
|
2020-02-29 17:37:32 +00:00
|
|
|
use rustc_ast::util::classify;
|
|
|
|
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
|
2022-06-05 19:12:51 +00:00
|
|
|
use rustc_ast::visit::Visitor;
|
2021-03-15 21:36:07 +00:00
|
|
|
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
|
2020-04-27 17:56:11 +00:00
|
|
|
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
|
|
|
|
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
|
2022-06-02 16:15:05 +00:00
|
|
|
use rustc_ast::{ClosureBinder, StmtKind};
|
2020-01-11 16:02:46 +00:00
|
|
|
use rustc_ast_pretty::pprust;
|
2022-09-01 18:48:09 +00:00
|
|
|
use rustc_errors::{
|
|
|
|
Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
|
|
|
|
StashKey,
|
|
|
|
};
|
2022-10-10 02:40:56 +00:00
|
|
|
use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
|
2021-07-10 14:38:55 +00:00
|
|
|
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
|
|
|
|
use rustc_session::lint::BuiltinLintDiagnostics;
|
2020-01-11 00:19:09 +00:00
|
|
|
use rustc_span::source_map::{self, Span, Spanned};
|
2020-04-19 11:00:18 +00:00
|
|
|
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
2020-10-09 23:01:44 +00:00
|
|
|
use rustc_span::{BytePos, Pos};
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
|
|
/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            match &**nt {
                // An already-parsed expression or literal: clone it, consume the
                // interpolated token, and return early from the enclosing parser fn.
                token::NtExpr(e) | token::NtLiteral(e) => {
                    let e = e.clone();
                    $p.bump();
                    return Ok(e);
                }
                // An already-parsed path: wrap it in an `ExprKind::Path` expression.
                token::NtPath(path) => {
                    let path = (**path).clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Path(None, path)));
                }
                // An already-parsed block: wrap it in an unlabeled block expression.
                token::NtBlock(block) => {
                    let block = block.clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Block(block, None)));
                }
                // Any other interpolated fragment: fall through to normal parsing.
                _ => {}
            };
        }
    };
}
|
|
|
|
|
|
|
|
/// The state of the left-hand side of an associative expression
/// passed into `parse_assoc_expr_with`.
#[derive(Debug)]
pub(super) enum LhsExpr {
    /// Nothing for the LHS has been parsed yet.
    NotYetParsed,
    /// Outer attributes were parsed, but the expression itself was not.
    AttributesParsed(AttrWrapper),
    /// The LHS expression was fully parsed by the caller.
    AlreadyParsed(P<Expr>),
}
|
|
|
|
|
2021-01-22 18:28:08 +00:00
|
|
|
impl From<Option<AttrWrapper>> for LhsExpr {
|
2019-09-03 22:42:58 +00:00
|
|
|
/// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
|
|
|
|
/// and `None` into `LhsExpr::NotYetParsed`.
|
|
|
|
///
|
|
|
|
/// This conversion does not allocate.
|
2021-01-22 18:28:08 +00:00
|
|
|
fn from(o: Option<AttrWrapper>) -> Self {
|
2019-12-22 22:42:04 +00:00
|
|
|
if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl From<P<Expr>> for LhsExpr {
    /// Wraps an already-parsed expression as `LhsExpr::AlreadyParsed(expr)`.
    ///
    /// This conversion does not allocate.
    fn from(expr: P<Expr>) -> Self {
        Self::AlreadyParsed(expr)
    }
}
|
|
|
|
|
|
|
|
impl<'a> Parser<'a> {
|
|
|
|
/// Parses an expression.
#[inline]
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
    // Clear any closure tracked from a previous parse before starting
    // a fresh top-level expression.
    self.current_closure.take();

    self.parse_expr_res(Restrictions::empty(), None)
}
|
|
|
|
|
2021-03-25 22:05:49 +00:00
|
|
|
/// Parses an expression, forcing tokens to be collected
/// (i.e. the expression's tokens are captured even when they would
/// normally not be, e.g. for later attribute processing).
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
    self.collect_tokens_no_attrs(|this| this.parse_expr())
}
|
|
|
|
|
2021-04-06 04:50:55 +00:00
|
|
|
pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
|
2019-12-08 07:19:53 +00:00
|
|
|
self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
|
|
|
|
}
|
|
|
|
|
2019-09-16 20:45:43 +00:00
|
|
|
/// Parses an expression, recovering from a bare `_` used as an argument
/// placeholder (e.g. `foo(_, _, _)`) when the next token is a comma.
fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
    match self.parse_expr() {
        Ok(expr) => Ok(expr),
        Err(mut err) => match self.token.ident() {
            // Only recover on a non-raw `_` identifier followed by `,`.
            Some((Ident { name: kw::Underscore, .. }, false))
                if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
            {
                // Special-case handling of `foo(_, _, _)`
                err.emit();
                self.bump();
                // Substitute an error expression so parsing can continue.
                Ok(self.mk_expr(self.prev_token.span, ExprKind::Err))
            }
            _ => Err(err),
        },
    }
}
|
|
|
|
|
2019-12-08 07:19:53 +00:00
|
|
|
/// Parses a sequence of expressions delimited by parentheses.
fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
    // The second tuple element (trailing-comma info) is discarded.
    self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
}
|
|
|
|
|
|
|
|
/// Parses an expression, subject to the given restrictions.
#[inline]
pub(super) fn parse_expr_res(
    &mut self,
    r: Restrictions,
    already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
    // Install `r` for the duration of the parse, then restore the old
    // restrictions via `with_res`.
    self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
|
|
|
|
|
|
|
|
/// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
fn parse_assoc_expr(
    &mut self,
    already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
    // `min_prec == 0`: accept operators of any precedence.
    self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
|
|
|
|
|
|
|
|
/// Parses an associative expression with operators of at least `min_prec` precedence.
///
/// This is a precedence-climbing loop: it first obtains an LHS (unless one was
/// supplied), then repeatedly consumes a binary/assignment operator and a RHS,
/// folding them into `lhs`, as long as the operator's precedence is high enough.
/// Several operator typos (`===`, `<>`, `<=>`, postfix `++`) are recovered here.
pub(super) fn parse_assoc_expr_with(
    &mut self,
    min_prec: usize,
    lhs: LhsExpr,
) -> PResult<'a, P<Expr>> {
    let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
        expr
    } else {
        let attrs = match lhs {
            LhsExpr::AttributesParsed(attrs) => Some(attrs),
            _ => None,
        };
        // A leading `..`/`...`/`..=` means this is a prefix range, not a binary op.
        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
            return self.parse_prefix_range_expr(attrs);
        } else {
            self.parse_prefix_expr(attrs)?
        }
    };
    let last_type_ascription_set = self.last_type_ascription.is_some();

    if !self.should_continue_as_assoc_expr(&lhs) {
        self.last_type_ascription = None;
        return Ok(lhs);
    }

    self.expected_tokens.push(TokenType::Operator);
    while let Some(op) = self.check_assoc_op() {
        // Adjust the span for interpolated LHS to point to the `$lhs` token
        // and not to what it refers to.
        let lhs_span = match self.prev_token.kind {
            TokenKind::Interpolated(..) => self.prev_token.span,
            _ => lhs.span,
        };

        let cur_op_span = self.token.span;
        // Assignment-like operators keep only the NO_STRUCT_LITERAL restriction
        // for their RHS; other operators inherit the current restrictions.
        let restrictions = if op.node.is_assign_like() {
            self.restrictions & Restrictions::NO_STRUCT_LITERAL
        } else {
            self.restrictions
        };
        let prec = op.node.precedence();
        if prec < min_prec {
            break;
        }
        // Check for deprecated `...` syntax
        if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq {
            self.err_dotdotdot_syntax(self.token.span);
        }

        if self.token == token::LArrow {
            self.err_larrow_operator(self.token.span);
        }

        self.bump();
        if op.node.is_comparison() {
            // `a < b < c` etc.: may return a recovered expression directly.
            if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
                return Ok(expr);
            }
        }

        // Look for JS' `===` and `!==` and recover
        if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
            && self.token.kind == token::Eq
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            let sugg = match op.node {
                AssocOp::Equal => "==",
                AssocOp::NotEqual => "!=",
                _ => unreachable!(),
            }
            .into();
            let invalid = format!("{}=", &sugg);
            self.sess.emit_err(InvalidComparisonOperator {
                span: sp,
                invalid: invalid.clone(),
                sub: InvalidComparisonOperatorSub::Correctable {
                    span: sp,
                    invalid,
                    correct: sugg,
                },
            });
            // Eat the stray trailing `=`.
            self.bump();
        }

        // Look for PHP's `<>` and recover
        if op.node == AssocOp::Less
            && self.token.kind == token::Gt
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            self.sess.emit_err(InvalidComparisonOperator {
                span: sp,
                invalid: "<>".into(),
                sub: InvalidComparisonOperatorSub::Correctable {
                    span: sp,
                    invalid: "<>".into(),
                    correct: "!=".into(),
                },
            });
            self.bump();
        }

        // Look for C++'s `<=>` and recover
        if op.node == AssocOp::LessEqual
            && self.token.kind == token::Gt
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            self.sess.emit_err(InvalidComparisonOperator {
                span: sp,
                invalid: "<=>".into(),
                sub: InvalidComparisonOperatorSub::Spaceship(sp),
            });
            self.bump();
        }

        // Recover from postfix `++` (two adjacent `+` tokens right after the LHS).
        if self.prev_token == token::BinOp(token::Plus)
            && self.token == token::BinOp(token::Plus)
            && self.prev_token.span.between(self.token.span).is_empty()
        {
            let op_span = self.prev_token.span.to(self.token.span);
            // Eat the second `+`
            self.bump();
            lhs = self.recover_from_postfix_increment(lhs, op_span)?;
            continue;
        }

        let op = op.node;
        // Special cases:
        if op == AssocOp::As {
            lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
            continue;
        } else if op == AssocOp::Colon {
            lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
            continue;
        } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
            // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
            // generalise it to the Fixity::None code.
            lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
            break;
        }

        let fixity = op.fixity();
        let prec_adjustment = match fixity {
            Fixity::Right => 0,
            Fixity::Left => 1,
            // We currently have no non-associative operators that are not handled above by
            // the special cases. The code is here only for future convenience.
            Fixity::None => 1,
        };
        // STMT_EXPR only applies to the first token of the expression.
        let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
            this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
        })?;

        let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
        lhs = match op {
            AssocOp::Add
            | AssocOp::Subtract
            | AssocOp::Multiply
            | AssocOp::Divide
            | AssocOp::Modulus
            | AssocOp::LAnd
            | AssocOp::LOr
            | AssocOp::BitXor
            | AssocOp::BitAnd
            | AssocOp::BitOr
            | AssocOp::ShiftLeft
            | AssocOp::ShiftRight
            | AssocOp::Equal
            | AssocOp::Less
            | AssocOp::LessEqual
            | AssocOp::NotEqual
            | AssocOp::Greater
            | AssocOp::GreaterEqual => {
                let ast_op = op.to_ast_binop().unwrap();
                let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                self.mk_expr(span, binary)
            }
            AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)),
            AssocOp::AssignOp(k) => {
                let aop = match k {
                    token::Plus => BinOpKind::Add,
                    token::Minus => BinOpKind::Sub,
                    token::Star => BinOpKind::Mul,
                    token::Slash => BinOpKind::Div,
                    token::Percent => BinOpKind::Rem,
                    token::Caret => BinOpKind::BitXor,
                    token::And => BinOpKind::BitAnd,
                    token::Or => BinOpKind::BitOr,
                    token::Shl => BinOpKind::Shl,
                    token::Shr => BinOpKind::Shr,
                };
                let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                self.mk_expr(span, aopexpr)
            }
            AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
                self.span_bug(span, "AssocOp should have been handled by special case")
            }
        };

        if let Fixity::None = fixity {
            break;
        }
    }
    if last_type_ascription_set {
        self.last_type_ascription = None;
    }
    Ok(lhs)
}
|
|
|
|
|
2019-12-03 08:04:36 +00:00
|
|
|
/// Decides whether, after parsing `lhs`, we should keep treating the input
/// as one associative expression (`true`) or stop because `lhs` is a complete
/// statement-like form (`false`). Ambiguous block-then-operator cases (e.g.
/// `{ 42 } * 3`) are recorded in `ambiguous_block_expr_parse` for later
/// diagnostics.
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
    match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) {
        // Semi-statement forms are odd:
        // See https://github.com/rust-lang/rust/issues/29071
        (true, None) => false,
        (false, _) => true, // Continue parsing the expression.
        // An exhaustive check is done in the following block, but these are checked first
        // because they *are* ambiguous but also reasonable looking incorrect syntax, so we
        // want to keep their span info to improve diagnostics in these cases in a later stage.
        (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
        (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
        (true, Some(AssocOp::Add)) // `{ 42 } + 42
        // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
        // `if x { a } else { b } && if y { c } else { d }`
        if !self.look_ahead(1, |t| t.is_used_keyword()) => {
            // These cases are ambiguous and can't be identified in the parser alone.
            let sp = self.sess.source_map().start_point(self.token.span);
            self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
            false
        }
        (true, Some(AssocOp::LAnd)) |
        (true, Some(AssocOp::LOr)) |
        (true, Some(AssocOp::BitOr)) => {
            // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
            // above due to #74233.
            // These cases are ambiguous and can't be identified in the parser alone.
            //
            // Bitwise AND is left out because guessing intent is hard. We can make
            // suggestions based on the assumption that double-refs are rarely intentional,
            // and closures are distinct enough that they don't get mixed up with their
            // return value.
            let sp = self.sess.source_map().start_point(self.token.span);
            self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
            false
        }
        (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
        (true, Some(_)) => {
            self.error_found_expr_would_be_stmt(lhs);
            true
        }
    }
}
|
|
|
|
|
|
|
|
/// We've found an expression that would be parsed as a statement,
/// but the next token implies this should be parsed as an expression.
/// For example: `if let Some(x) = x { x } else { 0 } / 2`.
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
    // Emitted eagerly; the suggestion wraps the LHS in parentheses.
    self.sess.emit_err(FoundExprWouldBeStmt {
        span: self.token.span,
        token: self.token.clone(),
        suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
    });
}
|
|
|
|
|
|
|
|
/// Possibly translate the current token to an associative operator.
/// The method does not advance the current token.
///
/// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
    let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) {
        // When parsing const expressions, stop parsing when encountering `>`.
        (
            Some(
                AssocOp::ShiftRight
                | AssocOp::Greater
                | AssocOp::GreaterEqual
                | AssocOp::AssignOp(token::BinOpToken::Shr),
            ),
            _,
        ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
            return None;
        }
        (Some(op), _) => (op, self.token.span),
        // Recover the word `and` (non-raw identifier) as `&&`.
        (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
            self.sess.emit_err(InvalidLogicalOperator {
                span: self.token.span,
                incorrect: "and".into(),
                sub: InvalidLogicalOperatorSub::Conjunction(self.token.span),
            });
            (AssocOp::LAnd, span)
        }
        // Recover the word `or` (non-raw identifier) as `||`.
        (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
            self.sess.emit_err(InvalidLogicalOperator {
                span: self.token.span,
                incorrect: "or".into(),
                sub: InvalidLogicalOperatorSub::Disjunction(self.token.span),
            });
            (AssocOp::LOr, span)
        }
        _ => return None,
    };
    Some(source_map::respan(span, op))
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Checks if this expression is a successfully parsed statement.
/// True only in statement position (STMT_EXPR) when the expression form
/// does not require a trailing semicolon to be a statement.
fn expr_is_complete(&self, e: &Expr) -> bool {
    self.restrictions.contains(Restrictions::STMT_EXPR)
        && !classify::expr_requires_semi_to_be_stmt(e)
}
|
|
|
|
|
2019-12-07 03:59:08 +00:00
|
|
|
/// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
/// The other two variants are handled in `parse_prefix_range_expr` below.
fn parse_range_expr(
    &mut self,
    prec: usize,
    lhs: P<Expr>,
    op: AssocOp,
    cur_op_span: Span,
) -> PResult<'a, P<Expr>> {
    // An RHS is present only if the next token can begin one;
    // `prec + 1` makes the dots effectively non-associative.
    let rhs = if self.is_at_start_of_range_notation_rhs() {
        Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
    } else {
        None
    };
    // With no RHS, the range's span ends at the `..`/`..=` operator itself.
    let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span);
    let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span);
    let limits =
        if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed };
    let range = self.mk_range(Some(lhs), rhs, limits);
    Ok(self.mk_expr(span, range))
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
fn is_at_start_of_range_notation_rhs(&self) -> bool {
|
|
|
|
if self.token.can_begin_expr() {
|
2019-09-06 02:56:45 +00:00
|
|
|
// Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
|
2022-04-26 12:40:14 +00:00
|
|
|
if self.token == token::OpenDelim(Delimiter::Brace) {
|
2019-08-11 11:14:30 +00:00
|
|
|
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
|
|
|
|
}
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
    // Check for deprecated `...` syntax.
    if self.token == token::DotDotDot {
        self.err_dotdotdot_syntax(self.token.span);
    }

    debug_assert!(
        [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
        "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
        self.token
    );

    // `..` is half-open; `...` (deprecated) and `..=` are closed.
    let limits = match self.token.kind {
        token::DotDot => RangeLimits::HalfOpen,
        _ => RangeLimits::Closed,
    };
    let op = AssocOp::from_token(&self.token);
    // FIXME: `parse_prefix_range_expr` is called when the current
    // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already
    // parsed attributes, then trying to parse them here will always fail.
    // We should figure out how we want attributes on range expressions to work.
    let attrs = self.parse_or_use_outer_attributes(attrs)?;
    self.collect_tokens_for_expr(attrs, |this, attrs| {
        let lo = this.token.span;
        this.bump();
        let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
            // RHS must be parsed with more associativity than the dots.
            this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
                .map(|x| (lo.to(x.span), Some(x)))?
        } else {
            // Bare `..` / `..=` with no end expression.
            (lo, None)
        };
        let range = this.mk_range(None, opt_end, limits);
        Ok(this.mk_expr_with_attrs(span, range, attrs))
    })
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses a prefix-unary-operator expr.
fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(attrs)?;
    let lo = self.token.span;

    // Wraps `$body` in token collection and attaches `attrs` to the result.
    macro_rules! make_it {
        ($this:ident, $attrs:expr, |this, _| $body:expr) => {
            $this.collect_tokens_for_expr($attrs, |$this, attrs| {
                let (hi, ex) = $body?;
                Ok($this.mk_expr_with_attrs(lo.to(hi), ex, attrs))
            })
        };
    }

    let this = self;

    // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
    match this.token.uninterpolate().kind {
        token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
        token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
        token::BinOp(token::Minus) => {
            make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
        } // `-expr`
        token::BinOp(token::Star) => {
            make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
        } // `*expr`
        token::BinOp(token::And) | token::AndAnd => {
            make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
        }
        // Recover a leading unary `+` before a numeric literal (not valid Rust).
        token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
            let mut err =
                LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None };

            // a block on the LHS might have been intended to be an expression instead
            if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
                err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
            } else {
                err.remove_plus = Some(lo);
            }
            this.sess.emit_err(err);

            this.bump();
            this.parse_prefix_expr(None)
        } // `+expr`
        // Recover from `++x`:
        token::BinOp(token::Plus)
            if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
        {
            let prev_is_semi = this.prev_token == token::Semi;
            let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
            // Eat both `+`s.
            this.bump();
            this.bump();

            let operand_expr = this.parse_dot_or_call_expr(Default::default())?;
            this.recover_from_prefix_increment(operand_expr, pre_span, prev_is_semi)
        }
        token::Ident(..) if this.token.is_keyword(kw::Box) => {
            make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
        }
        // Recover e.g. `not expr` written instead of `!expr`.
        token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
            make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
        }
        _ => return this.parse_dot_or_call_expr(Some(attrs)),
    }
}
|
|
|
|
|
2019-12-07 02:07:35 +00:00
|
|
|
fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
|
2019-12-07 02:05:51 +00:00
|
|
|
self.bump();
|
|
|
|
let expr = self.parse_prefix_expr(None);
|
|
|
|
let (span, expr) = self.interpolated_or_expr_span(expr)?;
|
2019-12-07 02:07:35 +00:00
|
|
|
Ok((lo.to(span), expr))
|
|
|
|
}
|
|
|
|
|
|
|
|
fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
|
|
|
|
let (span, expr) = self.parse_prefix_expr_common(lo)?;
|
|
|
|
Ok((span, self.mk_unary(op, expr)))
|
|
|
|
}
|
|
|
|
|
|
|
|
// Recover on `!` suggesting for bitwise negation instead.
|
|
|
|
fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(TildeAsUnaryOperator(lo));
|
2019-12-07 02:05:51 +00:00
|
|
|
|
2019-12-07 02:07:35 +00:00
|
|
|
self.parse_unary_expr(lo, UnOp::Not)
|
2019-12-07 02:00:06 +00:00
|
|
|
}
|
|
|
|
|
2019-12-07 01:55:12 +00:00
|
|
|
/// Parse `box expr`.
|
|
|
|
fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
2019-12-07 02:07:35 +00:00
|
|
|
let (span, expr) = self.parse_prefix_expr_common(lo)?;
|
2019-12-07 01:55:12 +00:00
|
|
|
self.sess.gated_spans.gate(sym::box_syntax, span);
|
2019-12-07 02:00:06 +00:00
|
|
|
Ok((span, ExprKind::Box(expr)))
|
2019-12-07 01:55:12 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn is_mistaken_not_ident_negation(&self) -> bool {
|
2020-03-07 12:58:27 +00:00
|
|
|
let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind {
|
2019-12-07 01:55:12 +00:00
|
|
|
// These tokens can start an expression after `!`, but
|
|
|
|
// can't continue an expression after an ident
|
|
|
|
token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
|
|
|
|
token::Literal(..) | token::Pound => true,
|
|
|
|
_ => t.is_whole_expr(),
|
2019-08-11 11:14:30 +00:00
|
|
|
};
|
2019-12-07 01:55:12 +00:00
|
|
|
self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Recover on `not expr` in favor of `!expr`.
|
2019-12-07 01:50:22 +00:00
|
|
|
fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
2019-12-07 02:07:35 +00:00
|
|
|
// Emit the error...
|
2022-08-17 17:05:49 +00:00
|
|
|
let negated_token = self.look_ahead(1, |t| t.clone());
|
2022-09-16 06:41:42 +00:00
|
|
|
|
|
|
|
let sub_diag = if negated_token.is_numeric_lit() {
|
|
|
|
NotAsNegationOperatorSub::SuggestNotBitwise
|
2022-09-15 14:39:16 +00:00
|
|
|
} else if negated_token.is_bool_lit() {
|
2022-09-16 06:41:42 +00:00
|
|
|
NotAsNegationOperatorSub::SuggestNotLogical
|
2022-09-15 14:39:16 +00:00
|
|
|
} else {
|
2022-09-16 06:41:42 +00:00
|
|
|
NotAsNegationOperatorSub::SuggestNotDefault
|
2022-09-15 14:39:16 +00:00
|
|
|
};
|
|
|
|
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(NotAsNegationOperator {
|
|
|
|
negated: negated_token.span,
|
|
|
|
negated_desc: super::token_descr(&negated_token),
|
2019-12-07 01:50:22 +00:00
|
|
|
// Span the `not` plus trailing whitespace to avoid
|
|
|
|
// trailing whitespace after the `!` in our suggestion
|
2022-09-16 06:41:42 +00:00
|
|
|
sub: sub_diag(
|
|
|
|
self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
|
|
|
|
),
|
2022-08-17 17:05:49 +00:00
|
|
|
});
|
2019-12-07 02:07:35 +00:00
|
|
|
|
|
|
|
// ...and recover!
|
|
|
|
self.parse_unary_expr(lo, UnOp::Not)
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
|
|
|
|
fn interpolated_or_expr_span(
|
|
|
|
&self,
|
|
|
|
expr: PResult<'a, P<Expr>>,
|
|
|
|
) -> PResult<'a, (Span, P<Expr>)> {
|
|
|
|
expr.map(|e| {
|
2020-02-10 17:20:01 +00:00
|
|
|
(
|
2020-02-24 10:04:13 +00:00
|
|
|
match self.prev_token.kind {
|
2020-02-29 11:56:15 +00:00
|
|
|
TokenKind::Interpolated(..) => self.prev_token.span,
|
2020-02-10 17:20:01 +00:00
|
|
|
_ => e.span,
|
|
|
|
},
|
|
|
|
e,
|
|
|
|
)
|
2019-08-11 11:14:30 +00:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
fn parse_assoc_op_cast(
|
|
|
|
&mut self,
|
|
|
|
lhs: P<Expr>,
|
|
|
|
lhs_span: Span,
|
|
|
|
expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind,
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
2021-01-21 04:03:29 +00:00
|
|
|
let mk_expr = |this: &mut Self, lhs: P<Expr>, rhs: P<Ty>| {
|
2022-08-14 23:58:38 +00:00
|
|
|
this.mk_expr(this.mk_expr_sp(&lhs, lhs_span, rhs.span), expr_kind(lhs, rhs))
|
2019-08-11 11:14:30 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
// Save the state of the parser before parsing type normally, in case there is a
|
|
|
|
// LessThan comparison after this cast.
|
|
|
|
let parser_snapshot_before_type = self.clone();
|
2022-01-10 22:02:19 +00:00
|
|
|
let cast_expr = match self.parse_as_cast_ty() {
|
2021-01-21 04:03:29 +00:00
|
|
|
Ok(rhs) => mk_expr(self, lhs, rhs),
|
2022-01-26 03:39:14 +00:00
|
|
|
Err(type_err) => {
|
2022-10-28 18:44:26 +00:00
|
|
|
if !self.may_recover() {
|
|
|
|
return Err(type_err);
|
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
// Rewind to before attempting to parse the type with generics, to recover
|
|
|
|
// from situations like `x as usize < y` in which we first tried to parse
|
|
|
|
// `usize < y` as a type with generic arguments.
|
2020-04-17 20:59:14 +00:00
|
|
|
let parser_snapshot_after_type = mem::replace(self, parser_snapshot_before_type);
|
2019-08-11 11:14:30 +00:00
|
|
|
|
2021-01-21 04:03:29 +00:00
|
|
|
// Check for typo of `'a: loop { break 'a }` with a missing `'`.
|
|
|
|
match (&lhs.kind, &self.token.kind) {
|
|
|
|
(
|
|
|
|
// `foo: `
|
|
|
|
ExprKind::Path(None, ast::Path { segments, .. }),
|
|
|
|
TokenKind::Ident(kw::For | kw::Loop | kw::While, false),
|
|
|
|
) if segments.len() == 1 => {
|
2022-03-10 13:11:00 +00:00
|
|
|
let snapshot = self.create_snapshot_for_diagnostic();
|
2021-01-21 04:03:29 +00:00
|
|
|
let label = Label {
|
|
|
|
ident: Ident::from_str_and_span(
|
|
|
|
&format!("'{}", segments[0].ident),
|
|
|
|
segments[0].ident.span,
|
|
|
|
),
|
|
|
|
};
|
2022-08-14 23:58:38 +00:00
|
|
|
match self.parse_labeled_expr(label, false) {
|
2021-01-21 04:03:29 +00:00
|
|
|
Ok(expr) => {
|
|
|
|
type_err.cancel();
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(MalformedLoopLabel {
|
|
|
|
span: label.ident.span,
|
|
|
|
correct_label: label.ident,
|
|
|
|
});
|
2021-01-21 04:03:29 +00:00
|
|
|
return Ok(expr);
|
|
|
|
}
|
2022-01-26 03:39:14 +00:00
|
|
|
Err(err) => {
|
2021-01-21 04:03:29 +00:00
|
|
|
err.cancel();
|
2022-03-10 13:11:00 +00:00
|
|
|
self.restore_snapshot(snapshot);
|
2021-01-21 04:03:29 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
match self.parse_path(PathStyle::Expr) {
|
|
|
|
Ok(path) => {
|
2022-08-24 20:41:51 +00:00
|
|
|
let span_after_type = parser_snapshot_after_type.token.span;
|
2022-09-14 18:11:42 +00:00
|
|
|
let expr = mk_expr(
|
|
|
|
self,
|
|
|
|
lhs,
|
|
|
|
self.mk_ty(path.span, TyKind::Path(None, path.clone())),
|
|
|
|
);
|
2022-08-24 20:41:51 +00:00
|
|
|
|
|
|
|
let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
|
|
|
|
let suggestion = ComparisonOrShiftInterpretedAsGenericSugg {
|
|
|
|
left: expr.span.shrink_to_lo(),
|
|
|
|
right: expr.span.shrink_to_hi(),
|
|
|
|
};
|
|
|
|
|
|
|
|
match self.token.kind {
|
|
|
|
token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric {
|
|
|
|
comparison: self.token.span,
|
2022-09-14 18:11:42 +00:00
|
|
|
r#type: path,
|
2022-08-24 20:41:51 +00:00
|
|
|
args: args_span,
|
|
|
|
suggestion,
|
|
|
|
}),
|
|
|
|
token::BinOp(token::Shl) => {
|
|
|
|
self.sess.emit_err(ShiftInterpretedAsGeneric {
|
|
|
|
shift: self.token.span,
|
2022-09-14 18:11:42 +00:00
|
|
|
r#type: path,
|
2022-08-24 20:41:51 +00:00
|
|
|
args: args_span,
|
|
|
|
suggestion,
|
|
|
|
})
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
_ => {
|
|
|
|
// We can end up here even without `<` being the next token, for
|
|
|
|
// example because `parse_ty_no_plus` returns `Err` on keywords,
|
|
|
|
// but `parse_path` returns `Ok` on them due to error recovery.
|
|
|
|
// Return original error and parser state.
|
2020-04-17 20:59:14 +00:00
|
|
|
*self = parser_snapshot_after_type;
|
2019-08-11 11:14:30 +00:00
|
|
|
return Err(type_err);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
// Successfully parsed the type path leaving a `<` yet to parse.
|
|
|
|
type_err.cancel();
|
|
|
|
|
2022-08-24 20:41:51 +00:00
|
|
|
// Keep `x as usize` as an expression in AST and continue parsing.
|
2020-02-16 00:12:59 +00:00
|
|
|
expr
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
2022-01-26 03:39:14 +00:00
|
|
|
Err(path_err) => {
|
2019-08-11 11:14:30 +00:00
|
|
|
// Couldn't parse as a path, return original error and parser state.
|
|
|
|
path_err.cancel();
|
2020-04-17 20:59:14 +00:00
|
|
|
*self = parser_snapshot_after_type;
|
2020-02-16 00:12:59 +00:00
|
|
|
return Err(type_err);
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-02-09 05:34:38 +00:00
|
|
|
};
|
|
|
|
|
2020-02-16 00:12:59 +00:00
|
|
|
self.parse_and_disallow_postfix_after_cast(cast_expr)
|
|
|
|
}
|
2020-02-09 05:34:38 +00:00
|
|
|
|
2020-02-16 00:12:59 +00:00
|
|
|
/// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast,
|
|
|
|
/// then emits an error and returns the newly parsed tree.
|
|
|
|
/// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`.
|
|
|
|
fn parse_and_disallow_postfix_after_cast(
|
|
|
|
&mut self,
|
|
|
|
cast_expr: P<Expr>,
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
2022-03-19 03:45:15 +00:00
|
|
|
let span = cast_expr.span;
|
2022-07-29 07:01:58 +00:00
|
|
|
let (cast_kind, maybe_ascription_span) =
|
|
|
|
if let ExprKind::Type(ascripted_expr, _) = &cast_expr.kind {
|
|
|
|
("type ascription", Some(ascripted_expr.span.shrink_to_hi().with_hi(span.hi())))
|
|
|
|
} else {
|
|
|
|
("cast", None)
|
|
|
|
};
|
2022-03-19 03:45:15 +00:00
|
|
|
|
2020-02-16 00:12:59 +00:00
|
|
|
let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
|
|
|
|
|
|
|
|
// Check if an illegal postfix operator has been added after the cast.
|
2022-11-08 17:49:58 +00:00
|
|
|
// If the resulting expression is not a cast, it is an illegal postfix operator.
|
2022-11-08 11:51:10 +00:00
|
|
|
if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) {
|
2020-02-09 05:34:38 +00:00
|
|
|
let msg = format!(
|
2022-07-29 07:01:58 +00:00
|
|
|
"{cast_kind} cannot be followed by {}",
|
2020-02-09 05:34:38 +00:00
|
|
|
match with_postfix.kind {
|
2020-02-16 00:12:59 +00:00
|
|
|
ExprKind::Index(_, _) => "indexing",
|
2022-01-10 22:02:19 +00:00
|
|
|
ExprKind::Try(_) => "`?`",
|
2020-02-16 00:12:59 +00:00
|
|
|
ExprKind::Field(_, _) => "a field access",
|
2022-08-07 13:21:11 +00:00
|
|
|
ExprKind::MethodCall(_, _, _, _) => "a method call",
|
2020-02-16 00:12:59 +00:00
|
|
|
ExprKind::Call(_, _) => "a function call",
|
|
|
|
ExprKind::Await(_) => "`.await`",
|
2020-03-30 07:20:55 +00:00
|
|
|
ExprKind::Err => return Ok(with_postfix),
|
2020-02-16 01:09:42 +00:00
|
|
|
_ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
|
2020-02-09 05:34:38 +00:00
|
|
|
}
|
|
|
|
);
|
2020-02-16 00:12:59 +00:00
|
|
|
let mut err = self.struct_span_err(span, &msg);
|
2022-03-19 03:45:15 +00:00
|
|
|
|
2022-08-10 03:39:41 +00:00
|
|
|
let suggest_parens = |err: &mut Diagnostic| {
|
2020-02-22 20:33:06 +00:00
|
|
|
let suggestions = vec![
|
|
|
|
(span.shrink_to_lo(), "(".to_string()),
|
|
|
|
(span.shrink_to_hi(), ")".to_string()),
|
|
|
|
];
|
|
|
|
err.multipart_suggestion(
|
|
|
|
"try surrounding the expression in parentheses",
|
|
|
|
suggestions,
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
);
|
2022-03-19 03:45:15 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
// If type ascription is "likely an error", the user will already be getting a useful
|
|
|
|
// help message, and doesn't need a second.
|
|
|
|
if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) {
|
|
|
|
self.maybe_annotate_with_ascription(&mut err, false);
|
|
|
|
} else if let Some(ascription_span) = maybe_ascription_span {
|
|
|
|
let is_nightly = self.sess.unstable_features.is_nightly_build();
|
|
|
|
if is_nightly {
|
|
|
|
suggest_parens(&mut err);
|
|
|
|
}
|
|
|
|
err.span_suggestion(
|
|
|
|
ascription_span,
|
|
|
|
&format!(
|
|
|
|
"{}remove the type ascription",
|
|
|
|
if is_nightly { "alternatively, " } else { "" }
|
|
|
|
),
|
2022-06-13 06:48:40 +00:00
|
|
|
"",
|
2022-03-19 03:45:15 +00:00
|
|
|
if is_nightly {
|
|
|
|
Applicability::MaybeIncorrect
|
|
|
|
} else {
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
},
|
|
|
|
);
|
|
|
|
} else {
|
|
|
|
suggest_parens(&mut err);
|
2020-02-09 05:34:38 +00:00
|
|
|
}
|
2020-02-16 00:18:50 +00:00
|
|
|
err.emit();
|
2020-02-09 05:34:38 +00:00
|
|
|
};
|
|
|
|
Ok(with_postfix)
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-12-07 03:59:08 +00:00
|
|
|
fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> {
|
|
|
|
let maybe_path = self.could_ascription_be_path(&lhs.kind);
|
2020-02-29 11:56:15 +00:00
|
|
|
self.last_type_ascription = Some((self.prev_token.span, maybe_path));
|
2019-12-07 03:59:08 +00:00
|
|
|
let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
|
|
|
|
self.sess.gated_spans.gate(sym::type_ascription, lhs.span);
|
|
|
|
Ok(lhs)
|
|
|
|
}
|
|
|
|
|
2019-11-23 14:22:00 +00:00
|
|
|
/// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
|
2019-12-07 01:37:03 +00:00
|
|
|
fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
2019-11-23 14:15:49 +00:00
|
|
|
self.expect_and()?;
|
2020-03-05 06:54:22 +00:00
|
|
|
let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
|
|
|
|
let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
|
2019-12-07 01:37:03 +00:00
|
|
|
let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
|
|
|
|
let expr = self.parse_prefix_expr(None);
|
2020-03-05 06:54:22 +00:00
|
|
|
let (hi, expr) = self.interpolated_or_expr_span(expr)?;
|
|
|
|
let span = lo.to(hi);
|
|
|
|
if let Some(lt) = lifetime {
|
|
|
|
self.error_remove_borrow_lifetime(span, lt.ident.span);
|
|
|
|
}
|
|
|
|
Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr)))
|
|
|
|
}
|
|
|
|
|
|
|
|
fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(LifetimeInBorrowExpression { span, lifetime_span: lt_span });
|
2019-12-07 01:37:03 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse `mut?` or `raw [ const | mut ]`.
|
|
|
|
fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutability) {
|
|
|
|
if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) {
|
|
|
|
// `raw [ const | mut ]`.
|
2019-11-23 14:15:49 +00:00
|
|
|
let found_raw = self.eat_keyword(kw::Raw);
|
|
|
|
assert!(found_raw);
|
|
|
|
let mutability = self.parse_const_or_mut().unwrap();
|
2020-02-29 11:56:15 +00:00
|
|
|
self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span));
|
2019-11-23 14:15:49 +00:00
|
|
|
(ast::BorrowKind::Raw, mutability)
|
|
|
|
} else {
|
2019-12-07 01:37:03 +00:00
|
|
|
// `mut?`
|
2019-11-23 14:15:49 +00:00
|
|
|
(ast::BorrowKind::Ref, self.parse_mutability())
|
2019-12-07 01:37:03 +00:00
|
|
|
}
|
2019-11-23 14:15:49 +00:00
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
|
2021-01-22 18:28:08 +00:00
|
|
|
fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
|
2019-12-07 01:30:54 +00:00
|
|
|
let attrs = self.parse_or_use_outer_attributes(attrs)?;
|
2021-01-22 18:28:08 +00:00
|
|
|
self.collect_tokens_for_expr(attrs, |this, attrs| {
|
|
|
|
let base = this.parse_bottom_expr();
|
|
|
|
let (span, base) = this.interpolated_or_expr_span(base)?;
|
|
|
|
this.parse_dot_or_call_expr_with(base, span, attrs)
|
|
|
|
})
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub(super) fn parse_dot_or_call_expr_with(
|
|
|
|
&mut self,
|
|
|
|
e0: P<Expr>,
|
|
|
|
lo: Span,
|
2022-08-17 02:34:33 +00:00
|
|
|
mut attrs: ast::AttrVec,
|
2019-08-11 11:14:30 +00:00
|
|
|
) -> PResult<'a, P<Expr>> {
|
|
|
|
// Stitch the list of outer attributes onto the return value.
|
|
|
|
// A little bit ugly, but the best way given the current code
|
|
|
|
// structure
|
2022-08-23 03:28:20 +00:00
|
|
|
let res = self.parse_dot_or_call_expr_with_(e0, lo);
|
|
|
|
if attrs.is_empty() {
|
|
|
|
res
|
|
|
|
} else {
|
|
|
|
res.map(|expr| {
|
|
|
|
expr.map(|mut expr| {
|
|
|
|
attrs.extend(expr.attrs);
|
|
|
|
expr.attrs = attrs;
|
|
|
|
expr
|
|
|
|
})
|
2019-08-11 11:14:30 +00:00
|
|
|
})
|
2022-08-23 03:28:20 +00:00
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-12-07 01:01:58 +00:00
|
|
|
fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
2019-08-11 11:14:30 +00:00
|
|
|
loop {
|
2022-05-01 17:05:35 +00:00
|
|
|
let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
|
|
|
|
// we are using noexpect here because we don't expect a `?` directly after a `return`
|
|
|
|
// which could be suggested otherwise
|
|
|
|
self.eat_noexpect(&token::Question)
|
|
|
|
} else {
|
|
|
|
self.eat(&token::Question)
|
|
|
|
};
|
|
|
|
if has_question {
|
2019-12-07 01:01:58 +00:00
|
|
|
// `expr?`
|
2022-08-14 23:58:38 +00:00
|
|
|
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
|
2019-12-07 01:01:58 +00:00
|
|
|
continue;
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
2022-05-01 17:05:35 +00:00
|
|
|
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
|
|
|
|
// we are using noexpect here because we don't expect a `.` directly after a `return`
|
|
|
|
// which could be suggested otherwise
|
|
|
|
self.eat_noexpect(&token::Dot)
|
|
|
|
} else {
|
|
|
|
self.eat(&token::Dot)
|
|
|
|
};
|
|
|
|
if has_dot {
|
2019-12-07 01:01:58 +00:00
|
|
|
// expr.f
|
2019-12-07 00:52:53 +00:00
|
|
|
e = self.parse_dot_suffix_expr(lo, e)?;
|
2019-08-11 11:14:30 +00:00
|
|
|
continue;
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
if self.expr_is_complete(&e) {
|
2019-12-07 01:01:58 +00:00
|
|
|
return Ok(e);
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
2019-12-07 01:01:58 +00:00
|
|
|
e = match self.token.kind {
|
2022-04-26 12:40:14 +00:00
|
|
|
token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
|
|
|
|
token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
|
2019-12-22 22:42:04 +00:00
|
|
|
_ => return Ok(e),
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
fn look_ahead_type_ascription_as_field(&mut self) -> bool {
|
|
|
|
self.look_ahead(1, |t| t.is_ident())
|
|
|
|
&& self.look_ahead(2, |t| t == &token::Colon)
|
|
|
|
&& self.look_ahead(3, |t| t.can_begin_expr())
|
|
|
|
}
|
|
|
|
|
2019-12-07 00:52:53 +00:00
|
|
|
fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
|
2020-03-07 11:37:38 +00:00
|
|
|
match self.token.uninterpolate().kind {
|
2019-12-07 00:52:53 +00:00
|
|
|
token::Ident(..) => self.parse_dot_suffix(base, lo),
|
|
|
|
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
|
2020-04-18 18:26:10 +00:00
|
|
|
Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
|
2019-12-07 00:52:53 +00:00
|
|
|
}
|
2020-04-18 18:26:10 +00:00
|
|
|
token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
|
|
|
|
Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
|
2019-12-07 00:52:53 +00:00
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
self.error_unexpected_after_dot();
|
|
|
|
Ok(base)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-06 23:59:56 +00:00
|
|
|
fn error_unexpected_after_dot(&self) {
|
|
|
|
// FIXME Could factor this out into non_fatal_unexpected or something.
|
2019-12-07 02:07:35 +00:00
|
|
|
let actual = pprust::token_to_string(&self.token);
|
2022-03-15 10:13:56 +00:00
|
|
|
self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit();
|
2019-12-06 23:59:56 +00:00
|
|
|
}
|
|
|
|
|
2020-10-27 21:23:58 +00:00
|
|
|
// We need an identifier or integer, but the next token is a float.
|
2020-04-18 18:26:10 +00:00
|
|
|
// Break the float into components to extract the identifier or integer.
|
|
|
|
// FIXME: With current `TokenCursor` it's hard to break tokens into more than 2
|
|
|
|
// parts unless those parts are processed immediately. `TokenCursor` should either
|
|
|
|
// support pushing "future tokens" (would be also helpful to `break_and_eat`), or
|
|
|
|
// we should break everything including floats into more basic proc-macro style
|
|
|
|
// tokens in the lexer (probably preferable).
|
|
|
|
fn parse_tuple_field_access_expr_float(
|
2019-12-06 23:34:32 +00:00
|
|
|
&mut self,
|
|
|
|
lo: Span,
|
2019-12-07 00:52:53 +00:00
|
|
|
base: P<Expr>,
|
2020-04-18 18:26:10 +00:00
|
|
|
float: Symbol,
|
|
|
|
suffix: Option<Symbol>,
|
|
|
|
) -> P<Expr> {
|
|
|
|
#[derive(Debug)]
|
|
|
|
enum FloatComponent {
|
|
|
|
IdentLike(String),
|
|
|
|
Punct(char),
|
|
|
|
}
|
|
|
|
use FloatComponent::*;
|
|
|
|
|
2020-10-09 23:01:44 +00:00
|
|
|
let float_str = float.as_str();
|
2020-04-18 18:26:10 +00:00
|
|
|
let mut components = Vec::new();
|
|
|
|
let mut ident_like = String::new();
|
2020-10-09 23:01:44 +00:00
|
|
|
for c in float_str.chars() {
|
2020-04-18 18:26:10 +00:00
|
|
|
if c == '_' || c.is_ascii_alphanumeric() {
|
|
|
|
ident_like.push(c);
|
|
|
|
} else if matches!(c, '.' | '+' | '-') {
|
|
|
|
if !ident_like.is_empty() {
|
|
|
|
components.push(IdentLike(mem::take(&mut ident_like)));
|
2019-12-07 00:52:53 +00:00
|
|
|
}
|
2020-04-18 18:26:10 +00:00
|
|
|
components.push(Punct(c));
|
|
|
|
} else {
|
|
|
|
panic!("unexpected character in a float token: {:?}", c)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !ident_like.is_empty() {
|
|
|
|
components.push(IdentLike(ident_like));
|
|
|
|
}
|
|
|
|
|
2020-10-09 23:01:44 +00:00
|
|
|
// With proc macros the span can refer to anything, the source may be too short,
|
|
|
|
// or too long, or non-ASCII. It only makes sense to break our span into components
|
|
|
|
// if its underlying text is identical to our float literal.
|
2020-04-18 18:26:10 +00:00
|
|
|
let span = self.token.span;
|
2020-10-09 23:01:44 +00:00
|
|
|
let can_take_span_apart =
|
|
|
|
|| self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref();
|
|
|
|
|
2020-04-18 18:26:10 +00:00
|
|
|
match &*components {
|
|
|
|
// 1e2
|
|
|
|
[IdentLike(i)] => {
|
|
|
|
self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
|
|
|
|
}
|
|
|
|
// 1.
|
|
|
|
[IdentLike(i), Punct('.')] => {
|
2020-10-09 23:01:44 +00:00
|
|
|
let (ident_span, dot_span) = if can_take_span_apart() {
|
|
|
|
let (span, ident_len) = (span.data(), BytePos::from_usize(i.len()));
|
|
|
|
let ident_span = span.with_hi(span.lo + ident_len);
|
|
|
|
let dot_span = span.with_lo(span.lo + ident_len);
|
|
|
|
(ident_span, dot_span)
|
|
|
|
} else {
|
|
|
|
(span, span)
|
|
|
|
};
|
2020-04-18 18:26:10 +00:00
|
|
|
assert!(suffix.is_none());
|
|
|
|
let symbol = Symbol::intern(&i);
|
2020-10-09 23:01:44 +00:00
|
|
|
self.token = Token::new(token::Ident(symbol, false), ident_span);
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go allong, pushing open/close delimiters to our buffer
just like regular tokens. One capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens an an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be neglibile (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
|
2020-04-18 18:26:10 +00:00
|
|
|
self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
|
|
|
|
}
|
|
|
|
// 1.2 | 1.2e3
|
|
|
|
[IdentLike(i1), Punct('.'), IdentLike(i2)] => {
|
2020-10-09 23:01:44 +00:00
|
|
|
let (ident1_span, dot_span, ident2_span) = if can_take_span_apart() {
|
|
|
|
let (span, ident1_len) = (span.data(), BytePos::from_usize(i1.len()));
|
|
|
|
let ident1_span = span.with_hi(span.lo + ident1_len);
|
|
|
|
let dot_span = span
|
|
|
|
.with_lo(span.lo + ident1_len)
|
|
|
|
.with_hi(span.lo + ident1_len + BytePos(1));
|
|
|
|
let ident2_span = self.token.span.with_lo(span.lo + ident1_len + BytePos(1));
|
|
|
|
(ident1_span, dot_span, ident2_span)
|
|
|
|
} else {
|
|
|
|
(span, span, span)
|
|
|
|
};
|
2020-04-18 18:26:10 +00:00
|
|
|
let symbol1 = Symbol::intern(&i1);
|
2020-10-09 23:01:44 +00:00
|
|
|
self.token = Token::new(token::Ident(symbol1, false), ident1_span);
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go allong, pushing open/close delimiters to our buffer
just like regular tokens. One capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens an an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be neglibile (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
// This needs to be `Spacing::Alone` to prevent regressions.
|
|
|
|
// See issue #76399 and PR #76285 for more details
|
|
|
|
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
|
2020-04-18 18:26:10 +00:00
|
|
|
let base1 =
|
|
|
|
self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
|
|
|
|
let symbol2 = Symbol::intern(&i2);
|
2020-10-09 23:01:44 +00:00
|
|
|
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go allong, pushing open/close delimiters to our buffer
just like regular tokens. One capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens an an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be neglibile (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
self.bump_with((next_token2, self.token_spacing)); // `.`
|
2020-04-18 18:26:10 +00:00
|
|
|
self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
|
|
|
|
}
|
|
|
|
// 1e+ | 1e- (recovered)
|
|
|
|
[IdentLike(_), Punct('+' | '-')] |
|
|
|
|
// 1e+2 | 1e-2
|
|
|
|
[IdentLike(_), Punct('+' | '-'), IdentLike(_)] |
|
2021-11-15 16:46:44 +00:00
|
|
|
// 1.2e+ | 1.2e-
|
|
|
|
[IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-')] |
|
2020-04-18 18:26:10 +00:00
|
|
|
// 1.2e+3 | 1.2e-3
|
|
|
|
[IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
|
|
|
|
// See the FIXME about `TokenCursor` above.
|
|
|
|
self.error_unexpected_after_dot();
|
|
|
|
base
|
|
|
|
}
|
|
|
|
_ => panic!("unexpected components in a float token: {:?}", components),
|
2019-12-06 23:34:32 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-06 23:16:19 +00:00
|
|
|
fn parse_tuple_field_access_expr(
|
|
|
|
&mut self,
|
|
|
|
lo: Span,
|
|
|
|
base: P<Expr>,
|
|
|
|
field: Symbol,
|
|
|
|
suffix: Option<Symbol>,
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go allong, pushing open/close delimiters to our buffer
just like regular tokens. One capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens an an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be neglibile (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
next_token: Option<(Token, Spacing)>,
|
2019-12-06 23:16:19 +00:00
|
|
|
) -> P<Expr> {
|
2020-04-18 18:26:10 +00:00
|
|
|
match next_token {
|
|
|
|
Some(next_token) => self.bump_with(next_token),
|
|
|
|
None => self.bump(),
|
|
|
|
}
|
2020-02-24 10:04:13 +00:00
|
|
|
let span = self.prev_token.span;
|
2019-12-06 23:16:19 +00:00
|
|
|
let field = ExprKind::Field(base, Ident::new(field, span));
|
2022-09-15 08:12:09 +00:00
|
|
|
if let Some(suffix) = suffix {
|
|
|
|
self.expect_no_tuple_index_suffix(span, suffix);
|
|
|
|
}
|
2022-08-14 23:58:38 +00:00
|
|
|
self.mk_expr(lo.to(span), field)
|
2019-12-06 23:16:19 +00:00
|
|
|
}
|
|
|
|
|
2019-12-06 23:08:44 +00:00
|
|
|
/// Parse a function call expression, `expr(...)`.
|
|
|
|
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
|
2022-04-26 12:40:14 +00:00
|
|
|
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
|
2021-09-07 17:45:16 +00:00
|
|
|
&& self.look_ahead_type_ascription_as_field()
|
|
|
|
{
|
2022-03-18 07:56:43 +00:00
|
|
|
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
|
2021-09-07 17:45:16 +00:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
let open_paren = self.token.span;
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
let mut seq = self
|
|
|
|
.parse_paren_expr_seq()
|
|
|
|
.map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
|
2021-09-14 18:16:33 +00:00
|
|
|
if let Some(expr) =
|
|
|
|
self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot)
|
|
|
|
{
|
|
|
|
return expr;
|
|
|
|
}
|
2022-04-26 12:40:14 +00:00
|
|
|
self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq)
|
2019-12-06 23:08:44 +00:00
|
|
|
}
|
|
|
|
|
2021-09-14 18:16:33 +00:00
|
|
|
/// If we encounter a parser state that looks like the user has written a `struct` literal with
|
|
|
|
/// parentheses instead of braces, recover the parser state and provide suggestions.
|
2021-09-20 15:24:47 +00:00
|
|
|
#[instrument(skip(self, seq, snapshot), level = "trace")]
|
2021-09-14 18:16:33 +00:00
|
|
|
fn maybe_recover_struct_lit_bad_delims(
|
|
|
|
&mut self,
|
|
|
|
lo: Span,
|
|
|
|
open_paren: Span,
|
|
|
|
seq: &mut PResult<'a, P<Expr>>,
|
2022-03-18 07:56:43 +00:00
|
|
|
snapshot: Option<(SnapshotParser<'a>, ExprKind)>,
|
2021-09-14 18:16:33 +00:00
|
|
|
) -> Option<P<Expr>> {
|
2022-10-28 18:44:26 +00:00
|
|
|
if !self.may_recover() {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
match (seq.as_mut(), snapshot) {
|
2022-01-26 03:39:14 +00:00
|
|
|
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
|
2021-09-07 17:45:16 +00:00
|
|
|
snapshot.bump(); // `(`
|
2022-09-14 18:11:42 +00:00
|
|
|
match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
|
2022-04-26 12:40:14 +00:00
|
|
|
Ok((fields, ..))
|
|
|
|
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
|
|
|
|
{
|
2021-12-07 15:44:51 +00:00
|
|
|
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
|
2021-09-07 17:45:16 +00:00
|
|
|
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
|
2022-03-18 07:56:43 +00:00
|
|
|
self.restore_snapshot(snapshot);
|
2021-09-07 17:45:16 +00:00
|
|
|
let close_paren = self.prev_token.span;
|
|
|
|
let span = lo.to(self.prev_token.span);
|
2021-12-07 15:44:51 +00:00
|
|
|
if !fields.is_empty() {
|
2022-08-24 20:41:51 +00:00
|
|
|
let mut replacement_err = ParenthesesWithStructFields {
|
2021-12-07 15:44:51 +00:00
|
|
|
span,
|
2022-09-14 18:11:42 +00:00
|
|
|
r#type: path,
|
2022-08-24 20:41:51 +00:00
|
|
|
braces_for_struct: BracesForStructLiteral {
|
|
|
|
first: open_paren,
|
|
|
|
second: close_paren,
|
|
|
|
},
|
|
|
|
no_fields_for_fn: NoFieldsForFnCall {
|
|
|
|
fields: fields
|
|
|
|
.into_iter()
|
|
|
|
.map(|field| field.span.until(field.expr.span))
|
|
|
|
.collect(),
|
|
|
|
},
|
|
|
|
}
|
|
|
|
.into_diagnostic(&self.sess.span_diagnostic);
|
|
|
|
replacement_err.emit();
|
|
|
|
|
|
|
|
let old_err = mem::replace(err, replacement_err);
|
|
|
|
old_err.cancel();
|
2021-12-07 15:44:51 +00:00
|
|
|
} else {
|
|
|
|
err.emit();
|
|
|
|
}
|
2021-09-14 18:16:33 +00:00
|
|
|
return Some(self.mk_expr_err(span));
|
2021-09-07 17:45:16 +00:00
|
|
|
}
|
|
|
|
Ok(_) => {}
|
2022-01-27 09:44:25 +00:00
|
|
|
Err(mut err) => {
|
|
|
|
err.emit();
|
|
|
|
}
|
2021-09-07 17:45:16 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
2021-09-14 18:16:33 +00:00
|
|
|
None
|
2019-12-06 23:08:44 +00:00
|
|
|
}
|
|
|
|
|
2019-12-06 23:04:46 +00:00
|
|
|
/// Parse an indexing expression `expr[...]`.
|
|
|
|
fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
|
2022-08-09 17:29:28 +00:00
|
|
|
let prev_span = self.prev_token.span;
|
|
|
|
let open_delim_span = self.token.span;
|
2019-12-06 23:04:46 +00:00
|
|
|
self.bump(); // `[`
|
|
|
|
let index = self.parse_expr()?;
|
2022-08-09 17:29:28 +00:00
|
|
|
self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?;
|
2022-04-26 12:40:14 +00:00
|
|
|
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index)))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Assuming we have just parsed `.`, continue parsing into an expression.
|
|
|
|
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
2020-03-04 21:34:57 +00:00
|
|
|
if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) {
|
2021-02-21 12:01:01 +00:00
|
|
|
return Ok(self.mk_await_expr(self_arg, lo));
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2020-06-09 19:34:23 +00:00
|
|
|
let fn_span_lo = self.token.span;
|
2021-05-15 21:56:28 +00:00
|
|
|
let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
|
2022-04-26 12:40:14 +00:00
|
|
|
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
|
2020-07-23 16:34:07 +00:00
|
|
|
self.check_turbofish_missing_angle_brackets(&mut segment);
|
2019-08-11 11:14:30 +00:00
|
|
|
|
2022-04-26 12:40:14 +00:00
|
|
|
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
2019-12-06 23:04:46 +00:00
|
|
|
// Method call `expr.f()`
|
2022-08-07 13:21:11 +00:00
|
|
|
let args = self.parse_paren_expr_seq()?;
|
2020-06-09 19:34:23 +00:00
|
|
|
let fn_span = fn_span_lo.to(self.prev_token.span);
|
2020-02-29 11:56:15 +00:00
|
|
|
let span = lo.to(self.prev_token.span);
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span)))
|
2019-12-06 23:04:46 +00:00
|
|
|
} else {
|
|
|
|
// Field access `expr.f`
|
|
|
|
if let Some(args) = segment.args {
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(FieldExpressionWithGeneric(args.span()));
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2020-02-29 11:56:15 +00:00
|
|
|
let span = lo.to(self.prev_token.span);
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident)))
|
2019-12-06 23:04:46 +00:00
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// At the bottom (top?) of the precedence hierarchy,
|
|
|
|
/// Parses things like parenthesized exprs, macros, `return`, etc.
|
|
|
|
///
|
|
|
|
/// N.B., this does not parse outer attributes, and is private because it only works
|
|
|
|
/// correctly if called from `parse_dot_or_call_expr()`.
|
|
|
|
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
|
|
|
|
maybe_recover_from_interpolated_ty_qpath!(self, true);
|
|
|
|
maybe_whole_expr!(self);
|
|
|
|
|
|
|
|
// Outer attributes are already parsed and will be
|
|
|
|
// added to the return value after the fact.
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
|
2019-12-03 14:31:45 +00:00
|
|
|
let lo = self.token.span;
|
2019-12-03 15:38:08 +00:00
|
|
|
if let token::Literal(_) = self.token.kind {
|
2019-08-11 11:14:30 +00:00
|
|
|
// This match arm is a special-case of the `_` match arm below and
|
|
|
|
// could be removed without changing functionality, but it's faster
|
|
|
|
// to have it here, especially for programs with large constants.
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_lit_expr()
|
2022-04-26 12:40:14 +00:00
|
|
|
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_tuple_parens_expr()
|
2022-04-26 12:40:14 +00:00
|
|
|
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_block_expr(None, lo, BlockCheckMode::Default)
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_closure_expr().map_err(|mut err| {
|
2022-02-25 00:02:38 +00:00
|
|
|
// If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
|
|
|
|
// then suggest parens around the lhs.
|
|
|
|
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
|
2022-09-30 01:38:15 +00:00
|
|
|
err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
|
2022-02-25 00:02:38 +00:00
|
|
|
}
|
|
|
|
err
|
|
|
|
})
|
2022-04-26 12:40:14 +00:00
|
|
|
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_array_or_repeat_expr(Delimiter::Bracket)
|
2020-03-07 12:15:58 +00:00
|
|
|
} else if self.check_path() {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_path_start_expr()
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_closure_expr()
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::If) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_if_expr()
|
2020-03-21 07:32:55 +00:00
|
|
|
} else if self.check_keyword(kw::For) {
|
|
|
|
if self.choose_generics_over_qpath(1) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_closure_expr()
|
2020-03-21 07:32:55 +00:00
|
|
|
} else {
|
|
|
|
assert!(self.eat_keyword(kw::For));
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_for_expr(None, self.prev_token.span)
|
2020-03-21 07:32:55 +00:00
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::While) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_while_expr(None, self.prev_token.span)
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if let Some(label) = self.eat_label() {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_labeled_expr(label, true)
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Loop) {
|
2022-01-12 20:43:24 +00:00
|
|
|
let sp = self.prev_token.span;
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_loop_expr(None, self.prev_token.span).map_err(|mut err| {
|
2022-01-12 20:43:24 +00:00
|
|
|
err.span_label(sp, "while parsing this `loop` expression");
|
|
|
|
err
|
|
|
|
})
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Continue) {
|
|
|
|
let kind = ExprKind::Continue(self.eat_label());
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Match) {
|
2020-02-29 11:56:15 +00:00
|
|
|
let match_sp = self.prev_token.span;
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_match_expr().map_err(|mut err| {
|
2022-01-12 20:43:24 +00:00
|
|
|
err.span_label(match_sp, "while parsing this `match` expression");
|
2019-12-03 15:38:08 +00:00
|
|
|
err
|
|
|
|
})
|
|
|
|
} else if self.eat_keyword(kw::Unsafe) {
|
2022-01-12 20:43:24 +00:00
|
|
|
let sp = self.prev_token.span;
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
|
|
|
|
|mut err| {
|
2022-01-12 20:43:24 +00:00
|
|
|
err.span_label(sp, "while parsing this `unsafe` expression");
|
|
|
|
err
|
2022-08-14 23:58:38 +00:00
|
|
|
},
|
|
|
|
)
|
2020-10-19 21:44:37 +00:00
|
|
|
} else if self.check_inline_const(0) {
|
2021-11-22 16:25:28 +00:00
|
|
|
self.parse_const_block(lo.to(self.token.span), false)
|
2022-10-28 18:44:26 +00:00
|
|
|
} else if self.may_recover() && self.is_do_catch_block() {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.recover_do_catch()
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.is_try_block() {
|
|
|
|
self.expect_keyword(kw::Try)?;
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_try_block(lo)
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Return) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_return_expr()
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Break) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_break_expr()
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Yield) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_yield_expr()
|
2022-03-26 06:43:54 +00:00
|
|
|
} else if self.is_do_yeet() {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_yeet_expr()
|
2022-07-08 10:25:50 +00:00
|
|
|
} else if self.check_keyword(kw::Let) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_let_expr()
|
2020-11-11 13:15:15 +00:00
|
|
|
} else if self.eat_keyword(kw::Underscore) {
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore))
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
|
|
|
|
// Don't complain about bare semicolons after unclosed braces
|
|
|
|
// recovery in order to keep the error count down. Fixing the
|
|
|
|
// delimiters will possibly also fix the bare semicolon found in
|
|
|
|
// expression context. For example, silence the following error:
|
|
|
|
//
|
|
|
|
// error: expected expression, found `;`
|
|
|
|
// --> file.rs:2:13
|
|
|
|
// |
|
|
|
|
// 2 | foo(bar(;
|
|
|
|
// | ^ expected expression
|
|
|
|
self.bump();
|
|
|
|
Ok(self.mk_expr_err(self.token.span))
|
2020-03-04 21:34:57 +00:00
|
|
|
} else if self.token.uninterpolated_span().rust_2018() {
|
2019-12-03 15:38:08 +00:00
|
|
|
// `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
|
|
|
|
if self.check_keyword(kw::Async) {
|
2019-12-22 22:42:04 +00:00
|
|
|
if self.is_async_block() {
|
|
|
|
// Check for `async {` and `async move {`.
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_async_block()
|
2019-08-11 11:14:30 +00:00
|
|
|
} else {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_closure_expr()
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
} else if self.eat_keyword(kw::Await) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.recover_incorrect_await_syntax(lo, self.prev_token.span)
|
2019-12-03 15:38:08 +00:00
|
|
|
} else {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_lit_expr()
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
} else {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_lit_expr()
|
2019-12-03 14:31:45 +00:00
|
|
|
}
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2019-12-03 14:31:45 +00:00
|
|
|
let lo = self.token.span;
|
2022-10-10 02:40:56 +00:00
|
|
|
match self.parse_opt_token_lit() {
|
|
|
|
Some((token_lit, _)) => {
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 14:31:45 +00:00
|
|
|
}
|
2020-04-17 17:10:29 +00:00
|
|
|
None => self.try_macro_suggestion(),
|
2019-12-03 14:31:45 +00:00
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_tuple_parens_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2019-12-03 10:36:40 +00:00
|
|
|
let lo = self.token.span;
|
2022-04-26 12:40:14 +00:00
|
|
|
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
|
2019-12-04 09:13:29 +00:00
|
|
|
let (es, trailing_comma) = match self.parse_seq_to_end(
|
2022-04-26 12:40:14 +00:00
|
|
|
&token::CloseDelim(Delimiter::Parenthesis),
|
2019-12-04 09:13:29 +00:00
|
|
|
SeqSep::trailing_allowed(token::Comma),
|
|
|
|
|p| p.parse_expr_catch_underscore(),
|
|
|
|
) {
|
2019-12-03 10:36:40 +00:00
|
|
|
Ok(x) => x,
|
2022-04-26 12:40:14 +00:00
|
|
|
Err(err) => {
|
|
|
|
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err)));
|
|
|
|
}
|
2019-12-03 10:36:40 +00:00
|
|
|
};
|
|
|
|
let kind = if es.len() == 1 && !trailing_comma {
|
|
|
|
// `(e)` is parenthesized `e`.
|
2020-03-03 00:19:00 +00:00
|
|
|
ExprKind::Paren(es.into_iter().next().unwrap())
|
2019-12-03 10:36:40 +00:00
|
|
|
} else {
|
|
|
|
// `(e,)` is a tuple with only one field, `e`.
|
|
|
|
ExprKind::Tup(es)
|
|
|
|
};
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 10:36:40 +00:00
|
|
|
}
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_array_or_repeat_expr(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
|
2019-12-03 10:49:56 +00:00
|
|
|
let lo = self.token.span;
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
self.bump(); // `[` or other open delim
|
2019-12-03 10:49:56 +00:00
|
|
|
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
let close = &token::CloseDelim(close_delim);
|
2019-12-04 09:13:29 +00:00
|
|
|
let kind = if self.eat(close) {
|
2019-12-03 10:49:56 +00:00
|
|
|
// Empty vector
|
|
|
|
ExprKind::Array(Vec::new())
|
|
|
|
} else {
|
|
|
|
// Non-empty vector
|
|
|
|
let first_expr = self.parse_expr()?;
|
|
|
|
if self.eat(&token::Semi) {
|
|
|
|
// Repeating array syntax: `[ 0; 512 ]`
|
2019-12-08 07:19:53 +00:00
|
|
|
let count = self.parse_anon_const_expr()?;
|
2019-12-04 09:13:29 +00:00
|
|
|
self.expect(close)?;
|
2019-12-03 10:49:56 +00:00
|
|
|
ExprKind::Repeat(first_expr, count)
|
|
|
|
} else if self.eat(&token::Comma) {
|
|
|
|
// Vector with two or more elements.
|
2019-12-04 09:13:29 +00:00
|
|
|
let sep = SeqSep::trailing_allowed(token::Comma);
|
|
|
|
let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
|
2019-12-03 10:49:56 +00:00
|
|
|
let mut exprs = vec![first_expr];
|
|
|
|
exprs.extend(remaining_exprs);
|
|
|
|
ExprKind::Array(exprs)
|
|
|
|
} else {
|
|
|
|
// Vector with one element
|
2019-12-04 09:13:29 +00:00
|
|
|
self.expect(close)?;
|
2019-12-03 10:49:56 +00:00
|
|
|
ExprKind::Array(vec![first_expr])
|
|
|
|
}
|
|
|
|
};
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 10:49:56 +00:00
|
|
|
}
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_path_start_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2020-12-10 12:20:07 +00:00
|
|
|
let (qself, path) = if self.eat_lt() {
|
|
|
|
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
|
|
|
|
(Some(qself), path)
|
|
|
|
} else {
|
|
|
|
(None, self.parse_path(PathStyle::Expr)?)
|
|
|
|
};
|
2019-12-03 11:43:45 +00:00
|
|
|
|
|
|
|
// `!`, as an operator, is prefix, so we know this isn't that.
|
2022-08-14 23:13:03 +00:00
|
|
|
let (span, kind) = if self.eat(&token::Not) {
|
2019-12-03 11:43:45 +00:00
|
|
|
// MACRO INVOCATION expression
|
2020-12-10 12:20:07 +00:00
|
|
|
if qself.is_some() {
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(MacroInvocationWithQualifiedPath(path.span));
|
2020-12-10 12:20:07 +00:00
|
|
|
}
|
2022-08-14 23:13:03 +00:00
|
|
|
let lo = path.span;
|
2022-08-12 02:20:10 +00:00
|
|
|
let mac = P(MacCall {
|
2019-12-03 11:43:45 +00:00
|
|
|
path,
|
|
|
|
args: self.parse_mac_args()?,
|
|
|
|
prior_type_ascription: self.last_type_ascription,
|
2022-08-12 02:20:10 +00:00
|
|
|
});
|
2022-08-14 23:13:03 +00:00
|
|
|
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
|
|
|
|
} else if self.check(&token::OpenDelim(Delimiter::Brace)) &&
|
2022-08-14 23:58:38 +00:00
|
|
|
let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path) {
|
2020-12-10 12:20:07 +00:00
|
|
|
if qself.is_some() {
|
|
|
|
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
|
|
|
|
}
|
2019-12-03 11:43:45 +00:00
|
|
|
return expr;
|
|
|
|
} else {
|
2020-12-10 12:20:07 +00:00
|
|
|
(path.span, ExprKind::Path(qself, path))
|
2019-12-03 11:43:45 +00:00
|
|
|
};
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(span, kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 11:43:45 +00:00
|
|
|
}
|
|
|
|
|
2020-03-05 06:54:22 +00:00
|
|
|
/// Parse `'label: $expr`. The label is already parsed.
|
2021-01-21 04:03:29 +00:00
|
|
|
fn parse_labeled_expr(
|
|
|
|
&mut self,
|
2022-09-01 18:48:09 +00:00
|
|
|
label_: Label,
|
2022-01-13 23:44:17 +00:00
|
|
|
mut consume_colon: bool,
|
2021-01-21 04:03:29 +00:00
|
|
|
) -> PResult<'a, P<Expr>> {
|
2022-09-01 18:48:09 +00:00
|
|
|
let lo = label_.ident.span;
|
|
|
|
let label = Some(label_);
|
2020-03-05 06:54:22 +00:00
|
|
|
let ate_colon = self.eat(&token::Colon);
|
|
|
|
let expr = if self.eat_keyword(kw::While) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_while_expr(label, lo)
|
2020-03-05 00:47:15 +00:00
|
|
|
} else if self.eat_keyword(kw::For) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_for_expr(label, lo)
|
2020-03-05 00:47:15 +00:00
|
|
|
} else if self.eat_keyword(kw::Loop) {
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_loop_expr(label, lo)
|
2022-05-01 17:05:35 +00:00
|
|
|
} else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
|
|
|
|
|| self.token.is_whole_block()
|
|
|
|
{
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_block_expr(label, lo, BlockCheckMode::Default)
|
2022-09-01 18:48:09 +00:00
|
|
|
} else if !ate_colon
|
2022-10-28 18:44:26 +00:00
|
|
|
&& self.may_recover()
|
2022-09-01 18:48:09 +00:00
|
|
|
&& (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
|
|
|
|
|| self.token.is_op())
|
|
|
|
{
|
|
|
|
let lit = self.recover_unclosed_char(label_.ident, |self_| {
|
|
|
|
self_.sess.create_err(UnexpectedTokenAfterLabel {
|
|
|
|
span: self_.token.span,
|
|
|
|
remove_label: None,
|
|
|
|
enclose_in_block: None,
|
|
|
|
})
|
|
|
|
});
|
|
|
|
consume_colon = false;
|
2022-10-10 02:40:56 +00:00
|
|
|
Ok(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
|
2022-05-01 17:05:35 +00:00
|
|
|
} else if !ate_colon
|
|
|
|
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
|
|
|
|
{
|
2022-02-02 18:33:13 +00:00
|
|
|
// We're probably inside of a `Path<'a>` that needs a turbofish
|
2022-08-24 20:41:51 +00:00
|
|
|
self.sess.emit_err(UnexpectedTokenAfterLabel {
|
|
|
|
span: self.token.span,
|
|
|
|
remove_label: None,
|
|
|
|
enclose_in_block: None,
|
|
|
|
});
|
2022-01-13 23:44:17 +00:00
|
|
|
consume_colon = false;
|
|
|
|
Ok(self.mk_expr_err(lo))
|
2020-03-05 00:47:15 +00:00
|
|
|
} else {
|
2022-08-24 20:41:51 +00:00
|
|
|
let mut err = UnexpectedTokenAfterLabel {
|
|
|
|
span: self.token.span,
|
|
|
|
remove_label: None,
|
|
|
|
enclose_in_block: None,
|
|
|
|
};
|
2022-06-05 12:45:29 +00:00
|
|
|
|
2020-03-05 00:47:15 +00:00
|
|
|
// Continue as an expression in an effort to recover on `'label: non_block_expr`.
|
2022-06-05 12:45:29 +00:00
|
|
|
let expr = self.parse_expr().map(|expr| {
|
2022-06-05 19:34:11 +00:00
|
|
|
let span = expr.span;
|
|
|
|
|
2022-06-05 19:12:51 +00:00
|
|
|
let found_labeled_breaks = {
|
|
|
|
struct FindLabeledBreaksVisitor(bool);
|
|
|
|
|
|
|
|
impl<'ast> Visitor<'ast> for FindLabeledBreaksVisitor {
|
|
|
|
fn visit_expr_post(&mut self, ex: &'ast Expr) {
|
|
|
|
if let ExprKind::Break(Some(_label), _) = ex.kind {
|
|
|
|
self.0 = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut vis = FindLabeledBreaksVisitor(false);
|
|
|
|
vis.visit_expr(&expr);
|
|
|
|
vis.0
|
|
|
|
};
|
|
|
|
|
2022-06-05 19:34:11 +00:00
|
|
|
// Suggestion involves adding a (as of time of writing this, unstable) labeled block.
|
|
|
|
//
|
|
|
|
// If there are no breaks that may use this label, suggest removing the label and
|
|
|
|
// recover to the unmodified expression.
|
2022-06-05 19:12:51 +00:00
|
|
|
if !found_labeled_breaks {
|
2022-08-24 20:41:51 +00:00
|
|
|
err.remove_label = Some(lo.until(span));
|
2022-06-05 19:34:11 +00:00
|
|
|
|
2022-06-05 19:12:51 +00:00
|
|
|
return expr;
|
|
|
|
}
|
|
|
|
|
2022-08-24 20:41:51 +00:00
|
|
|
err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg {
|
|
|
|
left: span.shrink_to_lo(),
|
|
|
|
right: span.shrink_to_hi(),
|
|
|
|
});
|
2022-06-05 12:45:29 +00:00
|
|
|
|
2022-08-18 02:13:37 +00:00
|
|
|
// Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
|
2022-06-05 12:45:29 +00:00
|
|
|
let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
|
|
|
|
let blk = self.mk_block(vec![stmt], BlockCheckMode::Default, span);
|
2022-08-14 23:58:38 +00:00
|
|
|
self.mk_expr(span, ExprKind::Block(blk, label))
|
2022-06-05 12:45:29 +00:00
|
|
|
});
|
|
|
|
|
2022-08-24 20:41:51 +00:00
|
|
|
self.sess.emit_err(err);
|
2022-06-05 12:45:29 +00:00
|
|
|
expr
|
2020-03-05 06:54:22 +00:00
|
|
|
}?;
|
|
|
|
|
2021-01-21 04:03:29 +00:00
|
|
|
if !ate_colon && consume_colon {
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(RequireColonAfterLabeledExpression {
|
|
|
|
span: expr.span,
|
|
|
|
label: lo,
|
|
|
|
label_end: lo.shrink_to_hi(),
|
|
|
|
});
|
2019-12-03 11:48:08 +00:00
|
|
|
}
|
2020-03-05 06:54:22 +00:00
|
|
|
|
|
|
|
Ok(expr)
|
2019-12-03 11:48:08 +00:00
|
|
|
}
|
|
|
|
|
2022-09-01 18:48:09 +00:00
|
|
|
/// Emit an error when a char is parsed as a lifetime because of a missing quote
|
|
|
|
pub(super) fn recover_unclosed_char(
|
2022-10-10 02:40:56 +00:00
|
|
|
&self,
|
2022-09-01 18:48:09 +00:00
|
|
|
lifetime: Ident,
|
2022-10-10 02:40:56 +00:00
|
|
|
err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
|
2022-09-01 18:48:09 +00:00
|
|
|
) -> ast::Lit {
|
|
|
|
if let Some(mut diag) =
|
|
|
|
self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
|
|
|
|
{
|
|
|
|
diag.span_suggestion_verbose(
|
|
|
|
lifetime.span.shrink_to_hi(),
|
|
|
|
"add `'` to close the char literal",
|
|
|
|
"'",
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
)
|
|
|
|
.emit();
|
|
|
|
} else {
|
|
|
|
err(self)
|
|
|
|
.span_suggestion_verbose(
|
|
|
|
lifetime.span.shrink_to_hi(),
|
|
|
|
"add `'` to close the char literal",
|
|
|
|
"'",
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
)
|
|
|
|
.emit();
|
|
|
|
}
|
2022-10-10 02:40:56 +00:00
|
|
|
let name = lifetime.without_first_quote().name;
|
2022-09-01 18:48:09 +00:00
|
|
|
ast::Lit {
|
2022-10-10 02:40:56 +00:00
|
|
|
token_lit: token::Lit::new(token::LitKind::Char, name, None),
|
|
|
|
kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
|
2022-09-01 18:48:09 +00:00
|
|
|
span: lifetime.span,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-03 12:35:05 +00:00
|
|
|
/// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
|
2022-08-14 23:58:38 +00:00
|
|
|
fn recover_do_catch(&mut self) -> PResult<'a, P<Expr>> {
|
2019-12-03 12:35:05 +00:00
|
|
|
let lo = self.token.span;
|
|
|
|
|
|
|
|
self.bump(); // `do`
|
|
|
|
self.bump(); // `catch`
|
|
|
|
|
2022-08-17 17:05:49 +00:00
|
|
|
let span = lo.to(self.prev_token.span);
|
|
|
|
self.sess.emit_err(DoCatchSyntaxRemoved { span });
|
2019-12-03 12:35:05 +00:00
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_try_block(lo)
|
2019-12-03 12:35:05 +00:00
|
|
|
}
|
|
|
|
|
2019-12-03 13:01:24 +00:00
|
|
|
/// Parse an expression if the token can begin one.
|
|
|
|
fn parse_expr_opt(&mut self) -> PResult<'a, Option<P<Expr>>> {
|
2019-12-22 22:42:04 +00:00
|
|
|
Ok(if self.token.can_begin_expr() { Some(self.parse_expr()?) } else { None })
|
2019-12-03 13:01:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse `"return" expr?`.
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_return_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
let lo = self.prev_token.span;
|
2019-12-03 13:01:24 +00:00
|
|
|
let kind = ExprKind::Ret(self.parse_expr_opt()?);
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 13:01:24 +00:00
|
|
|
}
|
|
|
|
|
2022-03-26 06:43:54 +00:00
|
|
|
/// Parse `"do" "yeet" expr?`.
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_yeet_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2022-03-26 06:43:54 +00:00
|
|
|
let lo = self.token.span;
|
|
|
|
|
|
|
|
self.bump(); // `do`
|
|
|
|
self.bump(); // `yeet`
|
|
|
|
|
|
|
|
let kind = ExprKind::Yeet(self.parse_expr_opt()?);
|
|
|
|
|
|
|
|
let span = lo.to(self.prev_token.span);
|
|
|
|
self.sess.gated_spans.gate(sym::yeet_expr, span);
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(span, kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2022-03-26 06:43:54 +00:00
|
|
|
}
|
|
|
|
|
2021-07-10 14:38:55 +00:00
|
|
|
/// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten.
/// If the label is followed immediately by a `:` token, the label and `:` are
/// parsed as part of the expression (i.e. a labeled loop). The language team has
/// decided in #87026 to require parentheses as a visual aid to avoid confusion if
/// the break expression of an unlabeled break is a labeled loop (as in
/// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
/// expression only gets a warning for compatibility reasons; and a labeled break
/// with a labeled loop does not even get a warning because there is no ambiguity.
fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
    // `break` is the previous token; the whole expression starts there.
    let lo = self.prev_token.span;
    let mut label = self.eat_label();
    let kind = if label.is_some() && self.token == token::Colon {
        // The value expression can be a labeled loop, see issue #86948, e.g.:
        // `loop { break 'label: loop { break 'label 42; }; }`
        // `label.take()` moves the label into the value expression: it belongs
        // to the labeled loop, not to the `break` itself.
        let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
        self.sess.emit_err(LabeledLoopInBreak {
            span: lexpr.span,
            // Suggest parenthesizing the value expression to disambiguate.
            sub: WrapExpressionInParentheses {
                left: lexpr.span.shrink_to_lo(),
                right: lexpr.span.shrink_to_hi(),
            },
        });
        Some(lexpr)
    } else if self.token != token::OpenDelim(Delimiter::Brace)
        || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
    {
        // A `{` under NO_STRUCT_LITERAL restrictions is not treated as a break
        // value (it is the body of the surrounding construct); otherwise try to
        // parse an optional value expression.
        let expr = self.parse_expr_opt()?;
        if let Some(ref expr) = expr {
            if label.is_some()
                && matches!(
                    expr.kind,
                    ExprKind::While(_, _, None)
                        | ExprKind::ForLoop(_, _, _, None)
                        | ExprKind::Loop(_, None)
                        | ExprKind::Block(_, None)
                )
            {
                // Labeled break whose value is an *unlabeled* loop/block: legal
                // but confusing, so only buffer a compatibility lint.
                self.sess.buffer_lint_with_diagnostic(
                    BREAK_WITH_LABEL_AND_LOOP,
                    lo.to(expr.span),
                    ast::CRATE_NODE_ID,
                    "this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression",
                    BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
                );
            }
        }
        expr
    } else {
        // Bare `break` with no value expression.
        None
    };
    let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind));
    self.maybe_recover_from_bad_qpath(expr)
}
|
|
|
|
|
2019-12-03 13:01:24 +00:00
|
|
|
/// Parse `"yield" expr?`.
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_yield_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
let lo = self.prev_token.span;
|
2019-12-03 13:01:24 +00:00
|
|
|
let kind = ExprKind::Yield(self.parse_expr_opt()?);
|
2020-02-29 11:56:15 +00:00
|
|
|
let span = lo.to(self.prev_token.span);
|
2019-12-03 13:01:24 +00:00
|
|
|
self.sess.gated_spans.gate(sym::generators, span);
|
2022-08-14 23:58:38 +00:00
|
|
|
let expr = self.mk_expr(span, kind);
|
2022-05-19 05:51:49 +00:00
|
|
|
self.maybe_recover_from_bad_qpath(expr)
|
2019-12-03 13:01:24 +00:00
|
|
|
}
|
|
|
|
|
2019-11-16 17:11:05 +00:00
|
|
|
/// Returns a string literal if the next token is a string literal.
/// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
/// and returns `None` if the next token is not literal at all.
pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
    match self.parse_opt_ast_lit() {
        Some(lit) => match lit.kind {
            // A proper string literal: repackage it as an `ast::StrLit`.
            ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
                style,
                // The raw (escaped) symbol and suffix come from the token form.
                symbol: lit.token_lit.symbol,
                suffix: lit.token_lit.suffix,
                span: lit.span,
                symbol_unescaped,
            }),
            // A literal of some other kind; hand it back so the caller can
            // report the mismatch precisely.
            _ => Err(Some(lit)),
        },
        // The next token was not a literal at all.
        None => Err(None),
    }
}
|
|
|
|
|
2022-10-10 02:40:56 +00:00
|
|
|
/// Error/recovery path taken when a literal was expected but the next token
/// is not one. Handles two special cases before producing a generic
/// "unexpected token" error: an interpolated `ExprKind::Err` (already
/// reported elsewhere, so only a delayed bug is kept) and a lifetime token,
/// which is reinterpreted as an unclosed char literal.
fn handle_missing_lit(&mut self) -> PResult<'a, Lit> {
    if let token::Interpolated(inner) = &self.token.kind {
        // Both expression and literal nonterminals carry an expression.
        let expr = match inner.as_ref() {
            token::NtExpr(expr) => Some(expr),
            token::NtLiteral(expr) => Some(expr),
            _ => None,
        };
        if let Some(expr) = expr {
            if matches!(expr.kind, ExprKind::Err) {
                // The interpolated expression is already an error; avoid a
                // duplicate user-visible diagnostic by downgrading to a
                // delayed bug before returning the error.
                let mut err = InvalidInterpolatedExpression { span: self.token.span }
                    .into_diagnostic(&self.sess.span_diagnostic);
                err.downgrade_to_delayed_bug();
                return Err(err);
            }
        }
    }
    // Clone the token so the error closure can describe it even after the
    // parser has moved past it.
    let token = self.token.clone();
    let err = |self_: &Self| {
        let msg = format!("unexpected token: {}", super::token_descr(&token));
        self_.struct_span_err(token.span, &msg)
    };
    // On an error path, eagerly consider a lifetime to be an unclosed character lit
    if self.token.is_lifetime() {
        let lt = self.expect_lifetime();
        Ok(self.recover_unclosed_char(lt.ident, err))
    } else {
        Err(err(self))
    }
}
|
|
|
|
|
2022-10-10 02:40:56 +00:00
|
|
|
pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
    // Fast path: the next token is a literal. Otherwise fall back to the
    // shared recovery/error path and project the recovered AST literal back
    // into its token form.
    match self.parse_opt_token_lit() {
        Some(lit_and_span) => Ok(lit_and_span),
        None => {
            let lit = self.handle_missing_lit()?;
            Ok((lit.token_lit, lit.span))
        }
    }
}
|
|
|
|
|
|
|
|
pub(super) fn parse_ast_lit(&mut self) -> PResult<'a, Lit> {
    // Try to parse a literal; on failure, defer to the shared recovery path.
    match self.parse_opt_ast_lit() {
        Some(lit) => Ok(lit),
        None => self.handle_missing_lit(),
    }
}
|
|
|
|
|
|
|
|
/// If the current token is a `.` directly followed (no whitespace between
/// the spans) by an integer literal, recover it as a float literal with a
/// leading `0` (`.4` -> `0.4`), emitting an error. Returns the synthesized
/// float token on recovery, or `None` when no recovery applies. The parser
/// is advanced past the `.` only when recovery happens.
fn recover_after_dot(&mut self) -> Option<Token> {
    let mut recovered = None;
    if self.token == token::Dot {
        // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
        // dot would follow an optional literal, so we do this unconditionally.
        recovered = self.look_ahead(1, |next_token| {
            if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                next_token.kind
            {
                // Only fuse the tokens when they are adjacent in the source;
                // `. 4` should not become `0.4`.
                if self.token.span.hi() == next_token.span.lo() {
                    let s = String::from("0.") + symbol.as_str();
                    let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                    // The synthesized token spans both the `.` and the integer.
                    return Some(Token::new(kind, self.token.span.to(next_token.span)));
                }
            }
            None
        });
        if let Some(token) = &recovered {
            // Eat the `.` so the caller sees the integer next, and report the
            // missing integer part with the corrected spelling.
            self.bump();
            self.sess.emit_err(FloatLiteralRequiresIntegerPart {
                span: token.span,
                correct: pprust::token_to_string(token).into_owned(),
            });
        }
    }

    recovered
}
|
|
|
|
|
2022-10-10 02:40:56 +00:00
|
|
|
/// Matches `lit = true | false | token_lit`.
|
|
|
|
/// Returns `None` if the next token is not a literal.
|
|
|
|
pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
|
|
|
|
let recovered = self.recover_after_dot();
|
|
|
|
let token = recovered.as_ref().unwrap_or(&self.token);
|
|
|
|
let span = token.span;
|
|
|
|
token::Lit::from_token(token).map(|token_lit| {
|
|
|
|
self.bump();
|
|
|
|
(token_lit, span)
|
|
|
|
})
|
|
|
|
}
|
2022-01-17 22:13:15 +00:00
|
|
|
|
2022-10-10 02:40:56 +00:00
|
|
|
/// Matches `lit = true | false | token_lit`.
/// Returns `None` if the next token is not a literal.
pub(super) fn parse_opt_ast_lit(&mut self) -> Option<Lit> {
    // `.4` may have been recovered into a synthesized `0.4` float token.
    let recovered = self.recover_after_dot();
    let token = recovered.as_ref().unwrap_or(&self.token);
    match token::Lit::from_token(token) {
        Some(token_lit) => {
            match Lit::from_token_lit(token_lit, token.span) {
                Ok(lit) => {
                    self.bump();
                    Some(lit)
                }
                Err(err) => {
                    // The token was literal-shaped but its contents are
                    // invalid (e.g. bad escape or suffix). Report the error
                    // and still return a literal so parsing can continue.
                    let span = token.span;
                    let token::Literal(lit) = token.kind else {
                        // `from_token` succeeded above, so the kind must be `Literal`.
                        unreachable!();
                    };
                    self.bump();
                    report_lit_error(&self.sess, err, lit, span);
                    // Pack possible quotes and prefixes from the original literal into
                    // the error literal's symbol so they can be pretty-printed faithfully.
                    let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                    let symbol = Symbol::intern(&suffixless_lit.to_string());
                    let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                    // `token::Err` literals always convert successfully.
                    Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
                }
            }
        }
        None => None,
    }
}
|
|
|
|
|
2022-09-15 08:12:09 +00:00
|
|
|
pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
|
|
|
|
if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
|
|
|
|
// #59553: warn instead of reject out of hand to allow the fix to percolate
|
|
|
|
// through the ecosystem when people fix their macros
|
|
|
|
self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
|
|
|
|
span,
|
|
|
|
suffix,
|
|
|
|
exception: Some(()),
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
|
2019-10-11 16:40:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
|
2020-03-16 22:36:14 +00:00
|
|
|
/// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
|
2020-08-28 21:04:42 +00:00
|
|
|
pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
|
2019-08-11 11:14:30 +00:00
|
|
|
maybe_whole_expr!(self);
|
|
|
|
|
|
|
|
let lo = self.token.span;
|
2019-12-06 22:44:23 +00:00
|
|
|
let minus_present = self.eat(&token::BinOp(token::Minus));
|
2022-10-10 02:40:56 +00:00
|
|
|
let (token_lit, span) = self.parse_token_lit()?;
|
|
|
|
let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
|
|
if minus_present {
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
|
2019-08-11 11:14:30 +00:00
|
|
|
} else {
|
|
|
|
Ok(expr)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
fn is_array_like_block(&mut self) -> bool {
|
|
|
|
self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
|
|
|
|
&& self.look_ahead(2, |t| t == &token::Comma)
|
|
|
|
&& self.look_ahead(3, |t| t.can_begin_expr())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Emits a suggestion if it looks like the user meant an array but
/// accidentally used braces, causing the code to be interpreted as a block
/// expression.
fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
    // Speculatively parse the braces as an array/repeat expression on a
    // snapshot so the real parser state is untouched on failure.
    let mut snapshot = self.create_snapshot_for_diagnostic();
    match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
        Ok(arr) => {
            // It parses as an array: tell the user to replace `{`/`}` with
            // `[`/`]`, pointing at both delimiters.
            self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
                span: arr.span,
                sub: ArrayBracketsInsteadOfSpacesSugg {
                    left: lo,
                    right: snapshot.prev_token.span,
                },
            });

            // Commit the snapshot's progress and substitute an error
            // expression so later phases do not see the bogus block.
            self.restore_snapshot(snapshot);
            Some(self.mk_expr_err(arr.span))
        }
        Err(e) => {
            // Not array-like after all; discard the speculative error and
            // let normal block parsing proceed.
            e.cancel();
            None
        }
    }
}
|
|
|
|
|
2022-08-09 17:29:28 +00:00
|
|
|
/// When the token after an expression is a `,` inside brackets, check
/// whether the input looks like an array literal whose first element was a
/// statement missing its `;` (e.g. `[foo() , 1, 2]` spread over lines), and
/// if so return a `MissingSemicolonBeforeArray` error suggesting the `;`.
fn suggest_missing_semicolon_before_array(
    &self,
    // Span of the expression that probably needs a trailing `;`.
    prev_span: Span,
    // Span of the opening `[` of the enclosing brackets.
    open_delim_span: Span,
) -> PResult<'a, ()> {
    if !self.may_recover() {
        return Ok(());
    }

    if self.token.kind == token::Comma {
        // Only fire across multiple lines; single-line commas are far more
        // likely to be a genuine list.
        if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
            return Ok(());
        }
        // Speculatively parse the rest as a comma-separated expression
        // sequence on a snapshot; the real parser state is left untouched.
        let mut snapshot = self.create_snapshot_for_diagnostic();
        snapshot.bump();
        match snapshot.parse_seq_to_before_end(
            &token::CloseDelim(Delimiter::Bracket),
            SeqSep::trailing_allowed(token::Comma),
            |p| p.parse_expr(),
        ) {
            Ok(_)
                // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
                // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
                // This is because the `token.kind` of the close delim is treated as the same as
                // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
                // Therefore, `token.kind` should not be compared here.
                if snapshot
                    .span_to_snippet(snapshot.token.span)
                    .map_or(false, |snippet| snippet == "]") =>
            {
                // The sequence ended at a literal `]`: this really is an
                // array, so report the missing semicolon.
                return Err(MissingSemicolonBeforeArray {
                    open_delim: open_delim_span,
                    semicolon: prev_span.shrink_to_hi(),
                }.into_diagnostic(&self.sess.span_diagnostic));
            }
            Ok(_) => (),
            // The speculative parse failed; drop its error silently.
            Err(err) => err.cancel(),
        }
    }
    Ok(())
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Parses a block or unsafe block.
pub(super) fn parse_block_expr(
    &mut self,
    // Optional label (`'a: { ... }`) attached to the block expression.
    opt_label: Option<Label>,
    // Start of the whole block expression (label/`unsafe` included).
    lo: Span,
    blk_mode: BlockCheckMode,
) -> PResult<'a, P<Expr>> {
    // Before committing to a block, check for the `{1, 2, 3}`-meant-an-array
    // mistake and recover with a bracket suggestion.
    if self.may_recover() && self.is_array_like_block() {
        if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo) {
            return Ok(arr);
        }
    }

    // A block produced by macro interpolation cannot be used as this kind of
    // block segment; report it (but still parse below).
    if self.token.is_whole_block() {
        self.sess.emit_err(InvalidBlockMacroSegment {
            span: self.token.span,
            context: lo.to(self.token.span),
        });
    }

    let (attrs, blk) = self.parse_block_common(lo, blk_mode)?;
    Ok(self.mk_expr_with_attrs(blk.span, ExprKind::Block(blk, opt_label), attrs))
}
|
|
|
|
|
2022-05-22 23:06:36 +00:00
|
|
|
/// Parse a block which takes no attributes and has no label
|
|
|
|
fn parse_simple_block(&mut self) -> PResult<'a, P<Expr>> {
|
|
|
|
let blk = self.parse_block()?;
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None)))
|
2022-05-22 23:06:36 +00:00
|
|
|
}
|
|
|
|
|
2022-06-02 16:15:05 +00:00
|
|
|
/// Parses a closure expression (e.g., `move |args| expr`).
fn parse_closure_expr(&mut self) -> PResult<'a, P<Expr>> {
    let lo = self.token.span;

    // Optional `for<'a, ...>` lifetime binder (unstable).
    let binder = if self.check_keyword(kw::For) {
        let lo = self.token.span;
        let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
        let span = lo.to(self.prev_token.span);

        self.sess.gated_spans.gate(sym::closure_lifetime_binder, span);

        ClosureBinder::For { span, generic_params: P::from_vec(lifetime_defs) }
    } else {
        ClosureBinder::NotPresent
    };

    // Optional `static` prefix (immovable generator).
    let movability =
        if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };

    // `async` is only a keyword here from the 2018 edition onwards.
    let asyncness = if self.token.uninterpolated_span().rust_2018() {
        self.parse_asyncness(Case::Sensitive)
    } else {
        Async::No
    };

    let capture_clause = self.parse_capture_clause()?;
    let decl = self.parse_fn_block_decl()?;
    let decl_hi = self.prev_token.span;
    let mut body = match decl.output {
        // No `-> Ty`: the body is any expression (statement-expression
        // restriction lifted inside the closure body).
        FnRetTy::Default(_) => {
            let restrictions = self.restrictions - Restrictions::STMT_EXPR;
            self.parse_expr_res(restrictions, None)?
        }
        _ => {
            // If an explicit return type is given, require a block to appear (RFC 968).
            let body_lo = self.token.span;
            self.parse_block_expr(None, body_lo, BlockCheckMode::Default)?
        }
    };

    if let Async::Yes { span, .. } = asyncness {
        // Feature-gate `async ||` closures.
        self.sess.gated_spans.gate(sym::async_closure, span);
    }

    if self.token.kind == TokenKind::Semi
        && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
        // HACK: This is needed so we can detect whether we're inside a macro,
        // where regular assumptions about what tokens can follow other tokens
        // don't necessarily apply.
        && self.may_recover()
        // FIXME(Nilstrieb): Remove this check once `may_recover` actually stops recovery
        && self.subparser_name.is_none()
    {
        // It is likely that the closure body is a block but where the
        // braces have been removed. We will recover and eat the next
        // statements later in the parsing process.
        body = self.mk_expr_err(body.span);
    }

    let body_span = body.span;

    let closure = self.mk_expr(
        lo.to(body.span),
        ExprKind::Closure(
            binder,
            capture_clause,
            asyncness,
            movability,
            decl,
            body,
            lo.to(decl_hi),
        ),
    );

    // Disable recovery for closure body
    let spans =
        ClosureSpans { whole_closure: closure.span, closing_pipe: decl_hi, body: body_span };
    self.current_closure = Some(spans);

    Ok(closure)
}
|
|
|
|
|
2020-03-06 11:13:55 +00:00
|
|
|
/// Parses an optional `move` prefix to a closure-like construct.
|
2020-12-18 16:32:26 +00:00
|
|
|
fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
|
|
|
|
if self.eat_keyword(kw::Move) {
|
|
|
|
// Check for `move async` and recover
|
|
|
|
if self.check_keyword(kw::Async) {
|
|
|
|
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
|
2022-09-08 16:23:31 +00:00
|
|
|
Err(AsyncMoveOrderIncorrect { span: move_async_span }
|
|
|
|
.into_diagnostic(&self.sess.span_diagnostic))
|
2020-12-18 16:32:26 +00:00
|
|
|
} else {
|
|
|
|
Ok(CaptureBy::Value)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Ok(CaptureBy::Ref)
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-08-11 18:04:09 +00:00
|
|
|
/// Parses the `|arg, arg|` header of a closure.
fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
    // `||` is a single token, so an empty parameter list must be eaten as one.
    let inputs = if self.eat(&token::OrOr) {
        Vec::new()
    } else {
        self.expect(&token::BinOp(token::Or))?;
        // Parse parameters up to (but not including) the closing `|` or `||`;
        // `NoExpect` keeps those delimiters out of the "expected token" list.
        let args = self
            .parse_seq_to_before_tokens(
                &[&token::BinOp(token::Or), &token::OrOr],
                SeqSep::trailing_allowed(token::Comma),
                TokenExpectType::NoExpect,
                |p| p.parse_fn_block_param(),
            )?
            .0;
        self.expect_or()?;
        args
    };
    // Optional `-> Ty` return type after the closing `|`.
    let output =
        self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;

    Ok(P(FnDecl { inputs, output }))
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses a parameter in a closure header (e.g., `|arg, arg|`).
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
    let lo = self.token.span;
    let attrs = self.parse_outer_attributes()?;
    // Collect the tokens of the whole parameter so attribute expansion can
    // replay them later; a trailing comma may belong to the collected range.
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let pat = this.parse_pat_no_top_alt(PARAM_EXPECTED)?;
        // The type annotation is optional; without `:` the type is inferred.
        let ty = if this.eat(&token::Colon) {
            this.parse_ty()?
        } else {
            this.mk_ty(this.prev_token.span, TyKind::Infer)
        };

        Ok((
            Param {
                attrs,
                ty,
                pat,
                span: lo.to(this.prev_token.span),
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            },
            TrailingToken::MaybeComma,
        ))
    })
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Parses an `if` expression (`if` token already eaten).
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_if_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
let lo = self.prev_token.span;
|
2019-08-11 11:14:30 +00:00
|
|
|
let cond = self.parse_cond_expr()?;
|
2022-08-14 23:58:38 +00:00
|
|
|
self.parse_if_after_cond(lo, cond)
|
2022-05-22 23:06:36 +00:00
|
|
|
}
|
|
|
|
|
2022-08-14 23:58:38 +00:00
|
|
|
/// Parses the then-block (and optional `else`) of an `if` whose condition
/// has already been parsed, recovering from a missing condition or a
/// missing then-block where possible.
fn parse_if_after_cond(&mut self, lo: Span, mut cond: P<Expr>) -> PResult<'a, P<Expr>> {
    let cond_span = cond.span;
    // Tries to interpret `cond` as either a missing expression if it's a block,
    // or as an unfinished expression if it's a binop and the RHS is a block.
    // We could probably add more recoveries here too...
    let mut recover_block_from_condition = |this: &mut Self| {
        let block = match &mut cond.kind {
            // `if x == { ... }`-style: the RHS block of the binop was really
            // the then-block and the condition is unfinished.
            ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
                if let ExprKind::Block(_, None) = right.kind => {
                self.sess.emit_err(IfExpressionMissingThenBlock {
                    if_span: lo,
                    sub: IfExpressionMissingThenBlockSub::UnfinishedCondition(
                        cond_span.shrink_to_lo().to(*binop_span)
                    ),
                });
                // Pull the block out of the condition, leaving an error
                // expression in its place.
                std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
            },
            // `if { ... }`: the whole "condition" was actually the then-block.
            ExprKind::Block(_, None) => {
                self.sess.emit_err(IfExpressionMissingCondition {
                    if_span: lo.shrink_to_hi(),
                    block_span: self.sess.source_map().start_point(cond_span),
                });
                std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi()))
            }
            _ => {
                return None;
            }
        };
        // Both arms above produced a block expression by construction.
        if let ExprKind::Block(block, _) = &block.kind {
            Some(block.clone())
        } else {
            unreachable!()
        }
    };
    // Parse then block
    let thn = if self.token.is_keyword(kw::Else) {
        // `if cond else ...`: no then-block at all; try to recover one from
        // the condition, otherwise synthesize an error block.
        if let Some(block) = recover_block_from_condition(self) {
            block
        } else {
            self.sess.emit_err(IfExpressionMissingThenBlock {
                if_span: lo,
                sub: IfExpressionMissingThenBlockSub::AddThenBlock(cond_span.shrink_to_hi()),
            });
            self.mk_block_err(cond_span.shrink_to_hi())
        }
    } else {
        let attrs = self.parse_outer_attributes()?; // For recovery.
        let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
            self.parse_block()?
        } else {
            if let Some(block) = recover_block_from_condition(self) {
                block
            } else {
                // Parse block, which will always fail, but we can add a nice note to the error
                self.parse_block().map_err(|mut err| {
                    err.span_note(
                        cond_span,
                        "the `if` expression is missing a block after this condition",
                    );
                    err
                })?
            }
        };
        // Attributes are not permitted on the then-block; report them.
        self.error_on_if_block_attrs(lo, false, block.span, attrs);
        block
    };
    let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
    Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses the condition of a `if` or `while` expression.
|
2019-08-11 11:14:30 +00:00
|
|
|
fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2022-08-20 18:40:08 +00:00
|
|
|
let cond =
|
|
|
|
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
|
|
|
|
|
|
|
|
if let ExprKind::Let(..) = cond.kind {
|
|
|
|
// Remove the last feature gating of a `let` expression since it's stable.
|
|
|
|
self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(cond)
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-08-01 01:13:16 +00:00
|
|
|
/// Parses a `let $pat = $expr` pseudo-expression.
///
/// The `let` keyword has NOT been eaten yet on entry; this method reports
/// misplaced `let` expressions, consumes the keyword, and parses the
/// pattern, the `=`, and the scrutinee expression.
fn parse_let_expr(&mut self) -> PResult<'a, P<Expr>> {
    // This is an *approximate* heuristic that detects if `let` chains are
    // being parsed in the right position. It's approximate because it
    // doesn't deny all invalid `let` expressions, just completely wrong usages.
    let not_in_chain = !matches!(
        self.prev_token.kind,
        TokenKind::AndAnd | TokenKind::Ident(kw::If, _) | TokenKind::Ident(kw::While, _)
    );
    if !self.restrictions.contains(Restrictions::ALLOW_LET) || not_in_chain {
        // Report but keep going: parsing continues so the rest of the
        // `let` expression can still be consumed for recovery.
        self.sess.emit_err(ExpectedExpressionFoundLet { span: self.token.span });
    }

    self.bump(); // Eat `let` token
    let lo = self.prev_token.span;
    let pat = self.parse_pat_allow_top_alt(
        None,
        RecoverComma::Yes,
        RecoverColon::Yes,
        CommaRecoveryMode::LikelyTuple,
    )?;
    // Recover `let pat == expr`: report the stray `==` and treat it as `=`.
    if self.token == token::EqEq {
        self.sess.emit_err(ExpectedEqForLetExpr {
            span: self.token.span,
            sugg_span: self.token.span,
        });
        self.bump();
    } else {
        self.expect(&token::Eq)?;
    }
    // The scrutinee may not be a struct literal, and is parsed at a
    // precedence just above what would require parenthesization.
    let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
        this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
    })?;
    let span = lo.to(expr.span);
    // Gate the whole `let ...` span; callers ungate it again where a
    // plain `let` condition is stable (see `parse_cond_expr`).
    self.sess.gated_spans.gate(sym::let_chains, span);
    Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span)))
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses an `else { ... }` expression (`else` token already eaten).
///
/// Accepts `else if ...` and `else { ... }`; anything else is handled via
/// snapshot-based recovery so a missing `if` (e.g. `else cond { ... }`)
/// produces a targeted diagnostic instead of a generic parse error.
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
    let else_span = self.prev_token.span; // `else`
    let attrs = self.parse_outer_attributes()?; // For recovery.
    let expr = if self.eat_keyword(kw::If) {
        self.parse_if_expr()?
    } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
        self.parse_simple_block()?
    } else {
        // Neither `if` nor `{`: snapshot so we can rewind after probing
        // for a likely `else if` with the `if` keyword missing.
        let snapshot = self.create_snapshot_for_diagnostic();
        let first_tok = super::token_descr(&self.token);
        let first_tok_span = self.token.span;
        match self.parse_expr() {
            Ok(cond)
            // If it's not a free-standing expression, and is followed by a block,
            // then it's very likely the condition to an `else if`.
                if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
                    && classify::expr_requires_semi_to_be_stmt(&cond) =>
            {
                self.sess.emit_err(ExpectedElseBlock {
                    first_tok_span,
                    first_tok,
                    else_span,
                    condition_start: cond.span.shrink_to_lo(),
                });
                // Continue as if `else if` had been written.
                self.parse_if_after_cond(cond.span.shrink_to_lo(), cond)?
            }
            Err(e) => {
                // The probe failed outright; rewind and let block parsing
                // produce the primary error.
                e.cancel();
                self.restore_snapshot(snapshot);
                self.parse_simple_block()?
            },
            Ok(_) => {
                // Parsed an expression but it doesn't look like an `else if`
                // condition; rewind and report the missing block instead.
                self.restore_snapshot(snapshot);
                self.parse_simple_block()?
            },
        }
    };
    self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
    Ok(expr)
}
|
|
|
|
|
|
|
|
/// Emits an error when outer attributes precede an `if`/`else` branch block
/// (e.g. `if cond #[attr] { ... }`), where they are not allowed.
///
/// `ctx_span` is the span of the `if`/`else` keyword, `is_ctx_else` selects
/// the wording, `branch_span` covers the branch the attributes were attached
/// to, and `attrs` are the attributes parsed earlier purely for recovery.
fn error_on_if_block_attrs(
    &self,
    ctx_span: Span,
    is_ctx_else: bool,
    branch_span: Span,
    attrs: AttrWrapper,
) {
    if attrs.is_empty() {
        return;
    }

    let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
    // `x0 @ xn` binds both names to the single element of a one-attribute
    // slice; the second pattern covers two or more attributes.
    let (attributes, last) = match attrs {
        [] => return,
        [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
    };
    let ctx = if is_ctx_else { "else" } else { "if" };
    self.sess.emit_err(OuterAttributeNotAllowedOnIfElse {
        last,
        branch_span,
        ctx_span,
        ctx: ctx.to_string(),
        attributes,
    });
}
|
|
|
|
|
2019-12-06 21:41:10 +00:00
|
|
|
/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_for_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
    // Record whether we are about to parse `for (`.
    // This is used below for recovery in case of `for ( $stuff ) $block`
    // in which case we will suggest `for $stuff $block`.
    let begin_paren = match self.token.kind {
        token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
        _ => None,
    };

    let pat = self.parse_pat_allow_top_alt(
        None,
        RecoverComma::Yes,
        RecoverColon::Yes,
        CommaRecoveryMode::LikelyTuple,
    )?;
    // A missing `in` is reported but parsing continues as if it were there.
    if !self.eat_keyword(kw::In) {
        self.error_missing_in_for_loop();
    }
    // Detect the `for x in in y` double-keyword typo.
    self.check_for_for_in_in_typo(self.prev_token.span);
    // The iterated expression may not be a struct literal.
    let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;

    // If we saw `for (`, possibly rewrite `for ( head ) block` to `for head block`.
    let pat = self.recover_parens_around_for_head(pat, begin_paren);

    let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;

    let kind = ExprKind::ForLoop(pat, expr, loop_block, opt_label);
    Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
}
|
|
|
|
|
2020-08-09 03:53:40 +00:00
|
|
|
fn error_missing_in_for_loop(&mut self) {
|
2022-08-17 17:05:49 +00:00
|
|
|
let (span, sub): (_, fn(_) -> _) = if self.token.is_ident_named(sym::of) {
|
2020-08-09 03:53:40 +00:00
|
|
|
// Possibly using JS syntax (#75311).
|
|
|
|
let span = self.token.span;
|
|
|
|
self.bump();
|
2022-08-17 17:05:49 +00:00
|
|
|
(span, MissingInInForLoopSub::InNotOf)
|
2020-08-09 03:53:40 +00:00
|
|
|
} else {
|
2022-08-17 17:05:49 +00:00
|
|
|
(self.prev_token.span.between(self.token.span), MissingInInForLoopSub::AddIn)
|
2020-08-09 03:53:40 +00:00
|
|
|
};
|
2022-08-17 17:05:49 +00:00
|
|
|
|
|
|
|
self.sess.emit_err(MissingInInForLoop { span, sub: sub(span) });
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Parses a `while` or `while let` expression (`while` token already eaten).
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_while_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
|
2022-01-12 20:43:24 +00:00
|
|
|
let cond = self.parse_cond_expr().map_err(|mut err| {
|
|
|
|
err.span_label(lo, "while parsing the condition of this `while` expression");
|
|
|
|
err
|
|
|
|
})?;
|
2022-08-14 23:58:38 +00:00
|
|
|
let (attrs, body) = self.parse_inner_attrs_and_block().map_err(|mut err| {
|
2022-01-12 20:43:24 +00:00
|
|
|
err.span_label(lo, "while parsing the body of this `while` expression");
|
|
|
|
err.span_label(cond.span, "this `while` condition successfully parsed");
|
|
|
|
err
|
|
|
|
})?;
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr_with_attrs(
|
|
|
|
lo.to(self.prev_token.span),
|
|
|
|
ExprKind::While(cond, body, opt_label),
|
|
|
|
attrs,
|
|
|
|
))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses `loop { ... }` (`loop` token already eaten).
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_loop_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
|
|
|
|
let (attrs, body) = self.parse_inner_attrs_and_block()?;
|
|
|
|
Ok(self.mk_expr_with_attrs(
|
|
|
|
lo.to(self.prev_token.span),
|
|
|
|
ExprKind::Loop(body, opt_label),
|
|
|
|
attrs,
|
|
|
|
))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-05-20 23:51:09 +00:00
|
|
|
pub(crate) fn eat_label(&mut self) -> Option<Label> {
|
2019-12-06 21:41:10 +00:00
|
|
|
self.token.lifetime().map(|ident| {
|
2019-08-11 11:14:30 +00:00
|
|
|
self.bump();
|
2020-02-24 10:04:13 +00:00
|
|
|
Label { ident }
|
2019-12-06 21:41:10 +00:00
|
|
|
})
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses a `match ... { ... }` expression (`match` token already eaten).
fn parse_match_expr(&mut self) -> PResult<'a, P<Expr>> {
    let match_span = self.prev_token.span;
    let lo = self.prev_token.span;
    // Struct literals are not allowed in the scrutinee position.
    let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
        // `match expr;` — the `match` keyword is likely spurious.
        if self.token == token::Semi {
            e.span_suggestion_short(
                match_span,
                "try removing this `match`",
                "",
                Applicability::MaybeIncorrect, // speculative
            );
        }
        // `match expr 'label: { ... }` — recover from a misplaced block label.
        if self.maybe_recover_unexpected_block_label() {
            e.cancel();
            self.bump();
        } else {
            return Err(e);
        }
    }
    let attrs = self.parse_inner_attributes()?;

    let mut arms: Vec<Arm> = Vec::new();
    while self.token != token::CloseDelim(Delimiter::Brace) {
        match self.parse_arm() {
            Ok(arm) => arms.push(arm),
            Err(mut e) => {
                // Recover by skipping to the end of the block.
                e.emit();
                self.recover_stmt();
                let span = lo.to(self.token.span);
                if self.token == token::CloseDelim(Delimiter::Brace) {
                    self.bump();
                }
                // Return the arms parsed so far as a best-effort `match`.
                return Ok(self.mk_expr_with_attrs(
                    span,
                    ExprKind::Match(scrutinee, arms),
                    attrs,
                ));
            }
        }
    }
    let hi = self.token.span;
    self.bump();
    Ok(self.mk_expr_with_attrs(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs))
}
|
|
|
|
|
2021-02-26 03:29:50 +00:00
|
|
|
/// Attempt to recover from match arm body with statements and no surrounding braces.
///
/// Entered only when the token after the first body expression is `;`.
/// Returns `Some(error-expression)` when the input really was a braceless
/// multi-statement arm body, and `None` (with the parser rewound) when the
/// `;` turned out to be something else, e.g. a `,` -> `;` typo.
fn parse_arm_body_missing_braces(
    &mut self,
    first_expr: &P<Expr>,
    arrow_span: Span,
) -> Option<P<Expr>> {
    if self.token.kind != token::Semi {
        return None;
    }
    // Snapshot so we can rewind completely if recovery does not apply.
    let start_snapshot = self.create_snapshot_for_diagnostic();
    let semi_sp = self.token.span;
    self.bump(); // `;`
    let mut stmts =
        vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
    // Emits the "body without braces" diagnostic and builds the error
    // expression covering all collected statements.
    let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
        let span = stmts[0].span.to(stmts[stmts.len() - 1].span);

        this.sess.emit_err(MatchArmBodyWithoutBraces {
            statements: span,
            arrow: arrow_span,
            num_statements: stmts.len(),
            sub: if stmts.len() > 1 {
                // Several statements: suggest wrapping them in braces.
                MatchArmBodyWithoutBracesSugg::AddBraces {
                    left: span.shrink_to_lo(),
                    right: span.shrink_to_hi(),
                }
            } else {
                // Single statement: the `;` was probably meant to be a `,`.
                MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
            },
        });
        this.mk_expr_err(span)
    };
    // We might have either a `,` -> `;` typo, or a block without braces. We need
    // a more subtle parsing strategy.
    loop {
        if self.token.kind == token::CloseDelim(Delimiter::Brace) {
            // We have reached the closing brace of the `match` expression.
            return Some(err(self, stmts));
        }
        if self.token.kind == token::Comma {
            // A `,` here means the `;` was a typo; rewind and let the
            // caller re-parse normally.
            self.restore_snapshot(start_snapshot);
            return None;
        }
        // Probe for the next arm's pattern without committing to it.
        let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
        match self.parse_pat_no_top_alt(None) {
            Ok(_pat) => {
                if self.token.kind == token::FatArrow {
                    // Reached arm end.
                    self.restore_snapshot(pre_pat_snapshot);
                    return Some(err(self, stmts));
                }
            }
            Err(err) => {
                err.cancel();
            }
        }

        self.restore_snapshot(pre_pat_snapshot);
        match self.parse_stmt_without_recovery(true, ForceCollect::No) {
            // Consume statements for as long as possible.
            Ok(Some(stmt)) => {
                stmts.push(stmt);
            }
            Ok(None) => {
                self.restore_snapshot(start_snapshot);
                break;
            }
            // We couldn't parse either yet another statement missing its
            // enclosing block nor the next arm's pattern or closing brace.
            Err(stmt_err) => {
                stmt_err.cancel();
                self.restore_snapshot(start_snapshot);
                break;
            }
        }
    }
    None
}
|
|
|
|
|
2019-10-08 07:35:34 +00:00
|
|
|
/// Parses one `match` arm: `attrs? pat (if guard)? => body ,?`.
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
    // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
    // `&&` tokens.
    //
    // Returns `(has_let_expr, does_not_have_bin_op)`: whether any `let`
    // appears in the guard, and whether the guard is NOT an `&&` chain.
    fn check_let_expr(expr: &Expr) -> (bool, bool) {
        match expr.kind {
            ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, ref lhs, ref rhs) => {
                let lhs_rslt = check_let_expr(lhs);
                let rhs_rslt = check_let_expr(rhs);
                (lhs_rslt.0 || rhs_rslt.0, false)
            }
            ExprKind::Let(..) => (true, true),
            _ => (false, true),
        }
    }
    let attrs = self.parse_outer_attributes()?;
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let lo = this.token.span;
        let pat = this.parse_pat_allow_top_alt(
            None,
            RecoverComma::Yes,
            RecoverColon::Yes,
            CommaRecoveryMode::EitherTupleOrPipe,
        )?;
        // Optional `if` guard; `let` is allowed here (if-let guards).
        let guard = if this.eat_keyword(kw::If) {
            let if_span = this.prev_token.span;
            let cond = this.parse_expr_res(Restrictions::ALLOW_LET, None)?;
            let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
            if has_let_expr {
                if does_not_have_bin_op {
                    // Remove the last feature gating of a `let` expression since it's stable.
                    this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
                }
                let span = if_span.to(cond.span);
                this.sess.gated_spans.gate(sym::if_let_guard, span);
            }
            Some(cond)
        } else {
            None
        };
        let arrow_span = this.token.span;
        if let Err(mut err) = this.expect(&token::FatArrow) {
            // We might have a `=>` -> `=` or `->` typo (issue #89396).
            if TokenKind::FatArrow
                .similar_tokens()
                .map_or(false, |similar_tokens| similar_tokens.contains(&this.token.kind))
            {
                err.span_suggestion(
                    this.token.span,
                    "try using a fat arrow here",
                    "=>",
                    Applicability::MaybeIncorrect,
                );
                err.emit();
                this.bump();
            } else {
                return Err(err);
            }
        }
        let arm_start_span = this.token.span;

        let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
            err.span_label(arrow_span, "while parsing the `match` arm starting here");
            err
        })?;

        // A trailing `,` is required unless the body is a block-like
        // expression or this is the last arm before the closing `}`.
        let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
            && this.token != token::CloseDelim(Delimiter::Brace);

        let hi = this.prev_token.span;

        if require_comma {
            let sm = this.sess.source_map();
            // First, try to recover a braceless multi-statement body.
            if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
                let span = body.span;
                return Ok((
                    ast::Arm {
                        attrs,
                        pat,
                        guard,
                        body,
                        span,
                        id: DUMMY_NODE_ID,
                        is_placeholder: false,
                    },
                    TrailingToken::None,
                ));
            }
            this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
                .or_else(|mut err| {
                    if this.token == token::FatArrow {
                        if let Ok(expr_lines) = sm.span_to_lines(expr.span)
                            && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
                            && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
                            && expr_lines.lines.len() == 2
                        {
                            // We check whether there's any trailing code in the parse span,
                            // if there isn't, we very likely have the following:
                            //
                            // X |     &Y => "y"
                            //   |        --    - missing comma
                            //   |        |
                            //   |        arrow_span
                            // X |     &X => "x"
                            //   |      - ^^ self.token.span
                            //   |      |
                            //   |      parsed until here as `"y" & X`
                            err.span_suggestion_short(
                                arm_start_span.shrink_to_hi(),
                                "missing a comma here to end this `match` arm",
                                ",",
                                Applicability::MachineApplicable,
                            );
                            return Err(err);
                        }
                    } else {
                        // FIXME(compiler-errors): We could also recover `; PAT =>` here

                        // Try to parse a following `PAT =>`, if successful
                        // then we should recover.
                        let mut snapshot = this.create_snapshot_for_diagnostic();
                        let pattern_follows = snapshot
                            .parse_pat_allow_top_alt(
                                None,
                                RecoverComma::Yes,
                                RecoverColon::Yes,
                                CommaRecoveryMode::EitherTupleOrPipe,
                            )
                            .map_err(|err| err.cancel())
                            .is_ok();
                        if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
                            err.cancel();
                            this.sess.emit_err(MissingCommaAfterMatchArm {
                                span: hi.shrink_to_hi(),
                            });
                            // `Ok(true)` satisfies `expect_one_of`'s return type.
                            return Ok(true);
                        }
                    }
                    err.span_label(arrow_span, "while parsing the `match` arm starting here");
                    Err(err)
                })?;
        } else {
            // The comma is optional here; eat it if present.
            this.eat(&token::Comma);
        }

        Ok((
            ast::Arm {
                attrs,
                pat,
                guard,
                body: expr,
                span: lo.to(hi),
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            },
            TrailingToken::None,
        ))
    })
}
|
|
|
|
|
|
|
|
/// Parses a `try {...}` expression (`try` token already eaten).
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
|
|
|
|
let (attrs, body) = self.parse_inner_attrs_and_block()?;
|
2019-08-11 11:14:30 +00:00
|
|
|
if self.eat_keyword(kw::Catch) {
|
2022-09-05 04:15:50 +00:00
|
|
|
Err(CatchAfterTry { span: self.prev_token.span }
|
|
|
|
.into_diagnostic(&self.sess.span_diagnostic))
|
2019-08-11 11:14:30 +00:00
|
|
|
} else {
|
2019-09-21 21:09:17 +00:00
|
|
|
let span = span_lo.to(body.span);
|
2019-10-30 15:38:16 +00:00
|
|
|
self.sess.gated_spans.gate(sym::try_blocks, span);
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr_with_attrs(span, ExprKind::TryBlock(body), attrs))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn is_do_catch_block(&self) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.token.is_keyword(kw::Do)
|
|
|
|
&& self.is_keyword_ahead(1, &[kw::Catch])
|
2022-04-26 12:40:14 +00:00
|
|
|
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
|
2019-12-22 22:42:04 +00:00
|
|
|
&& !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-03-26 06:43:54 +00:00
|
|
|
fn is_do_yeet(&self) -> bool {
|
|
|
|
self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Yeet])
|
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
fn is_try_block(&self) -> bool {
|
2020-04-14 15:45:00 +00:00
|
|
|
self.token.is_keyword(kw::Try)
|
2022-04-26 12:40:14 +00:00
|
|
|
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
|
2020-04-14 15:45:00 +00:00
|
|
|
&& self.token.uninterpolated_span().rust_2018()
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Parses an `async move? {...}` expression.
|
2022-08-14 23:58:38 +00:00
|
|
|
fn parse_async_block(&mut self) -> PResult<'a, P<Expr>> {
|
2019-12-06 21:05:47 +00:00
|
|
|
let lo = self.token.span;
|
2019-08-11 11:14:30 +00:00
|
|
|
self.expect_keyword(kw::Async)?;
|
2020-12-18 16:32:26 +00:00
|
|
|
let capture_clause = self.parse_capture_clause()?;
|
2022-08-14 23:58:38 +00:00
|
|
|
let (attrs, body) = self.parse_inner_attrs_and_block()?;
|
2019-12-06 21:05:47 +00:00
|
|
|
let kind = ExprKind::Async(capture_clause, DUMMY_NODE_ID, body);
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn is_async_block(&self) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.token.is_keyword(kw::Async)
|
|
|
|
&& ((
|
|
|
|
// `async move {`
|
|
|
|
self.is_keyword_ahead(1, &[kw::Move])
|
2022-04-26 12:40:14 +00:00
|
|
|
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
|
2019-12-22 22:42:04 +00:00
|
|
|
) || (
|
|
|
|
// `async {`
|
2022-04-26 12:40:14 +00:00
|
|
|
self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
|
2019-12-22 22:42:04 +00:00
|
|
|
))
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-12-04 03:31:44 +00:00
|
|
|
/// Heuristic lookahead: returns `true` when the tokens after the current
/// `{` cannot start a block, and so must belong to a struct literal.
///
/// NOTE: `&&` binds tighter than `||` below, so the overall shape is
/// `ident && (comma || (colon && (comma-at-4 || not-a-type-at-3)))`.
fn is_certainly_not_a_block(&self) -> bool {
    self.look_ahead(1, |t| t.is_ident())
        && (
            // `{ ident, ` cannot start a block.
            self.look_ahead(2, |t| t == &token::Comma)
                || self.look_ahead(2, |t| t == &token::Colon)
                    && (
                        // `{ ident: token, ` cannot start a block.
                        self.look_ahead(4, |t| t == &token::Comma) ||
                        // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
                        self.look_ahead(3, |t| !t.can_begin_type())
                    )
        )
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// After parsing path `qself::path` followed by `{`, tries to parse a struct
/// literal. Returns `None` when struct literals are disallowed here and the
/// `{` could plausibly start a block instead.
fn maybe_parse_struct_expr(
    &mut self,
    qself: Option<&ast::QSelf>,
    path: &ast::Path,
) -> Option<PResult<'a, P<Expr>>> {
    let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
    // Even when disallowed, commit to a struct literal if the lookahead
    // says the `{` cannot start a block — then report it below.
    if struct_allowed || self.is_certainly_not_a_block() {
        if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
            return Some(Err(err));
        }
        let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
        if let (Ok(expr), false) = (&expr, struct_allowed) {
            // This is a struct literal, but we can't accept it here.
            self.sess.emit_err(StructLiteralNotAllowedHere {
                span: expr.span,
                sub: StructLiteralNotAllowedHereSugg {
                    left: path.span.shrink_to_lo(),
                    right: expr.span.shrink_to_hi(),
                },
            });
        }
        return Some(expr);
    }
    None
}
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
/// Parses the field list of a struct literal (or struct pattern-like form),
/// up to but not including the closing delimiter.
///
/// Returns the parsed fields, the functional-update base (`..expr`, bare
/// `..`, or none), and a flag indicating the `async { .. }`-in-old-edition
/// recovery was triggered.
pub(super) fn parse_struct_fields(
    &mut self,
    pth: ast::Path,
    recover: bool,
    close_delim: Delimiter,
) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
    let mut fields = Vec::new();
    let mut base = ast::StructRest::None;
    let mut recover_async = false;

    // Shared wording for errors caused by `async { .. }` being parsed as a
    // struct literal named `async` (pre-2018 editions).
    let mut async_block_err = |e: &mut Diagnostic, span: Span| {
        recover_async = true;
        e.span_label(span, "`async` blocks are only allowed in Rust 2018 or later");
        e.help_use_latest_edition();
    };

    while self.token != token::CloseDelim(close_delim) {
        if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
            let exp_span = self.prev_token.span;
            // We permit `.. }` on the left-hand side of a destructuring assignment.
            if self.check(&token::CloseDelim(close_delim)) {
                base = ast::StructRest::Rest(self.prev_token.span.shrink_to_hi());
                break;
            }
            match self.parse_expr() {
                Ok(e) => base = ast::StructRest::Base(e),
                Err(mut e) if recover => {
                    e.emit();
                    self.recover_stmt();
                }
                Err(e) => return Err(e),
            }
            // `..base,` — the base expression must come last; report the comma.
            self.recover_struct_comma_after_dotdot(exp_span);
            break;
        }

        // Capture a plausible `ident:`-shaped field before attempting the
        // real parse, so we can still emit it on error.
        let recovery_field = self.find_struct_error_after_field_looking_code();
        let parsed_field = match self.parse_expr_field() {
            Ok(f) => Some(f),
            Err(mut e) => {
                if pth == kw::Async {
                    async_block_err(&mut e, pth.span);
                } else {
                    e.span_label(pth.span, "while parsing this struct");
                }
                e.emit();

                // If the next token is a comma, then try to parse
                // what comes next as additional fields, rather than
                // bailing out until next `}`.
                if self.token != token::Comma {
                    self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                    if self.token != token::Comma {
                        break;
                    }
                }
                None
            }
        };

        let is_shorthand = parsed_field.as_ref().map_or(false, |f| f.is_shorthand);
        // A shorthand field can be turned into a full field with `:`.
        // We should point this out.
        self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon));

        match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
            Ok(_) => {
                if let Some(f) = parsed_field.or(recovery_field) {
                    // Only include the field if there's no parse error for the field name.
                    fields.push(f);
                }
            }
            Err(mut e) => {
                if pth == kw::Async {
                    async_block_err(&mut e, pth.span);
                } else {
                    e.span_label(pth.span, "while parsing this struct");
                    if let Some(f) = recovery_field {
                        // Keep the recovered field and suggest the missing comma.
                        fields.push(f);
                        e.span_suggestion(
                            self.prev_token.span.shrink_to_hi(),
                            "try adding a comma",
                            ",",
                            Applicability::MachineApplicable,
                        );
                    } else if is_shorthand
                        && (AssocOp::from_token(&self.token).is_some()
                            || matches!(&self.token.kind, token::OpenDelim(_))
                            || self.token.kind == token::Dot)
                    {
                        // Looks like they tried to write a shorthand, complex expression.
                        let ident = parsed_field.expect("is_shorthand implies Some").ident;
                        e.span_suggestion(
                            ident.span.shrink_to_lo(),
                            "try naming a field",
                            &format!("{ident}: "),
                            Applicability::HasPlaceholders,
                        );
                    }
                }
                if !recover {
                    return Err(e);
                }
                e.emit();
                self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                self.eat(&token::Comma);
            }
        }
    }
    Ok((fields, base, recover_async))
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
/// Precondition: already parsed the '{'.
|
|
|
|
pub(super) fn parse_struct_expr(
|
|
|
|
&mut self,
|
|
|
|
qself: Option<ast::QSelf>,
|
|
|
|
pth: ast::Path,
|
|
|
|
recover: bool,
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
|
|
|
let lo = pth.span;
|
|
|
|
let (fields, base, recover_async) =
|
2022-04-26 12:40:14 +00:00
|
|
|
self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
|
2021-09-07 17:45:16 +00:00
|
|
|
let span = lo.to(self.token.span);
|
2022-04-26 12:40:14 +00:00
|
|
|
self.expect(&token::CloseDelim(Delimiter::Brace))?;
|
2021-03-16 00:15:53 +00:00
|
|
|
let expr = if recover_async {
|
|
|
|
ExprKind::Err
|
|
|
|
} else {
|
2020-12-10 12:20:07 +00:00
|
|
|
ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
|
2021-03-16 00:15:53 +00:00
|
|
|
};
|
2022-08-14 23:58:38 +00:00
|
|
|
Ok(self.mk_expr(span, expr))
|
2019-12-04 02:47:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Use in case of error after field-looking code: `S { foo: () with a }`.
|
2021-03-15 21:36:07 +00:00
|
|
|
fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
|
2020-03-04 20:37:52 +00:00
|
|
|
match self.token.ident() {
|
|
|
|
Some((ident, is_raw))
|
|
|
|
if (is_raw || !ident.is_reserved())
|
|
|
|
&& self.look_ahead(1, |t| *t == token::Colon) =>
|
|
|
|
{
|
2021-03-15 21:36:07 +00:00
|
|
|
Some(ast::ExprField {
|
2020-03-04 20:37:52 +00:00
|
|
|
ident,
|
2020-02-24 10:04:13 +00:00
|
|
|
span: self.token.span,
|
|
|
|
expr: self.mk_expr_err(self.token.span),
|
2019-12-04 02:47:18 +00:00
|
|
|
is_shorthand: false,
|
|
|
|
attrs: AttrVec::new(),
|
|
|
|
id: DUMMY_NODE_ID,
|
|
|
|
is_placeholder: false,
|
2020-03-04 20:37:52 +00:00
|
|
|
})
|
2019-12-04 02:47:18 +00:00
|
|
|
}
|
2020-03-04 20:37:52 +00:00
|
|
|
_ => None,
|
2019-12-04 02:47:18 +00:00
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2019-12-04 02:31:32 +00:00
|
|
|
fn recover_struct_comma_after_dotdot(&mut self, span: Span) {
|
|
|
|
if self.token != token::Comma {
|
|
|
|
return;
|
|
|
|
}
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(CommaAfterBaseStruct {
|
|
|
|
span: span.to(self.prev_token.span),
|
|
|
|
comma: self.token.span,
|
|
|
|
});
|
2019-12-04 02:31:32 +00:00
|
|
|
self.recover_stmt();
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-11-06 02:33:12 +00:00
|
|
|
fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
|
2022-10-13 22:52:23 +00:00
|
|
|
if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
|
|
|
|
&& self.eat(&token::DotDotDot)
|
|
|
|
{
|
|
|
|
// recover from typo of `...`, suggest `..`
|
|
|
|
let span = self.prev_token.span;
|
|
|
|
self.sess.emit_err(MissingDotDot { token_span: span, sugg_span: span });
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
false
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Parses `ident (COLON expr)?`.
///
/// Produces a single struct-literal field, either in shorthand form (`x`,
/// mimicked as `x: x`) or with an explicit initializer (`x: expr`). Tokens
/// are collected so attributes on the field work correctly.
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
    let attrs = self.parse_outer_attributes()?;
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let lo = this.token.span;

        // Check if a colon exists one ahead. This means we're parsing a fieldname.
        // `=` is also accepted here so that `x = expr` can be diagnosed below
        // (`error_on_eq_field_init`) instead of being treated as shorthand.
        let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
        let (ident, expr) = if is_shorthand {
            // Mimic `x: x` for the `x` field shorthand.
            let ident = this.parse_ident_common(false)?;
            let path = ast::Path::from_ident(ident);
            (ident, this.mk_expr(ident.span, ExprKind::Path(None, path)))
        } else {
            let ident = this.parse_field_name()?;
            // Diagnose `field = expr` before consuming the separator below.
            this.error_on_eq_field_init(ident);
            this.bump(); // `:`
            (ident, this.parse_expr()?)
        };

        Ok((
            ast::ExprField {
                ident,
                span: lo.to(expr.span),
                expr,
                is_shorthand,
                attrs,
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            },
            // A trailing `,` after the field belongs to the collected tokens.
            TrailingToken::MaybeComma,
        ))
    })
}
|
|
|
|
|
2019-12-04 02:23:20 +00:00
|
|
|
/// Check for `=`. This means the source incorrectly attempts to
|
|
|
|
/// initialize a field with an eq rather than a colon.
|
|
|
|
fn error_on_eq_field_init(&self, field_name: Ident) {
|
|
|
|
if self.token != token::Eq {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2022-08-17 17:05:49 +00:00
|
|
|
self.sess.emit_err(EqFieldInit {
|
|
|
|
span: self.token.span,
|
|
|
|
eq: field_name.span.shrink_to_hi().to(self.token.span),
|
|
|
|
});
|
2019-12-04 02:23:20 +00:00
|
|
|
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Emits the `DotDotDot` diagnostic for a stray `...` at `span`.
fn err_dotdotdot_syntax(&self, span: Span) {
    self.sess.emit_err(DotDotDot { span });
}
|
|
|
|
|
2019-08-11 21:37:05 +00:00
|
|
|
/// Emits the `LeftArrowOperator` diagnostic for a `<-` written as an
/// operator at `span`.
fn err_larrow_operator(&self, span: Span) {
    self.sess.emit_err(LeftArrowOperator { span });
}
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
/// Builds an `ExprKind::AssignOp` node (`lhs <op>= rhs`) from already-parsed
/// parts.
fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
    ExprKind::AssignOp(binop, lhs, rhs)
}
|
|
|
|
|
|
|
|
fn mk_range(
|
2021-06-26 02:46:41 +00:00
|
|
|
&mut self,
|
2019-08-11 11:14:30 +00:00
|
|
|
start: Option<P<Expr>>,
|
|
|
|
end: Option<P<Expr>>,
|
2019-12-22 22:42:04 +00:00
|
|
|
limits: RangeLimits,
|
2021-02-21 11:54:49 +00:00
|
|
|
) -> ExprKind {
|
2019-08-11 11:14:30 +00:00
|
|
|
if end.is_none() && limits == RangeLimits::Closed {
|
2021-06-26 02:46:41 +00:00
|
|
|
self.inclusive_range_with_incorrect_end(self.prev_token.span);
|
2021-02-21 11:54:49 +00:00
|
|
|
ExprKind::Err
|
2019-08-11 11:14:30 +00:00
|
|
|
} else {
|
2021-02-21 11:54:49 +00:00
|
|
|
ExprKind::Range(start, end, limits)
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Unary` node (`<op> expr`) from already-parsed parts.
fn mk_unary(&self, unop: UnOp, expr: P<Expr>) -> ExprKind {
    ExprKind::Unary(unop, expr)
}
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Binary` node (`lhs <op> rhs`) from already-parsed
/// parts.
fn mk_binary(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
    ExprKind::Binary(binop, lhs, rhs)
}
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Index` node (`expr[idx]`) from already-parsed parts.
fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ExprKind {
    ExprKind::Index(expr, idx)
}
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Call` node (`f(args...)`) from already-parsed parts.
fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind {
    ExprKind::Call(f, args)
}
|
|
|
|
|
2021-02-21 12:01:01 +00:00
|
|
|
fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
|
2020-02-29 11:56:15 +00:00
|
|
|
let span = lo.to(self.prev_token.span);
|
2022-08-14 23:58:38 +00:00
|
|
|
let await_expr = self.mk_expr(span, ExprKind::Await(self_arg));
|
2019-08-11 11:14:30 +00:00
|
|
|
self.recover_from_await_method_call();
|
2021-02-21 12:01:01 +00:00
|
|
|
await_expr
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|
|
|
|
|
2022-08-17 02:34:33 +00:00
|
|
|
/// Allocates an expression node with the given kind, span, and attributes.
/// The node gets `DUMMY_NODE_ID` and no attached tokens.
pub(crate) fn mk_expr_with_attrs(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
    P(Expr { kind, span, attrs, id: DUMMY_NODE_ID, tokens: None })
}
|
|
|
|
|
|
|
|
/// Allocates an attribute-less expression node with the given kind and span.
/// The node gets `DUMMY_NODE_ID` and no attached tokens.
pub(crate) fn mk_expr(&self, span: Span, kind: ExprKind) -> P<Expr> {
    P(Expr { kind, span, attrs: AttrVec::new(), id: DUMMY_NODE_ID, tokens: None })
}
|
2019-10-08 12:39:58 +00:00
|
|
|
|
|
|
|
/// Convenience: an `ExprKind::Err` expression covering `span`, used as a
/// placeholder node during error recovery.
pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
    self.mk_expr(span, ExprKind::Err)
}
|
2020-10-10 04:40:27 +00:00
|
|
|
|
|
|
|
/// Create expression span ensuring the span of the parent node
|
|
|
|
/// is larger than the span of lhs and rhs, including the attributes.
|
|
|
|
fn mk_expr_sp(&self, lhs: &P<Expr>, lhs_span: Span, rhs_span: Span) -> Span {
|
|
|
|
lhs.attrs
|
|
|
|
.iter()
|
|
|
|
.find(|a| a.style == AttrStyle::Outer)
|
|
|
|
.map_or(lhs_span, |a| a.span)
|
|
|
|
.to(rhs_span)
|
|
|
|
}
|
2021-01-22 18:28:08 +00:00
|
|
|
|
|
|
|
fn collect_tokens_for_expr(
|
|
|
|
&mut self,
|
|
|
|
attrs: AttrWrapper,
|
2022-08-17 02:34:33 +00:00
|
|
|
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
|
2021-01-22 18:28:08 +00:00
|
|
|
) -> PResult<'a, P<Expr>> {
|
2020-11-28 23:33:17 +00:00
|
|
|
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
2021-01-22 18:28:08 +00:00
|
|
|
let res = f(this, attrs)?;
|
|
|
|
let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
|
|
|
|
&& this.token.kind == token::Semi
|
|
|
|
{
|
|
|
|
TrailingToken::Semi
|
2022-10-20 11:40:21 +00:00
|
|
|
} else if this.token.kind == token::Gt {
|
|
|
|
TrailingToken::Gt
|
2021-01-22 18:28:08 +00:00
|
|
|
} else {
|
2020-11-28 23:33:17 +00:00
|
|
|
// FIXME - pass this through from the place where we know
|
|
|
|
// we need a comma, rather than assuming that `#[attr] expr,`
|
|
|
|
// always captures a trailing comma
|
|
|
|
TrailingToken::MaybeComma
|
2021-01-22 18:28:08 +00:00
|
|
|
};
|
|
|
|
Ok((res, trailing))
|
|
|
|
})
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
}
|