2021-07-13 11:18:03 +00:00
|
|
|
|
use super::pat::{RecoverColon, RecoverComma, PARAM_EXPECTED};
|
2020-10-15 19:21:45 +00:00
|
|
|
|
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
2021-08-16 13:22:36 +00:00
|
|
|
|
use super::{
|
|
|
|
|
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, TokenType,
|
|
|
|
|
};
|
2021-02-13 17:42:43 +00:00
|
|
|
|
use super::{SemiColonMode, SeqSep, TokenExpectType, TrailingToken};
|
2019-10-15 20:48:13 +00:00
|
|
|
|
use crate::maybe_recover_from_interpolated_ty_qpath;
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2021-08-16 13:22:36 +00:00
|
|
|
|
use ast::token::DelimToken;
|
2020-02-29 17:37:32 +00:00
|
|
|
|
use rustc_ast::ptr::P;
|
|
|
|
|
use rustc_ast::token::{self, Token, TokenKind};
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go along, pushing open/close delimiters to our buffer
just like regular tokens. Once capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens at an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be negligible (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There is a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
|
use rustc_ast::tokenstream::Spacing;
|
2020-02-29 17:37:32 +00:00
|
|
|
|
use rustc_ast::util::classify;
|
|
|
|
|
use rustc_ast::util::literal::LitError;
|
|
|
|
|
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
|
2021-03-15 21:36:07 +00:00
|
|
|
|
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
|
2020-04-27 17:56:11 +00:00
|
|
|
|
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
|
|
|
|
|
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
|
2020-01-11 16:02:46 +00:00
|
|
|
|
use rustc_ast_pretty::pprust;
|
2022-01-23 20:41:46 +00:00
|
|
|
|
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, PResult};
|
2021-07-10 14:38:55 +00:00
|
|
|
|
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
|
|
|
|
|
use rustc_session::lint::BuiltinLintDiagnostics;
|
2020-12-30 13:33:46 +00:00
|
|
|
|
use rustc_span::edition::LATEST_STABLE_EDITION;
|
2020-01-11 00:19:09 +00:00
|
|
|
|
use rustc_span::source_map::{self, Span, Spanned};
|
2020-04-19 11:00:18 +00:00
|
|
|
|
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
2020-10-09 23:01:44 +00:00
|
|
|
|
use rustc_span::{BytePos, Pos};
|
2019-12-22 22:42:04 +00:00
|
|
|
|
use std::mem;
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
|
|
|
|
/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
///
/// On a match this macro *returns* from the enclosing function with the
/// interpolated expression; otherwise it falls through and normal parsing
/// continues.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            match &**nt {
                // A pre-parsed expression or literal: hand it back directly.
                token::NtExpr(e) | token::NtLiteral(e) => {
                    let e = e.clone();
                    $p.bump();
                    return Ok(e);
                }
                // A pre-parsed path: wrap it in a path expression.
                token::NtPath(path) => {
                    let path = path.clone();
                    $p.bump();
                    return Ok($p.mk_expr(
                        $p.prev_token.span,
                        ExprKind::Path(None, path),
                        AttrVec::new(),
                    ));
                }
                // A pre-parsed block: wrap it in an unlabeled block expression.
                token::NtBlock(block) => {
                    let block = block.clone();
                    $p.bump();
                    return Ok($p.mk_expr(
                        $p.prev_token.span,
                        ExprKind::Block(block, None),
                        AttrVec::new(),
                    ));
                }
                // Any other nonterminal kind: not an expression, keep parsing.
                _ => {}
            };
        }
    };
}
|
|
|
|
|
|
|
|
|
|
/// The state of the left-hand side while parsing an associative expression
/// (see `parse_assoc_expr_with`).
#[derive(Debug)]
pub(super) enum LhsExpr {
    /// No LHS has been parsed yet and no outer attributes were pre-parsed.
    NotYetParsed,
    /// Outer attributes were already parsed; the expression they attach to
    /// has not been parsed yet.
    AttributesParsed(AttrWrapper),
    /// The LHS expression is fully parsed and available.
    AlreadyParsed(P<Expr>),
}
|
|
|
|
|
|
2021-01-22 18:28:08 +00:00
|
|
|
|
impl From<Option<AttrWrapper>> for LhsExpr {
|
2019-09-03 22:42:58 +00:00
|
|
|
|
/// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
|
|
|
|
|
/// and `None` into `LhsExpr::NotYetParsed`.
|
|
|
|
|
///
|
|
|
|
|
/// This conversion does not allocate.
|
2021-01-22 18:28:08 +00:00
|
|
|
|
fn from(o: Option<AttrWrapper>) -> Self {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl From<P<Expr>> for LhsExpr {
    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`.
    ///
    /// This conversion does not allocate.
    fn from(expr: P<Expr>) -> Self {
        Self::AlreadyParsed(expr)
    }
}
|
|
|
|
|
|
|
|
|
|
impl<'a> Parser<'a> {
|
|
|
|
|
/// Parses an expression.
#[inline]
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
    // Reset closure-span tracking: a fresh top-level expression parse should
    // not inherit `current_closure` state from an earlier parse.
    self.current_closure.take();

    // No restrictions and no pre-parsed attributes at the top level.
    self.parse_expr_res(Restrictions::empty(), None)
}
|
|
|
|
|
|
2021-03-25 22:05:49 +00:00
|
|
|
|
/// Parses an expression, forcing tokens to be collected
/// (the parsed expression will carry a captured token stream even when
/// attribute handling would not otherwise require one).
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
    self.collect_tokens_no_attrs(|this| this.parse_expr())
}
|
|
|
|
|
|
2021-04-06 04:50:55 +00:00
|
|
|
|
pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
|
2019-12-08 07:19:53 +00:00
|
|
|
|
self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
|
|
|
|
|
}
|
|
|
|
|
|
2019-09-16 20:45:43 +00:00
|
|
|
|
fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
|
|
|
|
|
match self.parse_expr() {
|
|
|
|
|
Ok(expr) => Ok(expr),
|
2020-03-04 20:37:52 +00:00
|
|
|
|
Err(mut err) => match self.token.ident() {
|
2020-03-09 09:42:33 +00:00
|
|
|
|
Some((Ident { name: kw::Underscore, .. }, false))
|
|
|
|
|
if self.look_ahead(1, |t| t == &token::Comma) =>
|
2019-12-22 22:42:04 +00:00
|
|
|
|
{
|
2019-09-16 20:45:43 +00:00
|
|
|
|
// Special-case handling of `foo(_, _, _)`
|
|
|
|
|
err.emit();
|
|
|
|
|
self.bump();
|
2020-02-24 10:04:13 +00:00
|
|
|
|
Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
|
2019-09-16 20:45:43 +00:00
|
|
|
|
}
|
|
|
|
|
_ => Err(err),
|
|
|
|
|
},
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-08 07:19:53 +00:00
|
|
|
|
/// Parses a sequence of expressions delimited by parentheses.
|
2019-08-11 11:14:30 +00:00
|
|
|
|
fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parses an expression, subject to the given restrictions.
///
/// `r` is installed for the duration of the parse via `with_res`;
/// `already_parsed_attrs` carries outer attributes that a caller may have
/// consumed ahead of the expression.
#[inline]
pub(super) fn parse_expr_res(
    &mut self,
    r: Restrictions,
    already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
    self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
|
|
|
|
|
|
|
|
|
|
/// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
fn parse_assoc_expr(
    &mut self,
    already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
    // Minimum precedence 0: accept any operator at the top level.
    self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
|
|
|
|
|
|
|
|
|
|
/// Parses an associative expression with operators of at least `min_prec` precedence.
///
/// This is the precedence-climbing core of expression parsing: it obtains a
/// prefix expression (unless one was supplied in `lhs`), then repeatedly folds
/// in binary/assignment/cast/range operators whose precedence is at least
/// `min_prec`, recursing for each right-hand side.
pub(super) fn parse_assoc_expr_with(
    &mut self,
    min_prec: usize,
    lhs: LhsExpr,
) -> PResult<'a, P<Expr>> {
    let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
        expr
    } else {
        let attrs = match lhs {
            LhsExpr::AttributesParsed(attrs) => Some(attrs),
            _ => None,
        };
        // A leading `..`/`...`/`..=` means this is a prefix range (`..x`),
        // which has its own parsing path.
        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
            return self.parse_prefix_range_expr(attrs);
        } else {
            self.parse_prefix_expr(attrs)?
        }
    };
    // Remember whether a type ascription was pending so it can be cleared
    // before returning.
    let last_type_ascription_set = self.last_type_ascription.is_some();

    if !self.should_continue_as_assoc_expr(&lhs) {
        self.last_type_ascription = None;
        return Ok(lhs);
    }

    self.expected_tokens.push(TokenType::Operator);
    while let Some(op) = self.check_assoc_op() {
        // Adjust the span for interpolated LHS to point to the `$lhs` token
        // and not to what it refers to.
        let lhs_span = match self.prev_token.kind {
            TokenKind::Interpolated(..) => self.prev_token.span,
            _ => lhs.span,
        };

        let cur_op_span = self.token.span;
        let restrictions = if op.node.is_assign_like() {
            self.restrictions & Restrictions::NO_STRUCT_LITERAL
        } else {
            self.restrictions
        };
        let prec = op.node.precedence();
        if prec < min_prec {
            break;
        }
        // Check for deprecated `...` syntax
        if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq {
            self.err_dotdotdot_syntax(self.token.span);
        }

        if self.token == token::LArrow {
            self.err_larrow_operator(self.token.span);
        }

        self.bump();
        if op.node.is_comparison() {
            // `a < b < c` and friends: may return a recovered expression.
            if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
                return Ok(expr);
            }
        }

        // Look for JS' `===` and `!==` and recover
        if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
            && self.token.kind == token::Eq
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            let sugg = match op.node {
                AssocOp::Equal => "==",
                AssocOp::NotEqual => "!=",
                _ => unreachable!(),
            };
            self.struct_span_err(sp, &format!("invalid comparison operator `{}=`", sugg))
                .span_suggestion_short(
                    sp,
                    &format!("`{s}=` is not a valid comparison operator, use `{s}`", s = sugg),
                    sugg.to_string(),
                    Applicability::MachineApplicable,
                )
                .emit();
            self.bump();
        }

        // Look for PHP's `<>` and recover
        if op.node == AssocOp::Less
            && self.token.kind == token::Gt
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            self.struct_span_err(sp, "invalid comparison operator `<>`")
                .span_suggestion_short(
                    sp,
                    "`<>` is not a valid comparison operator, use `!=`",
                    "!=".to_string(),
                    Applicability::MachineApplicable,
                )
                .emit();
            self.bump();
        }

        // Look for C++'s `<=>` and recover
        if op.node == AssocOp::LessEqual
            && self.token.kind == token::Gt
            && self.prev_token.span.hi() == self.token.span.lo()
        {
            let sp = op.span.to(self.token.span);
            self.struct_span_err(sp, "invalid comparison operator `<=>`")
                .span_label(
                    sp,
                    "`<=>` is not a valid comparison operator, use `std::cmp::Ordering`",
                )
                .emit();
            self.bump();
        }

        let op = op.node;
        // Special cases:
        if op == AssocOp::As {
            lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
            continue;
        } else if op == AssocOp::Colon {
            lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
            continue;
        } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
            // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
            // generalise it to the Fixity::None code.
            lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
            break;
        }

        let fixity = op.fixity();
        let prec_adjustment = match fixity {
            Fixity::Right => 0,
            Fixity::Left => 1,
            // We currently have no non-associative operators that are not handled above by
            // the special cases. The code is here only for future convenience.
            Fixity::None => 1,
        };
        // Struct literals become legal again in the RHS of a binary operator.
        let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
            this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
        })?;

        let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
        lhs = match op {
            AssocOp::Add
            | AssocOp::Subtract
            | AssocOp::Multiply
            | AssocOp::Divide
            | AssocOp::Modulus
            | AssocOp::LAnd
            | AssocOp::LOr
            | AssocOp::BitXor
            | AssocOp::BitAnd
            | AssocOp::BitOr
            | AssocOp::ShiftLeft
            | AssocOp::ShiftRight
            | AssocOp::Equal
            | AssocOp::Less
            | AssocOp::LessEqual
            | AssocOp::NotEqual
            | AssocOp::Greater
            | AssocOp::GreaterEqual => {
                let ast_op = op.to_ast_binop().unwrap();
                let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                self.mk_expr(span, binary, AttrVec::new())
            }
            AssocOp::Assign => {
                self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span), AttrVec::new())
            }
            AssocOp::AssignOp(k) => {
                let aop = match k {
                    token::Plus => BinOpKind::Add,
                    token::Minus => BinOpKind::Sub,
                    token::Star => BinOpKind::Mul,
                    token::Slash => BinOpKind::Div,
                    token::Percent => BinOpKind::Rem,
                    token::Caret => BinOpKind::BitXor,
                    token::And => BinOpKind::BitAnd,
                    token::Or => BinOpKind::BitOr,
                    token::Shl => BinOpKind::Shl,
                    token::Shr => BinOpKind::Shr,
                };
                let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                self.mk_expr(span, aopexpr, AttrVec::new())
            }
            AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
                self.span_bug(span, "AssocOp should have been handled by special case")
            }
        };

        if let Fixity::None = fixity {
            break;
        }
    }
    if last_type_ascription_set {
        self.last_type_ascription = None;
    }
    Ok(lhs)
}
|
|
|
|
|
|
2019-12-03 08:04:36 +00:00
|
|
|
|
/// Decides whether, after parsing `lhs`, the parser should keep consuming
/// operators and treat what follows as part of one associative expression.
/// Also records ambiguous block-then-operator positions (e.g. `{ 42 } - 5`)
/// for later diagnostics.
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
    match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) {
        // Semi-statement forms are odd:
        // See https://github.com/rust-lang/rust/issues/29071
        (true, None) => false,
        (false, _) => true, // Continue parsing the expression.
        // An exhaustive check is done in the following block, but these are checked first
        // because they *are* ambiguous but also reasonable looking incorrect syntax, so we
        // want to keep their span info to improve diagnostics in these cases in a later stage.
        (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
        (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
        (true, Some(AssocOp::Add)) // `{ 42 } + 42
        // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
        // `if x { a } else { b } && if y { c } else { d }`
        if !self.look_ahead(1, |t| t.is_used_keyword()) => {
            // These cases are ambiguous and can't be identified in the parser alone.
            let sp = self.sess.source_map().start_point(self.token.span);
            self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
            false
        }
        (true, Some(AssocOp::LAnd)) => {
            // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
            // above due to #74233.
            // These cases are ambiguous and can't be identified in the parser alone.
            let sp = self.sess.source_map().start_point(self.token.span);
            self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
            false
        }
        (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
        (true, Some(_)) => {
            // Statement-like expression followed by an operator: report it,
            // then keep parsing as an expression for better recovery.
            self.error_found_expr_would_be_stmt(lhs);
            true
        }
    }
}
|
|
|
|
|
|
|
|
|
|
/// We've found an expression that would be parsed as a statement,
|
|
|
|
|
/// but the next token implies this should be parsed as an expression.
|
|
|
|
|
/// For example: `if let Some(x) = x { x } else { 0 } / 2`.
|
|
|
|
|
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
let mut err = self.struct_span_err(
|
|
|
|
|
self.token.span,
|
|
|
|
|
&format!("expected expression, found `{}`", pprust::token_to_string(&self.token),),
|
|
|
|
|
);
|
2019-12-03 08:04:36 +00:00
|
|
|
|
err.span_label(self.token.span, "expected expression");
|
2021-06-28 18:22:47 +00:00
|
|
|
|
self.sess.expr_parentheses_needed(&mut err, lhs.span);
|
2019-12-03 08:04:36 +00:00
|
|
|
|
err.emit();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Possibly translate the current token to an associative operator.
/// The method does not advance the current token.
///
/// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
    let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) {
        // When parsing const expressions, stop parsing when encountering `>`.
        // (Otherwise e.g. `N > 2` inside a generic-argument position would be
        // consumed as a comparison.)
        (
            Some(
                AssocOp::ShiftRight
                | AssocOp::Greater
                | AssocOp::GreaterEqual
                | AssocOp::AssignOp(token::BinOpToken::Shr),
            ),
            _,
        ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
            return None;
        }
        (Some(op), _) => (op, self.token.span),
        // `and` written where `&&` was meant: report and continue as `&&`.
        (None, Some((Ident { name: sym::and, span }, false))) => {
            self.error_bad_logical_op("and", "&&", "conjunction");
            (AssocOp::LAnd, span)
        }
        // `or` written where `||` was meant: report and continue as `||`.
        (None, Some((Ident { name: sym::or, span }, false))) => {
            self.error_bad_logical_op("or", "||", "disjunction");
            (AssocOp::LOr, span)
        }
        _ => return None,
    };
    Some(source_map::respan(span, op))
}
|
|
|
|
|
|
|
|
|
|
/// Error on `and` and `or` suggesting `&&` and `||` respectively.
|
|
|
|
|
fn error_bad_logical_op(&self, bad: &str, good: &str, english: &str) {
|
|
|
|
|
self.struct_span_err(self.token.span, &format!("`{}` is not a logical operator", bad))
|
2019-12-04 10:27:11 +00:00
|
|
|
|
.span_suggestion_short(
|
2019-12-03 09:19:58 +00:00
|
|
|
|
self.token.span,
|
2019-12-04 10:27:11 +00:00
|
|
|
|
&format!("use `{}` to perform logical {}", good, english),
|
2019-12-03 09:19:58 +00:00
|
|
|
|
good.to_string(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.note("unlike in e.g., python and PHP, `&&` and `||` are used for logical operators")
|
|
|
|
|
.emit();
|
2019-12-03 08:04:36 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Checks if this expression is a successfully parsed statement.
|
|
|
|
|
fn expr_is_complete(&self, e: &Expr) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.restrictions.contains(Restrictions::STMT_EXPR)
|
|
|
|
|
&& !classify::expr_requires_semi_to_be_stmt(e)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-07 03:59:08 +00:00
|
|
|
|
/// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
/// The other two variants are handled in `parse_prefix_range_expr` below.
///
/// `lhs` is the already-parsed start of the range; `op` is `DotDot` or
/// `DotDotEq`; `cur_op_span` is the span of the dots token itself.
fn parse_range_expr(
    &mut self,
    prec: usize,
    lhs: P<Expr>,
    op: AssocOp,
    cur_op_span: Span,
) -> PResult<'a, P<Expr>> {
    // The end is optional: `x..` has no RHS.
    let rhs = if self.is_at_start_of_range_notation_rhs() {
        // `prec + 1` so the dots do not greedily swallow lower-precedence operators.
        Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
    } else {
        None
    };
    // With no RHS, the range's end span is the dots token itself.
    let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span);
    let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span);
    let limits =
        if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed };
    let range = self.mk_range(Some(lhs), rhs, limits);
    Ok(self.mk_expr(span, range, AttrVec::new()))
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
fn is_at_start_of_range_notation_rhs(&self) -> bool {
|
|
|
|
|
if self.token.can_begin_expr() {
|
2019-09-06 02:56:45 +00:00
|
|
|
|
// Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
|
2019-08-11 11:14:30 +00:00
|
|
|
|
if self.token == token::OpenDelim(token::Brace) {
|
|
|
|
|
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
|
|
|
|
|
}
|
|
|
|
|
true
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
    // Check for deprecated `...` syntax.
    if self.token == token::DotDotDot {
        self.err_dotdotdot_syntax(self.token.span);
    }

    // Caller contract: this is only reached with a dots token under cursor.
    debug_assert!(
        [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
        "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
        self.token
    );

    let limits = match self.token.kind {
        token::DotDot => RangeLimits::HalfOpen,
        _ => RangeLimits::Closed,
    };
    // Determine the operator before bumping so its precedence is available below.
    let op = AssocOp::from_token(&self.token);
    // FIXME: `parse_prefix_range_expr` is called when the current
    // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already
    // parsed attributes, then trying to parse them here will always fail.
    // We should figure out how we want attributes on range expressions to work.
    let attrs = self.parse_or_use_outer_attributes(attrs)?;
    self.collect_tokens_for_expr(attrs, |this, attrs| {
        let lo = this.token.span;
        this.bump();
        let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
            // RHS must be parsed with more associativity than the dots.
            this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
                .map(|x| (lo.to(x.span), Some(x)))?
        } else {
            // Bare `..`/`..=` with no end expression.
            (lo, None)
        };
        let range = this.mk_range(None, opt_end, limits);
        Ok(this.mk_expr(span, range, attrs.into()))
    })
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses a prefix-unary-operator expr.
fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(attrs)?;
    let lo = self.token.span;

    // Local shorthand: run `$body` under token collection and wrap the
    // resulting `(hi, ExprKind)` into an expression spanning `lo..hi`.
    macro_rules! make_it {
        ($this:ident, $attrs:expr, |this, _| $body:expr) => {
            $this.collect_tokens_for_expr($attrs, |$this, attrs| {
                let (hi, ex) = $body?;
                Ok($this.mk_expr(lo.to(hi), ex, attrs.into()))
            })
        };
    }

    let this = self;

    // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
    match this.token.uninterpolate().kind {
        token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
        token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
        token::BinOp(token::Minus) => {
            make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
        } // `-expr`
        token::BinOp(token::Star) => {
            make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
        } // `*expr`
        token::BinOp(token::And) | token::AndAnd => {
            make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
        }
        // Leading `+` before a numeric literal is rejected with a suggestion.
        token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
            let mut err = this.struct_span_err(lo, "leading `+` is not supported");
            err.span_label(lo, "unexpected `+`");

            // a block on the LHS might have been intended to be an expression instead
            if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
                this.sess.expr_parentheses_needed(&mut err, *sp);
            } else {
                err.span_suggestion_verbose(
                    lo,
                    "try removing the `+`",
                    "".to_string(),
                    Applicability::MachineApplicable,
                );
            }
            err.emit();

            // Consume the `+` and parse the rest as if it were absent.
            this.bump();
            this.parse_prefix_expr(None)
        } // `+expr`
        token::Ident(..) if this.token.is_keyword(kw::Box) => {
            make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
        }
        token::Ident(..) if this.is_mistaken_not_ident_negation() => {
            make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
        }
        _ => return this.parse_dot_or_call_expr(Some(attrs)),
    }
}
|
|
|
|
|
|
2019-12-07 02:07:35 +00:00
|
|
|
|
/// Shared tail of the prefix-operator parsers: consume the operator token,
/// parse the operand, and return the full span together with the operand.
fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
    self.bump();
    let operand = self.parse_prefix_expr(None);
    let (hi, operand) = self.interpolated_or_expr_span(operand)?;
    Ok((lo.to(hi), operand))
}
|
|
|
|
|
|
|
|
|
|
/// Parses the operand of the unary operator `op` and builds the unary node.
fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
    let (span, operand) = self.parse_prefix_expr_common(lo)?;
    let kind = self.mk_unary(op, operand);
    Ok((span, kind))
}
|
|
|
|
|
|
|
|
|
|
// Recover on `!` suggesting for bitwise negation instead.
|
|
|
|
|
fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
|
2019-12-07 02:05:51 +00:00
|
|
|
|
self.struct_span_err(lo, "`~` cannot be used as a unary operator")
|
|
|
|
|
.span_suggestion_short(
|
|
|
|
|
lo,
|
|
|
|
|
"use `!` to perform bitwise not",
|
|
|
|
|
"!".to_owned(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
|
|
|
|
|
2019-12-07 02:07:35 +00:00
|
|
|
|
self.parse_unary_expr(lo, UnOp::Not)
|
2019-12-07 02:00:06 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-07 01:55:12 +00:00
|
|
|
|
    /// Parse `box expr`.
    ///
    /// Parses the operand after the `box` keyword, feature-gates the whole
    /// span under `box_syntax`, and produces an `ExprKind::Box`.
    fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
        let (span, expr) = self.parse_prefix_expr_common(lo)?;
        self.sess.gated_spans.gate(sym::box_syntax, span);
        Ok((span, ExprKind::Box(expr)))
    }
|
|
|
|
|
|
|
|
|
|
    /// Returns `true` when the current token is the identifier `not` and the
    /// following token could begin an expression but could not *continue* one
    /// after an identifier — i.e. the user likely wrote `not expr` meaning
    /// `!expr`.
    fn is_mistaken_not_ident_negation(&self) -> bool {
        let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind {
            // These tokens can start an expression after `!`, but
            // can't continue an expression after an ident
            token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
            token::Literal(..) | token::Pound => true,
            _ => t.is_whole_expr(),
        };
        self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr)
    }
|
|
|
|
|
|
|
|
|
|
    /// Recover on `not expr` in favor of `!expr`.
    ///
    /// Emits a `MachineApplicable` suggestion replacing `not` (plus any
    /// trailing whitespace) with `!`, then parses the operand as a `!`
    /// unary expression.
    fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
        // Emit the error...
        let not_token = self.look_ahead(1, |t| t.clone());
        self.struct_span_err(
            not_token.span,
            &format!("unexpected {} after identifier", super::token_descr(&not_token)),
        )
        .span_suggestion_short(
            // Span the `not` plus trailing whitespace to avoid
            // trailing whitespace after the `!` in our suggestion
            self.sess.source_map().span_until_non_whitespace(lo.to(not_token.span)),
            "use `!` to perform logical negation",
            "!".to_owned(),
            Applicability::MachineApplicable,
        )
        .emit();

        // ...and recover!
        self.parse_unary_expr(lo, UnOp::Not)
    }
|
|
|
|
|
|
|
|
|
|
/// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
|
|
|
|
|
fn interpolated_or_expr_span(
|
|
|
|
|
&self,
|
|
|
|
|
expr: PResult<'a, P<Expr>>,
|
|
|
|
|
) -> PResult<'a, (Span, P<Expr>)> {
|
|
|
|
|
expr.map(|e| {
|
2020-02-10 17:20:01 +00:00
|
|
|
|
(
|
2020-02-24 10:04:13 +00:00
|
|
|
|
match self.prev_token.kind {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
TokenKind::Interpolated(..) => self.prev_token.span,
|
2020-02-10 17:20:01 +00:00
|
|
|
|
_ => e.span,
|
|
|
|
|
},
|
|
|
|
|
e,
|
|
|
|
|
)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
    /// Parses the type on the right-hand side of a cast-like associative
    /// operator, combining it with `lhs` via `expr_kind` (the caller passes
    /// e.g. `ExprKind::Cast` or `ExprKind::Type`).
    ///
    /// Contains two recovery paths:
    /// * `x as usize < y`, where `usize < y` was first swallowed as a generic
    ///   type and the parser must be rewound;
    /// * `foo: loop { .. }` written with a missing `'` on the loop label.
    fn parse_assoc_op_cast(
        &mut self,
        lhs: P<Expr>,
        lhs_span: Span,
        expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind,
    ) -> PResult<'a, P<Expr>> {
        // Builds the result expression spanning from `lhs_span` to the type.
        let mk_expr = |this: &mut Self, lhs: P<Expr>, rhs: P<Ty>| {
            this.mk_expr(
                this.mk_expr_sp(&lhs, lhs_span, rhs.span),
                expr_kind(lhs, rhs),
                AttrVec::new(),
            )
        };

        // Save the state of the parser before parsing type normally, in case there is a
        // LessThan comparison after this cast.
        let parser_snapshot_before_type = self.clone();
        let cast_expr = match self.parse_as_cast_ty() {
            Ok(rhs) => mk_expr(self, lhs, rhs),
            Err(mut type_err) => {
                // Rewind to before attempting to parse the type with generics, to recover
                // from situations like `x as usize < y` in which we first tried to parse
                // `usize < y` as a type with generic arguments.
                let parser_snapshot_after_type = mem::replace(self, parser_snapshot_before_type);

                // Check for typo of `'a: loop { break 'a }` with a missing `'`.
                match (&lhs.kind, &self.token.kind) {
                    (
                        // `foo: `
                        ExprKind::Path(None, ast::Path { segments, .. }),
                        TokenKind::Ident(kw::For | kw::Loop | kw::While, false),
                    ) if segments.len() == 1 => {
                        let snapshot = self.clone();
                        // Re-parse as a labeled loop with the `'` prepended.
                        let label = Label {
                            ident: Ident::from_str_and_span(
                                &format!("'{}", segments[0].ident),
                                segments[0].ident.span,
                            ),
                        };
                        match self.parse_labeled_expr(label, AttrVec::new(), false) {
                            Ok(expr) => {
                                type_err.cancel();
                                self.struct_span_err(label.ident.span, "malformed loop label")
                                    .span_suggestion(
                                        label.ident.span,
                                        "use the correct loop label format",
                                        label.ident.to_string(),
                                        Applicability::MachineApplicable,
                                    )
                                    .emit();
                                return Ok(expr);
                            }
                            Err(mut err) => {
                                // Not a labeled loop after all; roll back.
                                err.cancel();
                                *self = snapshot;
                            }
                        }
                    }
                    _ => {}
                }

                match self.parse_path(PathStyle::Expr) {
                    Ok(path) => {
                        let (op_noun, op_verb) = match self.token.kind {
                            token::Lt => ("comparison", "comparing"),
                            token::BinOp(token::Shl) => ("shift", "shifting"),
                            _ => {
                                // We can end up here even without `<` being the next token, for
                                // example because `parse_ty_no_plus` returns `Err` on keywords,
                                // but `parse_path` returns `Ok` on them due to error recovery.
                                // Return original error and parser state.
                                *self = parser_snapshot_after_type;
                                return Err(type_err);
                            }
                        };

                        // Successfully parsed the type path leaving a `<` yet to parse.
                        type_err.cancel();

                        // Report non-fatal diagnostics, keep `x as usize` as an expression
                        // in AST and continue parsing.
                        let msg = format!(
                            "`<` is interpreted as a start of generic arguments for `{}`, not a {}",
                            pprust::path_to_string(&path),
                            op_noun,
                        );
                        let span_after_type = parser_snapshot_after_type.token.span;
                        let expr =
                            mk_expr(self, lhs, self.mk_ty(path.span, TyKind::Path(None, path)));

                        self.struct_span_err(self.token.span, &msg)
                            .span_label(
                                self.look_ahead(1, |t| t.span).to(span_after_type),
                                "interpreted as generic arguments",
                            )
                            .span_label(self.token.span, format!("not interpreted as {}", op_noun))
                            .multipart_suggestion(
                                &format!("try {} the cast value", op_verb),
                                vec![
                                    (expr.span.shrink_to_lo(), "(".to_string()),
                                    (expr.span.shrink_to_hi(), ")".to_string()),
                                ],
                                Applicability::MachineApplicable,
                            )
                            .emit();

                        expr
                    }
                    Err(mut path_err) => {
                        // Couldn't parse as a path, return original error and parser state.
                        path_err.cancel();
                        *self = parser_snapshot_after_type;
                        return Err(type_err);
                    }
                }
            }
        };

        self.parse_and_disallow_postfix_after_cast(cast_expr)
    }
|
2020-02-09 05:34:38 +00:00
|
|
|
|
|
2020-02-16 00:12:59 +00:00
|
|
|
|
    /// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast,
    /// then emits an error and returns the newly parsed tree.
    /// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`.
    fn parse_and_disallow_postfix_after_cast(
        &mut self,
        cast_expr: P<Expr>,
    ) -> PResult<'a, P<Expr>> {
        // Save the memory location of expr before parsing any following postfix operators.
        // This will be compared with the memory location of the output expression.
        // If they differ, we can assume we parsed another expression because the
        // existing expression is not reallocated.
        let addr_before = &*cast_expr as *const _ as usize;
        let span = cast_expr.span;
        let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
        let changed = addr_before != &*with_postfix as *const _ as usize;

        // Check if an illegal postfix operator has been added after the cast.
        // If the resulting expression is not a cast, or has a different memory location,
        // it is an illegal postfix operator.
        if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed {
            let msg = format!(
                "casts cannot be followed by {}",
                match with_postfix.kind {
                    ExprKind::Index(_, _) => "indexing",
                    ExprKind::Try(_) => "`?`",
                    ExprKind::Field(_, _) => "a field access",
                    ExprKind::MethodCall(_, _, _) => "a method call",
                    ExprKind::Call(_, _) => "a function call",
                    ExprKind::Await(_) => "`.await`",
                    // An error expression was already reported; don't pile on.
                    ExprKind::Err => return Ok(with_postfix),
                    _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
                }
            );
            let mut err = self.struct_span_err(span, &msg);
            // If type ascription is "likely an error", the user will already be getting a useful
            // help message, and doesn't need a second.
            if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) {
                self.maybe_annotate_with_ascription(&mut err, false);
            } else {
                let suggestions = vec![
                    (span.shrink_to_lo(), "(".to_string()),
                    (span.shrink_to_hi(), ")".to_string()),
                ];
                err.multipart_suggestion(
                    "try surrounding the expression in parentheses",
                    suggestions,
                    Applicability::MachineApplicable,
                );
            }
            err.emit();
        };
        // The (possibly postfix-extended) expression is still returned so
        // parsing can continue after the error.
        Ok(with_postfix)
    }
|
|
|
|
|
|
2019-12-07 03:59:08 +00:00
|
|
|
|
fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> {
|
|
|
|
|
let maybe_path = self.could_ascription_be_path(&lhs.kind);
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.last_type_ascription = Some((self.prev_token.span, maybe_path));
|
2019-12-07 03:59:08 +00:00
|
|
|
|
let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
|
|
|
|
|
self.sess.gated_spans.gate(sym::type_ascription, lhs.span);
|
|
|
|
|
Ok(lhs)
|
|
|
|
|
}
|
|
|
|
|
|
2019-11-23 14:22:00 +00:00
|
|
|
|
    /// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
    fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
        self.expect_and()?;
        // A lifetime here (`&'a x`) is always wrong; eat it anyway so we can
        // report it with a precise span after the operand is parsed.
        let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
        let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
        let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
        let expr = self.parse_prefix_expr(None);
        let (hi, expr) = self.interpolated_or_expr_span(expr)?;
        let span = lo.to(hi);
        if let Some(lt) = lifetime {
            self.error_remove_borrow_lifetime(span, lt.ident.span);
        }
        Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr)))
    }
|
|
|
|
|
|
|
|
|
|
fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
|
|
|
|
|
self.struct_span_err(span, "borrow expressions cannot be annotated with lifetimes")
|
|
|
|
|
.span_label(lt_span, "annotated with lifetime here")
|
|
|
|
|
.span_suggestion(
|
|
|
|
|
lt_span,
|
|
|
|
|
"remove the lifetime annotation",
|
|
|
|
|
String::new(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
2019-12-07 01:37:03 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parse `mut?` or `raw [ const | mut ]`.
|
|
|
|
|
fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutability) {
|
|
|
|
|
if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) {
|
|
|
|
|
// `raw [ const | mut ]`.
|
2019-11-23 14:15:49 +00:00
|
|
|
|
let found_raw = self.eat_keyword(kw::Raw);
|
|
|
|
|
assert!(found_raw);
|
|
|
|
|
let mutability = self.parse_const_or_mut().unwrap();
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span));
|
2019-11-23 14:15:49 +00:00
|
|
|
|
(ast::BorrowKind::Raw, mutability)
|
|
|
|
|
} else {
|
2019-12-07 01:37:03 +00:00
|
|
|
|
// `mut?`
|
2019-11-23 14:15:49 +00:00
|
|
|
|
(ast::BorrowKind::Ref, self.parse_mutability())
|
2019-12-07 01:37:03 +00:00
|
|
|
|
}
|
2019-11-23 14:15:49 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
    /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
    ///
    /// Collects tokens for the expression so attribute targets can later
    /// reproduce their token stream.
    fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
        let attrs = self.parse_or_use_outer_attributes(attrs)?;
        self.collect_tokens_for_expr(attrs, |this, attrs| {
            let base = this.parse_bottom_expr();
            let (span, base) = this.interpolated_or_expr_span(base)?;
            this.parse_dot_or_call_expr_with(base, span, attrs)
        })
    }
|
|
|
|
|
|
|
|
|
|
    /// Parses the postfix operators (`.`, `?`, call, index) following `e0`,
    /// then prepends `attrs` onto the resulting expression's attributes.
    pub(super) fn parse_dot_or_call_expr_with(
        &mut self,
        e0: P<Expr>,
        lo: Span,
        mut attrs: Vec<ast::Attribute>,
    ) -> PResult<'a, P<Expr>> {
        // Stitch the list of outer attributes onto the return value.
        // A little bit ugly, but the best way given the current code
        // structure
        self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| {
            expr.map(|mut expr| {
                attrs.extend::<Vec<_>>(expr.attrs.into());
                expr.attrs = attrs.into();
                expr
            })
        })
    }
|
|
|
|
|
|
2019-12-07 01:01:58 +00:00
|
|
|
|
    /// Core postfix loop: repeatedly consumes `?`, `.suffix`, `(args)`, and
    /// `[index]` after `e`, returning when no further postfix operator
    /// applies or the expression form cannot be extended.
    fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
        loop {
            if self.eat(&token::Question) {
                // `expr?`
                e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e), AttrVec::new());
                continue;
            }
            if self.eat(&token::Dot) {
                // expr.f
                e = self.parse_dot_suffix_expr(lo, e)?;
                continue;
            }
            // Expressions like block-bodied forms stop accepting postfix
            // operators here; checked before call/index, after `?`/`.`.
            if self.expr_is_complete(&e) {
                return Ok(e);
            }
            e = match self.token.kind {
                token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e),
                token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?,
                _ => return Ok(e),
            }
        }
    }
|
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
|
fn look_ahead_type_ascription_as_field(&mut self) -> bool {
|
|
|
|
|
self.look_ahead(1, |t| t.is_ident())
|
|
|
|
|
&& self.look_ahead(2, |t| t == &token::Colon)
|
|
|
|
|
&& self.look_ahead(3, |t| t.can_begin_expr())
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-07 00:52:53 +00:00
|
|
|
|
    /// Dispatches on the token after a consumed `.`: a named field/method
    /// suffix, an integer tuple index, a float that must be split into
    /// tuple-index components, or (with an error) anything else.
    fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
        match self.token.uninterpolate().kind {
            token::Ident(..) => self.parse_dot_suffix(base, lo),
            token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
            }
            token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
                Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
            }
            _ => {
                // Report and recover by returning `base` unchanged.
                self.error_unexpected_after_dot();
                Ok(base)
            }
        }
    }
|
|
|
|
|
|
2019-12-06 23:59:56 +00:00
|
|
|
|
fn error_unexpected_after_dot(&self) {
|
|
|
|
|
// FIXME Could factor this out into non_fatal_unexpected or something.
|
2019-12-07 02:07:35 +00:00
|
|
|
|
let actual = pprust::token_to_string(&self.token);
|
2019-12-06 23:59:56 +00:00
|
|
|
|
self.struct_span_err(self.token.span, &format!("unexpected token: `{}`", actual)).emit();
|
|
|
|
|
}
|
|
|
|
|
|
2020-10-27 21:23:58 +00:00
|
|
|
|
    // We need an identifier or integer, but the next token is a float.
    // Break the float into components to extract the identifier or integer.
    // FIXME: With current `TokenCursor` it's hard to break tokens into more than 2
    // parts unless those parts are processed immediately. `TokenCursor` should either
    // support pushing "future tokens" (would be also helpful to `break_and_eat`), or
    // we should break everything including floats into more basic proc-macro style
    // tokens in the lexer (probably preferable).
    fn parse_tuple_field_access_expr_float(
        &mut self,
        lo: Span,
        base: P<Expr>,
        float: Symbol,
        suffix: Option<Symbol>,
    ) -> P<Expr> {
        // A float literal lexes as alternating ident-like runs and the
        // punctuation `.`, `+`, `-` (e.g. `1.2e-3` -> `1`, `.`, `2e`, `-`, `3`).
        #[derive(Debug)]
        enum FloatComponent {
            IdentLike(String),
            Punct(char),
        }
        use FloatComponent::*;

        // Split the literal text into those components.
        let float_str = float.as_str();
        let mut components = Vec::new();
        let mut ident_like = String::new();
        for c in float_str.chars() {
            if c == '_' || c.is_ascii_alphanumeric() {
                ident_like.push(c);
            } else if matches!(c, '.' | '+' | '-') {
                if !ident_like.is_empty() {
                    components.push(IdentLike(mem::take(&mut ident_like)));
                }
                components.push(Punct(c));
            } else {
                panic!("unexpected character in a float token: {:?}", c)
            }
        }
        if !ident_like.is_empty() {
            components.push(IdentLike(ident_like));
        }

        // With proc macros the span can refer to anything, the source may be too short,
        // or too long, or non-ASCII. It only makes sense to break our span into components
        // if its underlying text is identical to our float literal.
        let span = self.token.span;
        let can_take_span_apart =
            || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref();

        match &*components {
            // 1e2
            [IdentLike(i)] => {
                self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
            }
            // 1.
            [IdentLike(i), Punct('.')] => {
                let (ident_span, dot_span) = if can_take_span_apart() {
                    let (span, ident_len) = (span.data(), BytePos::from_usize(i.len()));
                    let ident_span = span.with_hi(span.lo + ident_len);
                    let dot_span = span.with_lo(span.lo + ident_len);
                    (ident_span, dot_span)
                } else {
                    (span, span)
                };
                assert!(suffix.is_none());
                let symbol = Symbol::intern(&i);
                // Replace the float token with the ident, and queue the `.`.
                self.token = Token::new(token::Ident(symbol, false), ident_span);
                let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
                self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
            }
            // 1.2 | 1.2e3
            [IdentLike(i1), Punct('.'), IdentLike(i2)] => {
                let (ident1_span, dot_span, ident2_span) = if can_take_span_apart() {
                    let (span, ident1_len) = (span.data(), BytePos::from_usize(i1.len()));
                    let ident1_span = span.with_hi(span.lo + ident1_len);
                    let dot_span = span
                        .with_lo(span.lo + ident1_len)
                        .with_hi(span.lo + ident1_len + BytePos(1));
                    let ident2_span = self.token.span.with_lo(span.lo + ident1_len + BytePos(1));
                    (ident1_span, dot_span, ident2_span)
                } else {
                    (span, span, span)
                };
                let symbol1 = Symbol::intern(&i1);
                self.token = Token::new(token::Ident(symbol1, false), ident1_span);
                // This needs to be `Spacing::Alone` to prevent regressions.
                // See issue #76399 and PR #76285 for more details
                let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
                let base1 =
                    self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
                let symbol2 = Symbol::intern(&i2);
                let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
                self.bump_with((next_token2, self.token_spacing)); // `.`
                self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
            }
            // 1e+ | 1e- (recovered)
            [IdentLike(_), Punct('+' | '-')] |
            // 1e+2 | 1e-2
            [IdentLike(_), Punct('+' | '-'), IdentLike(_)] |
            // 1.2e+ | 1.2e-
            [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-')] |
            // 1.2e+3 | 1.2e-3
            [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
                // See the FIXME about `TokenCursor` above.
                self.error_unexpected_after_dot();
                base
            }
            _ => panic!("unexpected components in a float token: {:?}", components),
        }
    }
|
|
|
|
|
|
2019-12-06 23:16:19 +00:00
|
|
|
|
fn parse_tuple_field_access_expr(
|
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
|
|
|
|
base: P<Expr>,
|
|
|
|
|
field: Symbol,
|
|
|
|
|
suffix: Option<Symbol>,
|
Rewrite `collect_tokens` implementations to use a flattened buffer
Instead of trying to collect tokens at each depth, we 'flatten' the
stream as we go allong, pushing open/close delimiters to our buffer
just like regular tokens. One capturing is complete, we reconstruct a
nested `TokenTree::Delimited` structure, producing a normal
`TokenStream`.
The reconstructed `TokenStream` is not created immediately - instead, it is
produced on-demand by a closure (wrapped in a new `LazyTokenStream` type). This
closure stores a clone of the original `TokenCursor`, plus a record of the
number of calls to `next()/next_desugared()`. This is sufficient to reconstruct
the tokenstream seen by the callback without storing any additional state. If
the tokenstream is never used (e.g. when a captured `macro_rules!` argument is
never passed to a proc macro), we never actually create a `TokenStream`.
This implementation has a number of advantages over the previous one:
* It is significantly simpler, with no edge cases around capturing the
start/end of a delimited group.
* It can be easily extended to allow replacing tokens an an arbitrary
'depth' by just using `Vec::splice` at the proper position. This is
important for PR #76130, which requires us to track information about
attributes along with tokens.
* The lazy approach to `TokenStream` construction allows us to easily
parse an AST struct, and then decide after the fact whether we need a
`TokenStream`. This will be useful when we start collecting tokens for
`Attribute` - we can discard the `LazyTokenStream` if the parsed
attribute doesn't need tokens (e.g. is a builtin attribute).
The performance impact seems to be neglibile (see
https://github.com/rust-lang/rust/pull/77250#issuecomment-703960604). There is a
small slowdown on a few benchmarks, but it only rises above 1% for incremental
builds, where it represents a larger fraction of the much smaller instruction
count. There a ~1% speedup on a few other incremental benchmarks - my guess is
that the speedups and slowdowns will usually cancel out in practice.
2020-09-27 01:56:29 +00:00
|
|
|
|
next_token: Option<(Token, Spacing)>,
|
2019-12-06 23:16:19 +00:00
|
|
|
|
) -> P<Expr> {
|
2020-04-18 18:26:10 +00:00
|
|
|
|
match next_token {
|
|
|
|
|
Some(next_token) => self.bump_with(next_token),
|
|
|
|
|
None => self.bump(),
|
|
|
|
|
}
|
2020-02-24 10:04:13 +00:00
|
|
|
|
let span = self.prev_token.span;
|
2019-12-06 23:16:19 +00:00
|
|
|
|
let field = ExprKind::Field(base, Ident::new(field, span));
|
|
|
|
|
self.expect_no_suffix(span, "a tuple index", suffix);
|
|
|
|
|
self.mk_expr(lo.to(span), field, AttrVec::new())
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-06 23:08:44 +00:00
|
|
|
|
/// Parse a function call expression, `expr(...)`.
|
|
|
|
|
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
|
2021-09-07 17:45:16 +00:00
|
|
|
|
let snapshot = if self.token.kind == token::OpenDelim(token::Paren)
|
|
|
|
|
&& self.look_ahead_type_ascription_as_field()
|
|
|
|
|
{
|
|
|
|
|
Some((self.clone(), fun.kind.clone()))
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
};
|
|
|
|
|
let open_paren = self.token.span;
|
|
|
|
|
|
|
|
|
|
let mut seq = self.parse_paren_expr_seq().map(|args| {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args), AttrVec::new())
|
2019-12-06 23:08:44 +00:00
|
|
|
|
});
|
2021-09-14 18:16:33 +00:00
|
|
|
|
if let Some(expr) =
|
|
|
|
|
self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot)
|
|
|
|
|
{
|
|
|
|
|
return expr;
|
|
|
|
|
}
|
2019-12-06 23:08:44 +00:00
|
|
|
|
self.recover_seq_parse_error(token::Paren, lo, seq)
|
|
|
|
|
}
|
|
|
|
|
|
2021-09-14 18:16:33 +00:00
|
|
|
|
/// If we encounter a parser state that looks like the user has written a `struct` literal with
|
|
|
|
|
/// parentheses instead of braces, recover the parser state and provide suggestions.
|
2021-09-20 15:24:47 +00:00
|
|
|
|
#[instrument(skip(self, seq, snapshot), level = "trace")]
|
2021-09-14 18:16:33 +00:00
|
|
|
|
fn maybe_recover_struct_lit_bad_delims(
|
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
|
|
|
|
open_paren: Span,
|
|
|
|
|
seq: &mut PResult<'a, P<Expr>>,
|
|
|
|
|
snapshot: Option<(Self, ExprKind)>,
|
|
|
|
|
) -> Option<P<Expr>> {
|
2021-09-07 17:45:16 +00:00
|
|
|
|
match (seq.as_mut(), snapshot) {
|
|
|
|
|
(Err(ref mut err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
|
|
|
|
|
let name = pprust::path_to_string(&path);
|
|
|
|
|
snapshot.bump(); // `(`
|
2021-10-07 20:31:33 +00:00
|
|
|
|
match snapshot.parse_struct_fields(path, false, token::Paren) {
|
2021-09-07 17:45:16 +00:00
|
|
|
|
Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => {
|
2021-12-07 15:44:51 +00:00
|
|
|
|
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
|
2021-09-07 17:45:16 +00:00
|
|
|
|
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
|
|
|
|
|
*self = snapshot;
|
|
|
|
|
let close_paren = self.prev_token.span;
|
|
|
|
|
let span = lo.to(self.prev_token.span);
|
2021-12-07 15:44:51 +00:00
|
|
|
|
if !fields.is_empty() {
|
|
|
|
|
err.cancel();
|
|
|
|
|
let mut err = self.struct_span_err(
|
|
|
|
|
span,
|
|
|
|
|
"invalid `struct` delimiters or `fn` call arguments",
|
|
|
|
|
);
|
|
|
|
|
err.multipart_suggestion(
|
|
|
|
|
&format!("if `{}` is a struct, use braces as delimiters", name),
|
|
|
|
|
vec![
|
|
|
|
|
(open_paren, " { ".to_string()),
|
|
|
|
|
(close_paren, " }".to_string()),
|
|
|
|
|
],
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
err.multipart_suggestion(
|
|
|
|
|
&format!("if `{}` is a function, use the arguments directly", name),
|
|
|
|
|
fields
|
|
|
|
|
.into_iter()
|
|
|
|
|
.map(|field| (field.span.until(field.expr.span), String::new()))
|
|
|
|
|
.collect(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
err.emit();
|
|
|
|
|
} else {
|
|
|
|
|
err.emit();
|
|
|
|
|
}
|
2021-09-14 18:16:33 +00:00
|
|
|
|
return Some(self.mk_expr_err(span));
|
2021-09-07 17:45:16 +00:00
|
|
|
|
}
|
|
|
|
|
Ok(_) => {}
|
|
|
|
|
Err(mut err) => err.emit(),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
_ => {}
|
|
|
|
|
}
|
2021-09-14 18:16:33 +00:00
|
|
|
|
None
|
2019-12-06 23:08:44 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-06 23:04:46 +00:00
|
|
|
|
/// Parse an indexing expression `expr[...]`.
|
|
|
|
|
fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
|
|
|
|
|
self.bump(); // `[`
|
|
|
|
|
let index = self.parse_expr()?;
|
|
|
|
|
self.expect(&token::CloseDelim(token::Bracket))?;
|
2020-02-29 11:56:15 +00:00
|
|
|
|
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new()))
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Assuming we have just parsed `.`, continue parsing into an expression.
|
|
|
|
|
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
2020-03-04 21:34:57 +00:00
|
|
|
|
if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) {
|
2021-02-21 12:01:01 +00:00
|
|
|
|
return Ok(self.mk_await_expr(self_arg, lo));
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-06-09 19:34:23 +00:00
|
|
|
|
let fn_span_lo = self.token.span;
|
2021-05-15 21:56:28 +00:00
|
|
|
|
let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
|
2020-06-27 15:35:12 +00:00
|
|
|
|
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]);
|
2020-07-23 16:34:07 +00:00
|
|
|
|
self.check_turbofish_missing_angle_brackets(&mut segment);
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2019-12-06 23:04:46 +00:00
|
|
|
|
if self.check(&token::OpenDelim(token::Paren)) {
|
|
|
|
|
// Method call `expr.f()`
|
|
|
|
|
let mut args = self.parse_paren_expr_seq()?;
|
|
|
|
|
args.insert(0, self_arg);
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2020-06-09 19:34:23 +00:00
|
|
|
|
let fn_span = fn_span_lo.to(self.prev_token.span);
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let span = lo.to(self.prev_token.span);
|
2020-06-09 19:34:23 +00:00
|
|
|
|
Ok(self.mk_expr(span, ExprKind::MethodCall(segment, args, fn_span), AttrVec::new()))
|
2019-12-06 23:04:46 +00:00
|
|
|
|
} else {
|
|
|
|
|
// Field access `expr.f`
|
|
|
|
|
if let Some(args) = segment.args {
|
2019-12-30 14:09:42 +00:00
|
|
|
|
self.struct_span_err(
|
|
|
|
|
args.span(),
|
2020-02-10 21:58:36 +00:00
|
|
|
|
"field expressions cannot have generic arguments",
|
2019-12-30 14:09:42 +00:00
|
|
|
|
)
|
|
|
|
|
.emit();
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let span = lo.to(self.prev_token.span);
|
2019-12-06 23:04:46 +00:00
|
|
|
|
Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), AttrVec::new()))
|
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// At the bottom (top?) of the precedence hierarchy,
|
|
|
|
|
/// Parses things like parenthesized exprs, macros, `return`, etc.
|
|
|
|
|
///
|
|
|
|
|
/// N.B., this does not parse outer attributes, and is private because it only works
|
|
|
|
|
/// correctly if called from `parse_dot_or_call_expr()`.
|
|
|
|
|
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
|
|
|
|
|
maybe_recover_from_interpolated_ty_qpath!(self, true);
|
|
|
|
|
maybe_whole_expr!(self);
|
|
|
|
|
|
|
|
|
|
// Outer attributes are already parsed and will be
|
|
|
|
|
// added to the return value after the fact.
|
|
|
|
|
//
|
|
|
|
|
// Therefore, prevent sub-parser from parsing
|
2021-08-22 12:46:15 +00:00
|
|
|
|
// attributes by giving them an empty "already-parsed" list.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
let attrs = AttrVec::new();
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
// Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
|
2019-12-03 14:31:45 +00:00
|
|
|
|
let lo = self.token.span;
|
2019-12-03 15:38:08 +00:00
|
|
|
|
if let token::Literal(_) = self.token.kind {
|
2019-08-11 11:14:30 +00:00
|
|
|
|
// This match arm is a special-case of the `_` match arm below and
|
|
|
|
|
// could be removed without changing functionality, but it's faster
|
|
|
|
|
// to have it here, especially for programs with large constants.
|
2019-12-03 15:38:08 +00:00
|
|
|
|
self.parse_lit_expr(attrs)
|
|
|
|
|
} else if self.check(&token::OpenDelim(token::Paren)) {
|
|
|
|
|
self.parse_tuple_parens_expr(attrs)
|
|
|
|
|
} else if self.check(&token::OpenDelim(token::Brace)) {
|
|
|
|
|
self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs)
|
|
|
|
|
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
|
|
|
|
|
self.parse_closure_expr(attrs)
|
|
|
|
|
} else if self.check(&token::OpenDelim(token::Bracket)) {
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
self.parse_array_or_repeat_expr(attrs, token::Bracket)
|
2020-03-07 12:15:58 +00:00
|
|
|
|
} else if self.check_path() {
|
2019-12-03 15:38:08 +00:00
|
|
|
|
self.parse_path_start_expr(attrs)
|
|
|
|
|
} else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
|
|
|
|
|
self.parse_closure_expr(attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::If) {
|
|
|
|
|
self.parse_if_expr(attrs)
|
2020-03-21 07:32:55 +00:00
|
|
|
|
} else if self.check_keyword(kw::For) {
|
|
|
|
|
if self.choose_generics_over_qpath(1) {
|
|
|
|
|
// NOTE(Centril, eddyb): DO NOT REMOVE! Beyond providing parser recovery,
|
|
|
|
|
// this is an insurance policy in case we allow qpaths in (tuple-)struct patterns.
|
|
|
|
|
// When `for <Foo as Bar>::Proj in $expr $block` is wanted,
|
|
|
|
|
// you can disambiguate in favor of a pattern with `(...)`.
|
|
|
|
|
self.recover_quantified_closure_expr(attrs)
|
|
|
|
|
} else {
|
|
|
|
|
assert!(self.eat_keyword(kw::For));
|
|
|
|
|
self.parse_for_expr(None, self.prev_token.span, attrs)
|
|
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.eat_keyword(kw::While) {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.parse_while_expr(None, self.prev_token.span, attrs)
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if let Some(label) = self.eat_label() {
|
2021-01-21 04:03:29 +00:00
|
|
|
|
self.parse_labeled_expr(label, attrs, true)
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.eat_keyword(kw::Loop) {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.parse_loop_expr(None, self.prev_token.span, attrs)
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.eat_keyword(kw::Continue) {
|
|
|
|
|
let kind = ExprKind::Continue(self.eat_label());
|
2020-02-29 11:56:15 +00:00
|
|
|
|
Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.eat_keyword(kw::Match) {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let match_sp = self.prev_token.span;
|
2019-12-03 15:38:08 +00:00
|
|
|
|
self.parse_match_expr(attrs).map_err(|mut err| {
|
|
|
|
|
err.span_label(match_sp, "while parsing this match expression");
|
|
|
|
|
err
|
|
|
|
|
})
|
|
|
|
|
} else if self.eat_keyword(kw::Unsafe) {
|
|
|
|
|
self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided), attrs)
|
2020-10-19 21:44:37 +00:00
|
|
|
|
} else if self.check_inline_const(0) {
|
2021-11-22 16:25:28 +00:00
|
|
|
|
self.parse_const_block(lo.to(self.token.span), false)
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.is_do_catch_block() {
|
|
|
|
|
self.recover_do_catch(attrs)
|
|
|
|
|
} else if self.is_try_block() {
|
|
|
|
|
self.expect_keyword(kw::Try)?;
|
|
|
|
|
self.parse_try_block(lo, attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::Return) {
|
|
|
|
|
self.parse_return_expr(attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::Break) {
|
|
|
|
|
self.parse_break_expr(attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::Yield) {
|
|
|
|
|
self.parse_yield_expr(attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::Let) {
|
|
|
|
|
self.parse_let_expr(attrs)
|
2020-11-11 13:15:15 +00:00
|
|
|
|
} else if self.eat_keyword(kw::Underscore) {
|
|
|
|
|
Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore, attrs))
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
|
|
|
|
|
// Don't complain about bare semicolons after unclosed braces
|
|
|
|
|
// recovery in order to keep the error count down. Fixing the
|
|
|
|
|
// delimiters will possibly also fix the bare semicolon found in
|
|
|
|
|
// expression context. For example, silence the following error:
|
|
|
|
|
//
|
|
|
|
|
// error: expected expression, found `;`
|
|
|
|
|
// --> file.rs:2:13
|
|
|
|
|
// |
|
|
|
|
|
// 2 | foo(bar(;
|
|
|
|
|
// | ^ expected expression
|
|
|
|
|
self.bump();
|
|
|
|
|
Ok(self.mk_expr_err(self.token.span))
|
2020-03-04 21:34:57 +00:00
|
|
|
|
} else if self.token.uninterpolated_span().rust_2018() {
|
2019-12-03 15:38:08 +00:00
|
|
|
|
// `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
|
|
|
|
|
if self.check_keyword(kw::Async) {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
if self.is_async_block() {
|
|
|
|
|
// Check for `async {` and `async move {`.
|
2019-12-03 15:38:08 +00:00
|
|
|
|
self.parse_async_block(attrs)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
} else {
|
2019-12-03 15:38:08 +00:00
|
|
|
|
self.parse_closure_expr(attrs)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else if self.eat_keyword(kw::Await) {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
self.recover_incorrect_await_syntax(lo, self.prev_token.span, attrs)
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else {
|
|
|
|
|
self.parse_lit_expr(attrs)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
2019-12-03 15:38:08 +00:00
|
|
|
|
} else {
|
|
|
|
|
self.parse_lit_expr(attrs)
|
2019-12-03 14:31:45 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2019-12-03 14:31:45 +00:00
|
|
|
|
let lo = self.token.span;
|
|
|
|
|
match self.parse_opt_lit() {
|
|
|
|
|
Some(literal) => {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal), attrs);
|
2019-12-03 14:31:45 +00:00
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
2020-04-17 17:10:29 +00:00
|
|
|
|
None => self.try_macro_suggestion(),
|
2019-12-03 14:31:45 +00:00
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2021-03-19 23:52:07 +00:00
|
|
|
|
fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2019-12-03 10:36:40 +00:00
|
|
|
|
let lo = self.token.span;
|
2019-12-04 09:13:29 +00:00
|
|
|
|
self.expect(&token::OpenDelim(token::Paren))?;
|
|
|
|
|
let (es, trailing_comma) = match self.parse_seq_to_end(
|
|
|
|
|
&token::CloseDelim(token::Paren),
|
|
|
|
|
SeqSep::trailing_allowed(token::Comma),
|
|
|
|
|
|p| p.parse_expr_catch_underscore(),
|
|
|
|
|
) {
|
2019-12-03 10:36:40 +00:00
|
|
|
|
Ok(x) => x,
|
|
|
|
|
Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))),
|
|
|
|
|
};
|
|
|
|
|
let kind = if es.len() == 1 && !trailing_comma {
|
|
|
|
|
// `(e)` is parenthesized `e`.
|
2020-03-03 00:19:00 +00:00
|
|
|
|
ExprKind::Paren(es.into_iter().next().unwrap())
|
2019-12-03 10:36:40 +00:00
|
|
|
|
} else {
|
|
|
|
|
// `(e,)` is a tuple with only one field, `e`.
|
|
|
|
|
ExprKind::Tup(es)
|
|
|
|
|
};
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
|
2019-12-03 10:36:40 +00:00
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
fn parse_array_or_repeat_expr(
|
|
|
|
|
&mut self,
|
|
|
|
|
attrs: AttrVec,
|
|
|
|
|
close_delim: token::DelimToken,
|
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
2019-12-03 10:49:56 +00:00
|
|
|
|
let lo = self.token.span;
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
self.bump(); // `[` or other open delim
|
2019-12-03 10:49:56 +00:00
|
|
|
|
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
let close = &token::CloseDelim(close_delim);
|
2019-12-04 09:13:29 +00:00
|
|
|
|
let kind = if self.eat(close) {
|
2019-12-03 10:49:56 +00:00
|
|
|
|
// Empty vector
|
|
|
|
|
ExprKind::Array(Vec::new())
|
|
|
|
|
} else {
|
|
|
|
|
// Non-empty vector
|
|
|
|
|
let first_expr = self.parse_expr()?;
|
|
|
|
|
if self.eat(&token::Semi) {
|
|
|
|
|
// Repeating array syntax: `[ 0; 512 ]`
|
2019-12-08 07:19:53 +00:00
|
|
|
|
let count = self.parse_anon_const_expr()?;
|
2019-12-04 09:13:29 +00:00
|
|
|
|
self.expect(close)?;
|
2019-12-03 10:49:56 +00:00
|
|
|
|
ExprKind::Repeat(first_expr, count)
|
|
|
|
|
} else if self.eat(&token::Comma) {
|
|
|
|
|
// Vector with two or more elements.
|
2019-12-04 09:13:29 +00:00
|
|
|
|
let sep = SeqSep::trailing_allowed(token::Comma);
|
|
|
|
|
let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
|
2019-12-03 10:49:56 +00:00
|
|
|
|
let mut exprs = vec![first_expr];
|
|
|
|
|
exprs.extend(remaining_exprs);
|
|
|
|
|
ExprKind::Array(exprs)
|
|
|
|
|
} else {
|
|
|
|
|
// Vector with one element
|
2019-12-04 09:13:29 +00:00
|
|
|
|
self.expect(close)?;
|
2019-12-03 10:49:56 +00:00
|
|
|
|
ExprKind::Array(vec![first_expr])
|
|
|
|
|
}
|
|
|
|
|
};
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
|
2019-12-03 10:49:56 +00:00
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_path_start_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2020-12-10 12:20:07 +00:00
|
|
|
|
let (qself, path) = if self.eat_lt() {
|
|
|
|
|
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
|
|
|
|
|
(Some(qself), path)
|
|
|
|
|
} else {
|
|
|
|
|
(None, self.parse_path(PathStyle::Expr)?)
|
|
|
|
|
};
|
2020-05-02 01:24:14 +00:00
|
|
|
|
let lo = path.span;
|
2019-12-03 11:43:45 +00:00
|
|
|
|
|
|
|
|
|
// `!`, as an operator, is prefix, so we know this isn't that.
|
|
|
|
|
let (hi, kind) = if self.eat(&token::Not) {
|
|
|
|
|
// MACRO INVOCATION expression
|
2020-12-10 12:20:07 +00:00
|
|
|
|
if qself.is_some() {
|
|
|
|
|
self.struct_span_err(path.span, "macros cannot use qualified paths").emit();
|
|
|
|
|
}
|
2020-02-29 16:32:20 +00:00
|
|
|
|
let mac = MacCall {
|
2019-12-03 11:43:45 +00:00
|
|
|
|
path,
|
|
|
|
|
args: self.parse_mac_args()?,
|
|
|
|
|
prior_type_ascription: self.last_type_ascription,
|
|
|
|
|
};
|
2020-02-29 16:32:20 +00:00
|
|
|
|
(self.prev_token.span, ExprKind::MacCall(mac))
|
2019-12-03 11:43:45 +00:00
|
|
|
|
} else if self.check(&token::OpenDelim(token::Brace)) {
|
2020-12-10 12:20:07 +00:00
|
|
|
|
if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
|
|
|
|
|
if qself.is_some() {
|
|
|
|
|
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
|
|
|
|
|
}
|
2019-12-03 11:43:45 +00:00
|
|
|
|
return expr;
|
|
|
|
|
} else {
|
2020-12-10 12:20:07 +00:00
|
|
|
|
(path.span, ExprKind::Path(qself, path))
|
2019-12-03 11:43:45 +00:00
|
|
|
|
}
|
|
|
|
|
} else {
|
2020-12-10 12:20:07 +00:00
|
|
|
|
(path.span, ExprKind::Path(qself, path))
|
2019-12-03 11:43:45 +00:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let expr = self.mk_expr(lo.to(hi), kind, attrs);
|
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
2020-03-05 06:54:22 +00:00
|
|
|
|
/// Parse `'label: $expr`. The label is already parsed.
|
2021-01-21 04:03:29 +00:00
|
|
|
|
fn parse_labeled_expr(
|
|
|
|
|
&mut self,
|
|
|
|
|
label: Label,
|
|
|
|
|
attrs: AttrVec,
|
2022-01-13 23:44:17 +00:00
|
|
|
|
mut consume_colon: bool,
|
2021-01-21 04:03:29 +00:00
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
2019-12-03 11:48:08 +00:00
|
|
|
|
let lo = label.ident.span;
|
2020-03-05 00:47:15 +00:00
|
|
|
|
let label = Some(label);
|
2020-03-05 06:54:22 +00:00
|
|
|
|
let ate_colon = self.eat(&token::Colon);
|
|
|
|
|
let expr = if self.eat_keyword(kw::While) {
|
2020-03-05 00:47:15 +00:00
|
|
|
|
self.parse_while_expr(label, lo, attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::For) {
|
|
|
|
|
self.parse_for_expr(label, lo, attrs)
|
|
|
|
|
} else if self.eat_keyword(kw::Loop) {
|
|
|
|
|
self.parse_loop_expr(label, lo, attrs)
|
2020-03-05 04:49:30 +00:00
|
|
|
|
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
|
2020-03-05 00:47:15 +00:00
|
|
|
|
self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs)
|
2022-01-13 23:44:17 +00:00
|
|
|
|
} else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) {
|
2022-02-02 18:33:13 +00:00
|
|
|
|
// We're probably inside of a `Path<'a>` that needs a turbofish
|
|
|
|
|
let msg = "expected `while`, `for`, `loop` or `{` after a label";
|
|
|
|
|
self.struct_span_err(self.token.span, msg).span_label(self.token.span, msg).emit();
|
2022-01-13 23:44:17 +00:00
|
|
|
|
consume_colon = false;
|
|
|
|
|
Ok(self.mk_expr_err(lo))
|
2020-03-05 00:47:15 +00:00
|
|
|
|
} else {
|
2022-01-25 19:08:37 +00:00
|
|
|
|
let msg = "expected `while`, `for`, `loop` or `{` after a label";
|
2020-03-05 00:47:15 +00:00
|
|
|
|
self.struct_span_err(self.token.span, msg).span_label(self.token.span, msg).emit();
|
|
|
|
|
// Continue as an expression in an effort to recover on `'label: non_block_expr`.
|
|
|
|
|
self.parse_expr()
|
2020-03-05 06:54:22 +00:00
|
|
|
|
}?;
|
|
|
|
|
|
2021-01-21 04:03:29 +00:00
|
|
|
|
if !ate_colon && consume_colon {
|
2020-03-06 02:43:59 +00:00
|
|
|
|
self.error_labeled_expr_must_be_followed_by_colon(lo, expr.span);
|
2019-12-03 11:48:08 +00:00
|
|
|
|
}
|
2020-03-05 06:54:22 +00:00
|
|
|
|
|
|
|
|
|
Ok(expr)
|
2019-12-03 11:48:08 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-03-06 02:43:59 +00:00
|
|
|
|
fn error_labeled_expr_must_be_followed_by_colon(&self, lo: Span, span: Span) {
|
|
|
|
|
self.struct_span_err(span, "labeled expression must be followed by `:`")
|
|
|
|
|
.span_label(lo, "the label")
|
|
|
|
|
.span_suggestion_short(
|
|
|
|
|
lo.shrink_to_hi(),
|
|
|
|
|
"add `:` after the label",
|
|
|
|
|
": ".to_string(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
2020-03-07 08:41:12 +00:00
|
|
|
|
.note("labels are used before loops and blocks, allowing e.g., `break 'label` to them")
|
2020-03-06 02:43:59 +00:00
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 12:35:05 +00:00
|
|
|
|
/// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn recover_do_catch(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2019-12-03 12:35:05 +00:00
|
|
|
|
let lo = self.token.span;
|
|
|
|
|
|
|
|
|
|
self.bump(); // `do`
|
|
|
|
|
self.bump(); // `catch`
|
|
|
|
|
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let span_dc = lo.to(self.prev_token.span);
|
2019-12-03 12:35:05 +00:00
|
|
|
|
self.struct_span_err(span_dc, "found removed `do catch` syntax")
|
|
|
|
|
.span_suggestion(
|
|
|
|
|
span_dc,
|
|
|
|
|
"replace with the new syntax",
|
|
|
|
|
"try".to_string(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.note("following RFC #2388, the new non-placeholder syntax is `try`")
|
|
|
|
|
.emit();
|
|
|
|
|
|
|
|
|
|
self.parse_try_block(lo, attrs)
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 13:01:24 +00:00
|
|
|
|
/// Parse an expression if the token can begin one.
|
|
|
|
|
fn parse_expr_opt(&mut self) -> PResult<'a, Option<P<Expr>>> {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
Ok(if self.token.can_begin_expr() { Some(self.parse_expr()?) } else { None })
|
2019-12-03 13:01:24 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parse `"return" expr?`.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_return_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let lo = self.prev_token.span;
|
2019-12-03 13:01:24 +00:00
|
|
|
|
let kind = ExprKind::Ret(self.parse_expr_opt()?);
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
|
2019-12-03 13:01:24 +00:00
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
2021-07-10 14:38:55 +00:00
|
|
|
|
/// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten.
|
|
|
|
|
/// If the label is followed immediately by a `:` token, the label and `:` are
|
|
|
|
|
/// parsed as part of the expression (i.e. a labeled loop). The language team has
|
|
|
|
|
/// decided in #87026 to require parentheses as a visual aid to avoid confusion if
|
|
|
|
|
/// the break expression of an unlabeled break is a labeled loop (as in
|
|
|
|
|
/// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
|
|
|
|
|
/// expression only gets a warning for compatibility reasons; and a labeled break
|
|
|
|
|
/// with a labeled loop does not even get a warning because there is no ambiguity.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_break_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let lo = self.prev_token.span;
|
2021-07-10 14:38:55 +00:00
|
|
|
|
let mut label = self.eat_label();
|
|
|
|
|
let kind = if label.is_some() && self.token == token::Colon {
|
|
|
|
|
// The value expression can be a labeled loop, see issue #86948, e.g.:
|
|
|
|
|
// `loop { break 'label: loop { break 'label 42; }; }`
|
|
|
|
|
let lexpr = self.parse_labeled_expr(label.take().unwrap(), AttrVec::new(), true)?;
|
|
|
|
|
self.struct_span_err(
|
|
|
|
|
lexpr.span,
|
|
|
|
|
"parentheses are required around this expression to avoid confusion with a labeled break expression",
|
|
|
|
|
)
|
|
|
|
|
.multipart_suggestion(
|
|
|
|
|
"wrap the expression in parentheses",
|
|
|
|
|
vec![
|
|
|
|
|
(lexpr.span.shrink_to_lo(), "(".to_string()),
|
|
|
|
|
(lexpr.span.shrink_to_hi(), ")".to_string()),
|
|
|
|
|
],
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
|
|
|
|
Some(lexpr)
|
|
|
|
|
} else if self.token != token::OpenDelim(token::Brace)
|
2019-12-03 14:06:34 +00:00
|
|
|
|
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
|
|
|
|
{
|
2021-07-10 14:38:55 +00:00
|
|
|
|
let expr = self.parse_expr_opt()?;
|
|
|
|
|
if let Some(ref expr) = expr {
|
|
|
|
|
if label.is_some()
|
|
|
|
|
&& matches!(
|
|
|
|
|
expr.kind,
|
|
|
|
|
ExprKind::While(_, _, None)
|
|
|
|
|
| ExprKind::ForLoop(_, _, _, None)
|
|
|
|
|
| ExprKind::Loop(_, None)
|
|
|
|
|
| ExprKind::Block(_, None)
|
|
|
|
|
)
|
|
|
|
|
{
|
|
|
|
|
self.sess.buffer_lint_with_diagnostic(
|
|
|
|
|
BREAK_WITH_LABEL_AND_LOOP,
|
|
|
|
|
lo.to(expr.span),
|
|
|
|
|
ast::CRATE_NODE_ID,
|
|
|
|
|
"this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression",
|
|
|
|
|
BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
expr
|
2019-12-03 14:06:34 +00:00
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
};
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind), attrs);
|
2019-12-03 14:06:34 +00:00
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-03 13:01:24 +00:00
|
|
|
|
/// Parse `"yield" expr?`.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_yield_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let lo = self.prev_token.span;
|
2019-12-03 13:01:24 +00:00
|
|
|
|
let kind = ExprKind::Yield(self.parse_expr_opt()?);
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let span = lo.to(self.prev_token.span);
|
2019-12-03 13:01:24 +00:00
|
|
|
|
self.sess.gated_spans.gate(sym::generators, span);
|
|
|
|
|
let expr = self.mk_expr(span, kind, attrs);
|
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
|
|
|
|
}
|
|
|
|
|
|
2019-11-16 17:11:05 +00:00
|
|
|
|
/// Returns a string literal if the next token is a string literal.
|
|
|
|
|
/// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
|
|
|
|
|
/// and returns `None` if the next token is not literal at all.
|
2019-11-10 14:04:12 +00:00
|
|
|
|
pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
|
|
|
|
|
match self.parse_opt_lit() {
|
|
|
|
|
Some(lit) => match lit.kind {
|
|
|
|
|
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
|
|
|
|
|
style,
|
|
|
|
|
symbol: lit.token.symbol,
|
|
|
|
|
suffix: lit.token.suffix,
|
|
|
|
|
span: lit.span,
|
|
|
|
|
symbol_unescaped,
|
|
|
|
|
}),
|
|
|
|
|
_ => Err(Some(lit)),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
},
|
2019-11-10 14:04:12 +00:00
|
|
|
|
None => Err(None),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-10-08 07:35:34 +00:00
|
|
|
|
pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
|
2019-11-10 12:32:41 +00:00
|
|
|
|
self.parse_opt_lit().ok_or_else(|| {
|
2021-09-16 19:00:11 +00:00
|
|
|
|
if let token::Interpolated(inner) = &self.token.kind {
|
|
|
|
|
let expr = match inner.as_ref() {
|
|
|
|
|
token::NtExpr(expr) => Some(expr),
|
|
|
|
|
token::NtLiteral(expr) => Some(expr),
|
|
|
|
|
_ => None,
|
|
|
|
|
};
|
|
|
|
|
if let Some(expr) = expr {
|
|
|
|
|
if matches!(expr.kind, ExprKind::Err) {
|
|
|
|
|
self.diagnostic()
|
|
|
|
|
.delay_span_bug(self.token.span, &"invalid interpolated expression");
|
|
|
|
|
return self.diagnostic().struct_dummy();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-12-07 02:07:35 +00:00
|
|
|
|
let msg = format!("unexpected token: {}", super::token_descr(&self.token));
|
2019-12-30 23:20:41 +00:00
|
|
|
|
self.struct_span_err(self.token.span, &msg)
|
2019-11-10 12:32:41 +00:00
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Matches `lit = true | false | token_lit`.
    /// Returns `None` if the next token is not a literal.
    pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
        let mut recovered = None;
        if self.token == token::Dot {
            // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
            // dot would follow an optional literal, so we do this unconditionally.
            recovered = self.look_ahead(1, |next_token| {
                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                    next_token.kind
                {
                    // Only merge `.` and the integer when they are adjacent
                    // (no whitespace between them).
                    if self.token.span.hi() == next_token.span.lo() {
                        let s = String::from("0.") + symbol.as_str();
                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                    }
                }
                None
            });
            if let Some(token) = &recovered {
                // Consume the `.`; the integer is consumed by the `bump` below
                // because `recovered` replaces `self.token` for `from_token`.
                self.bump();
                self.error_float_lits_must_have_int_part(&token);
            }
        }

        // Use the recovered `0.4`-style token if we built one, else the real token.
        let token = recovered.as_ref().unwrap_or(&self.token);
        match Lit::from_token(token) {
            Ok(lit) => {
                self.bump();
                Some(lit)
            }
            // Not a literal at all: leave the token for the caller to try something else.
            Err(LitError::NotLiteral) => None,
            Err(err) => {
                let span = token.span;
                let token::Literal(lit) = token.kind else {
                    unreachable!();
                };
                self.bump();
                self.report_lit_error(err, lit, span);
                // Pack possible quotes and prefixes from the original literal into
                // the error literal's symbol so they can be pretty-printed faithfully.
                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                let symbol = Symbol::intern(&suffixless_lit.to_string());
                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                Some(Lit::from_lit_token(lit, span).unwrap_or_else(|_| unreachable!()))
            }
        }
    }
|
|
|
|
|
|
2019-12-06 22:51:18 +00:00
|
|
|
|
fn error_float_lits_must_have_int_part(&self, token: &Token) {
|
|
|
|
|
self.struct_span_err(token.span, "float literals must have an integer part")
|
|
|
|
|
.span_suggestion(
|
|
|
|
|
token.span,
|
|
|
|
|
"must have an integer part",
|
2021-08-22 16:55:45 +00:00
|
|
|
|
pprust::token_to_string(token).into(),
|
2019-12-06 22:51:18 +00:00
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
|
2019-11-10 12:32:41 +00:00
|
|
|
|
fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
|
2019-10-11 16:40:56 +00:00
|
|
|
|
// Checks if `s` looks like i32 or u1234 etc.
|
|
|
|
|
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
|
2019-10-11 16:40:56 +00:00
|
|
|
|
}
|
|
|
|
|
|
2022-01-17 22:13:15 +00:00
|
|
|
|
// Try to lowercase the prefix if it's a valid base prefix.
|
|
|
|
|
fn fix_base_capitalisation(s: &str) -> Option<String> {
|
2022-02-03 20:44:47 +00:00
|
|
|
|
if let Some(stripped) = s.strip_prefix('B') {
|
2022-01-17 22:13:15 +00:00
|
|
|
|
Some(format!("0b{stripped}"))
|
2022-02-03 20:44:47 +00:00
|
|
|
|
} else if let Some(stripped) = s.strip_prefix('O') {
|
2022-01-17 22:13:15 +00:00
|
|
|
|
Some(format!("0o{stripped}"))
|
2022-02-03 20:44:47 +00:00
|
|
|
|
} else if let Some(stripped) = s.strip_prefix('X') {
|
2022-01-17 22:13:15 +00:00
|
|
|
|
Some(format!("0x{stripped}"))
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-10-11 16:40:56 +00:00
|
|
|
|
let token::Lit { kind, suffix, .. } = lit;
|
|
|
|
|
match err {
|
|
|
|
|
// `NotLiteral` is not an error by itself, so we don't report
|
|
|
|
|
// it and give the parser opportunity to try something else.
|
|
|
|
|
LitError::NotLiteral => {}
|
|
|
|
|
// `LexerError` *is* an error, but it was already reported
|
|
|
|
|
// by lexer, so here we don't report it the second time.
|
|
|
|
|
LitError::LexerError => {}
|
|
|
|
|
LitError::InvalidSuffix => {
|
|
|
|
|
self.expect_no_suffix(
|
|
|
|
|
span,
|
|
|
|
|
&format!("{} {} literal", kind.article(), kind.descr()),
|
|
|
|
|
suffix,
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
LitError::InvalidIntSuffix => {
|
2021-12-14 21:32:21 +00:00
|
|
|
|
let suf = suffix.expect("suffix error with no suffix");
|
|
|
|
|
let suf = suf.as_str();
|
2019-10-11 16:40:56 +00:00
|
|
|
|
if looks_like_width_suffix(&['i', 'u'], &suf) {
|
|
|
|
|
// If it looks like a width, try to be helpful.
|
|
|
|
|
let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
|
|
|
|
|
self.struct_span_err(span, &msg)
|
|
|
|
|
.help("valid widths are 8, 16, 32, 64 and 128")
|
|
|
|
|
.emit();
|
2022-01-17 22:13:15 +00:00
|
|
|
|
} else if let Some(fixed) = fix_base_capitalisation(suf) {
|
2022-01-27 22:22:33 +00:00
|
|
|
|
let msg = "invalid base prefix for number literal";
|
2022-01-17 22:13:15 +00:00
|
|
|
|
|
|
|
|
|
self.struct_span_err(span, &msg)
|
2022-01-27 22:22:33 +00:00
|
|
|
|
.note("base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase")
|
2022-01-17 22:13:15 +00:00
|
|
|
|
.span_suggestion(
|
|
|
|
|
span,
|
|
|
|
|
"try making the prefix lowercase",
|
|
|
|
|
fixed,
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
2019-10-11 16:40:56 +00:00
|
|
|
|
} else {
|
2020-11-22 22:29:46 +00:00
|
|
|
|
let msg = format!("invalid suffix `{}` for number literal", suf);
|
2019-10-11 16:40:56 +00:00
|
|
|
|
self.struct_span_err(span, &msg)
|
|
|
|
|
.span_label(span, format!("invalid suffix `{}`", suf))
|
2020-11-22 22:29:46 +00:00
|
|
|
|
.help("the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)")
|
2019-10-11 16:40:56 +00:00
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
LitError::InvalidFloatSuffix => {
|
2021-12-14 21:32:21 +00:00
|
|
|
|
let suf = suffix.expect("suffix error with no suffix");
|
|
|
|
|
let suf = suf.as_str();
|
2021-12-15 03:39:23 +00:00
|
|
|
|
if looks_like_width_suffix(&['f'], suf) {
|
2019-10-11 16:40:56 +00:00
|
|
|
|
// If it looks like a width, try to be helpful.
|
|
|
|
|
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit();
|
2019-10-11 16:40:56 +00:00
|
|
|
|
} else {
|
|
|
|
|
let msg = format!("invalid suffix `{}` for float literal", suf);
|
|
|
|
|
self.struct_span_err(span, &msg)
|
|
|
|
|
.span_label(span, format!("invalid suffix `{}`", suf))
|
|
|
|
|
.help("valid suffixes are `f32` and `f64`")
|
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
LitError::NonDecimalFloat(base) => {
|
|
|
|
|
let descr = match base {
|
|
|
|
|
16 => "hexadecimal",
|
|
|
|
|
8 => "octal",
|
|
|
|
|
2 => "binary",
|
|
|
|
|
_ => unreachable!(),
|
|
|
|
|
};
|
|
|
|
|
self.struct_span_err(span, &format!("{} float literal is not supported", descr))
|
|
|
|
|
.span_label(span, "not supported")
|
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
LitError::IntTooLarge => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.struct_span_err(span, "integer literal is too large").emit();
|
2019-10-11 16:40:56 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Errors when a suffix is present where none is allowed.
    ///
    /// Exception: common numeric suffixes on a tuple index (e.g. `x.0u32`)
    /// only *warn*, because they were incorrectly accepted on stable for a
    /// few releases (#59553) and macros in the wild still produce them.
    pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
        if let Some(suf) = suffix {
            let mut err = if kind == "a tuple index"
                && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
            {
                // #59553: warn instead of reject out of hand to allow the fix to percolate
                // through the ecosystem when people fix their macros
                let mut err = self
                    .sess
                    .span_diagnostic
                    .struct_span_warn(sp, &format!("suffixes on {} are invalid", kind));
                err.note(&format!(
                    "`{}` is *temporarily* accepted on tuple index fields as it was \
                        incorrectly accepted on stable for a few releases",
                    suf,
                ));
                err.help(
                    "on proc macros, you'll want to use `syn::Index::from` or \
                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
                        to tuple field access",
                );
                err.note(
                    "see issue #60210 <https://github.com/rust-lang/rust/issues/60210> \
                     for more information",
                );
                err
            } else {
                // Every other suffixed position is a hard error.
                self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
            };
            err.span_label(sp, format!("invalid suffix `{}`", suf));
            err.emit();
        }
    }
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
|
2020-03-16 22:36:14 +00:00
|
|
|
|
/// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
|
2020-08-28 21:04:42 +00:00
|
|
|
|
pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
|
2019-08-11 11:14:30 +00:00
|
|
|
|
maybe_whole_expr!(self);
|
|
|
|
|
|
|
|
|
|
let lo = self.token.span;
|
2019-12-06 22:44:23 +00:00
|
|
|
|
let minus_present = self.eat(&token::BinOp(token::Minus));
|
|
|
|
|
let lit = self.parse_lit()?;
|
|
|
|
|
let expr = self.mk_expr(lit.span, ExprKind::Lit(lit), AttrVec::new());
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
|
|
|
|
if minus_present {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
Ok(self.mk_expr(
|
|
|
|
|
lo.to(self.prev_token.span),
|
|
|
|
|
self.mk_unary(UnOp::Neg, expr),
|
|
|
|
|
AttrVec::new(),
|
|
|
|
|
))
|
2019-08-11 11:14:30 +00:00
|
|
|
|
} else {
|
|
|
|
|
Ok(expr)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
fn is_array_like_block(&mut self) -> bool {
|
|
|
|
|
self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
|
|
|
|
|
&& self.look_ahead(2, |t| t == &token::Comma)
|
|
|
|
|
&& self.look_ahead(3, |t| t.can_begin_expr())
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Emits a suggestion if it looks like the user meant an array but
|
|
|
|
|
/// accidentally used braces, causing the code to be interpreted as a block
|
|
|
|
|
/// expression.
|
|
|
|
|
fn maybe_suggest_brackets_instead_of_braces(
|
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
|
|
|
|
attrs: AttrVec,
|
|
|
|
|
) -> Option<P<Expr>> {
|
|
|
|
|
let mut snapshot = self.clone();
|
|
|
|
|
match snapshot.parse_array_or_repeat_expr(attrs, token::Brace) {
|
|
|
|
|
Ok(arr) => {
|
|
|
|
|
let hi = snapshot.prev_token.span;
|
|
|
|
|
self.struct_span_err(
|
|
|
|
|
arr.span,
|
|
|
|
|
"this code is interpreted as a block expression, not an array",
|
|
|
|
|
)
|
|
|
|
|
.multipart_suggestion(
|
|
|
|
|
"try using [] instead of {}",
|
|
|
|
|
vec![(lo, "[".to_owned()), (hi, "]".to_owned())],
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
)
|
|
|
|
|
.note("to define an array, one would use square brackets instead of curly braces")
|
|
|
|
|
.emit();
|
|
|
|
|
|
|
|
|
|
*self = snapshot;
|
|
|
|
|
Some(self.mk_expr_err(arr.span))
|
|
|
|
|
}
|
|
|
|
|
Err(mut e) => {
|
|
|
|
|
e.cancel();
|
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Parses a block or unsafe block.
|
2019-10-08 07:35:34 +00:00
|
|
|
|
pub(super) fn parse_block_expr(
|
2019-08-11 11:14:30 +00:00
|
|
|
|
&mut self,
|
|
|
|
|
opt_label: Option<Label>,
|
|
|
|
|
lo: Span,
|
|
|
|
|
blk_mode: BlockCheckMode,
|
2020-03-05 04:49:30 +00:00
|
|
|
|
mut attrs: AttrVec,
|
2019-08-11 11:14:30 +00:00
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
Suggest replacing braces for brackets on array-esque invalid block expr
Newcomers may write `{1, 2, 3}` for making arrays, and the current error
message is not informative enough to quickly convince them what is
needed to fix the error.
This PR implements a diagnostic for this case, and its output looks like
this:
```text
error: this code is interpreted as a block expression, not an array
--> src/lib.rs:1:22
|
1 | const FOO: [u8; 3] = {
| ______________________^
2 | | 1, 2, 3
3 | | };
| |_^
|
= note: to define an array, one would use square brackets instead of curly braces
help: try using [] instead of {}
|
1 | const FOO: [u8; 3] = [
2 | 1, 2, 3
3 | ];
|
```
Fix #87672
2021-08-06 19:34:29 +00:00
|
|
|
|
if self.is_array_like_block() {
|
|
|
|
|
if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo, attrs.clone()) {
|
|
|
|
|
return Ok(arr);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-09-21 21:54:05 +00:00
|
|
|
|
if let Some(label) = opt_label {
|
2019-10-30 15:38:16 +00:00
|
|
|
|
self.sess.gated_spans.gate(sym::label_break_value, label.ident.span);
|
2019-09-21 21:54:05 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-03-05 04:49:30 +00:00
|
|
|
|
if self.token.is_whole_block() {
|
|
|
|
|
self.struct_span_err(self.token.span, "cannot use a `block` macro fragment here")
|
|
|
|
|
.span_label(lo.to(self.token.span), "the `block` fragment is within this context")
|
|
|
|
|
.emit();
|
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2020-03-05 04:49:30 +00:00
|
|
|
|
let (inner_attrs, blk) = self.parse_block_common(lo, blk_mode)?;
|
|
|
|
|
attrs.extend(inner_attrs);
|
2019-09-06 02:56:45 +00:00
|
|
|
|
Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs))
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-03-21 07:32:55 +00:00
|
|
|
|
/// Recover on an explicitly quantified closure expression, e.g., `for<'a> |x: &'a u8| *x + 1`.
|
|
|
|
|
fn recover_quantified_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
|
|
|
|
let lo = self.token.span;
|
|
|
|
|
let _ = self.parse_late_bound_lifetime_defs()?;
|
|
|
|
|
let span_for = lo.to(self.prev_token.span);
|
|
|
|
|
let closure = self.parse_closure_expr(attrs)?;
|
|
|
|
|
|
|
|
|
|
self.struct_span_err(span_for, "cannot introduce explicit parameters for a closure")
|
|
|
|
|
.span_label(closure.span, "the parameters are attached to this closure")
|
|
|
|
|
.span_suggestion(
|
|
|
|
|
span_for,
|
|
|
|
|
"remove the parameters",
|
|
|
|
|
String::new(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
|
|
|
|
|
|
|
|
|
Ok(self.mk_expr_err(lo.to(closure.span)))
|
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 21:38:07 +00:00
|
|
|
|
    /// Parses a closure expression (e.g., `move |args| expr`).
    fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
        let lo = self.token.span;

        // `static |...|` — an immovable generator closure.
        let movability =
            if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };

        // `async` is only a keyword in edition 2018+; in 2015 it stays an identifier.
        let asyncness = if self.token.uninterpolated_span().rust_2018() {
            self.parse_asyncness()
        } else {
            Async::No
        };

        let capture_clause = self.parse_capture_clause()?;
        let decl = self.parse_fn_block_decl()?;
        let decl_hi = self.prev_token.span;
        let mut body = match decl.output {
            FnRetTy::Default(_) => {
                // Without a return type, the body is a plain expression; drop
                // STMT_EXPR so it parses as an expression, not a statement.
                let restrictions = self.restrictions - Restrictions::STMT_EXPR;
                self.parse_expr_res(restrictions, None)?
            }
            _ => {
                // If an explicit return type is given, require a block to appear (RFC 968).
                let body_lo = self.token.span;
                self.parse_block_expr(None, body_lo, BlockCheckMode::Default, AttrVec::new())?
            }
        };

        if let Async::Yes { span, .. } = asyncness {
            // Feature-gate `async ||` closures.
            self.sess.gated_spans.gate(sym::async_closure, span);
        }

        if self.token.kind == TokenKind::Semi && self.token_cursor.frame.delim == DelimToken::Paren
        {
            // It is likely that the closure body is a block but where the
            // braces have been removed. We will recover and eat the next
            // statements later in the parsing process.
            body = self.mk_expr_err(body.span);
        }

        let body_span = body.span;

        let closure = self.mk_expr(
            lo.to(body.span),
            ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
            attrs,
        );

        // Disable recovery for closure body
        let spans =
            ClosureSpans { whole_closure: closure.span, closing_pipe: decl_hi, body: body_span };
        self.current_closure = Some(spans);

        Ok(closure)
    }
|
|
|
|
|
|
2020-03-06 11:13:55 +00:00
|
|
|
|
/// Parses an optional `move` prefix to a closure-like construct.
|
2020-12-18 16:32:26 +00:00
|
|
|
|
fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
|
|
|
|
|
if self.eat_keyword(kw::Move) {
|
|
|
|
|
// Check for `move async` and recover
|
|
|
|
|
if self.check_keyword(kw::Async) {
|
|
|
|
|
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
|
|
|
|
|
Err(self.incorrect_move_async_order_found(move_async_span))
|
|
|
|
|
} else {
|
|
|
|
|
Ok(CaptureBy::Value)
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
Ok(CaptureBy::Ref)
|
|
|
|
|
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 18:04:09 +00:00
|
|
|
|
/// Parses the `|arg, arg|` header of a closure.
|
|
|
|
|
fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
|
2019-12-06 22:35:48 +00:00
|
|
|
|
let inputs = if self.eat(&token::OrOr) {
|
|
|
|
|
Vec::new()
|
|
|
|
|
} else {
|
|
|
|
|
self.expect(&token::BinOp(token::Or))?;
|
|
|
|
|
let args = self
|
|
|
|
|
.parse_seq_to_before_tokens(
|
|
|
|
|
&[&token::BinOp(token::Or), &token::OrOr],
|
|
|
|
|
SeqSep::trailing_allowed(token::Comma),
|
|
|
|
|
TokenExpectType::NoExpect,
|
|
|
|
|
|p| p.parse_fn_block_param(),
|
|
|
|
|
)?
|
|
|
|
|
.0;
|
|
|
|
|
self.expect_or()?;
|
|
|
|
|
args
|
2019-08-11 18:04:09 +00:00
|
|
|
|
};
|
2020-10-15 19:21:45 +00:00
|
|
|
|
let output =
|
|
|
|
|
self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
|
2019-08-11 18:04:09 +00:00
|
|
|
|
|
2019-12-06 22:35:48 +00:00
|
|
|
|
Ok(P(FnDecl { inputs, output }))
|
2019-08-11 18:04:09 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
    /// Parses a parameter in a closure header (e.g., `|arg, arg|`).
    fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
        let lo = self.token.span;
        let attrs = self.parse_outer_attributes()?;
        // Collect this parameter's tokens (attribute handling); a trailing
        // comma, if present, is kept via `TrailingToken::MaybeComma`.
        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
            let pat = this.parse_pat_no_top_alt(PARAM_EXPECTED)?;
            // `|x: T|` has an explicit type after the colon; otherwise the
            // type is left to inference.
            let ty = if this.eat(&token::Colon) {
                this.parse_ty()?
            } else {
                this.mk_ty(this.prev_token.span, TyKind::Infer)
            };

            Ok((
                Param {
                    attrs: attrs.into(),
                    ty,
                    pat,
                    span: lo.to(this.token.span),
                    id: DUMMY_NODE_ID,
                    is_placeholder: false,
                },
                TrailingToken::MaybeComma,
            ))
        })
    }
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
    /// Parses an `if` expression (`if` token already eaten).
    fn parse_if_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
        let lo = self.prev_token.span;
        let cond = self.parse_cond_expr()?;

        // If the condition is a binary expression whose right operand is a
        // block (e.g. `if x == { ... }`), the user likely forgot the right
        // operand; remember the operator span for the diagnostics below.
        let missing_then_block_binop_span = || {
            match cond.kind {
                ExprKind::Binary(Spanned { span: binop_span, .. }, _, ref right)
                    if let ExprKind::Block(..) = right.kind => Some(binop_span),
                _ => None
            }
        };

        // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
        // verify that the last statement is either an implicit return (no `;`) or an explicit
        // return. This won't catch blocks with an explicit `return`, but that would be caught by
        // the dead code lint.
        let thn = if self.token.is_keyword(kw::Else) || !cond.returns() {
            if let Some(binop_span) = missing_then_block_binop_span() {
                self.error_missing_if_then_block(lo, None, Some(binop_span)).emit();
                self.mk_block_err(cond.span)
            } else {
                self.error_missing_if_cond(lo, cond.span)
            }
        } else {
            let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
            let not_block = self.token != token::OpenDelim(token::Brace);
            let block = self.parse_block().map_err(|err| {
                if not_block {
                    self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span())
                } else {
                    err
                }
            })?;
            // Outer attributes are not permitted on the then-branch.
            self.error_on_if_block_attrs(lo, false, block.span, &attrs);
            block
        };
        let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els), attrs))
    }
|
|
|
|
|
|
2021-12-01 21:36:50 +00:00
|
|
|
|
fn error_missing_if_then_block(
|
|
|
|
|
&self,
|
|
|
|
|
if_span: Span,
|
|
|
|
|
err: Option<DiagnosticBuilder<'a>>,
|
|
|
|
|
binop_span: Option<Span>,
|
|
|
|
|
) -> DiagnosticBuilder<'a> {
|
|
|
|
|
let msg = "this `if` expression has a condition, but no block";
|
|
|
|
|
|
|
|
|
|
let mut err = if let Some(mut err) = err {
|
|
|
|
|
err.span_label(if_span, msg);
|
|
|
|
|
err
|
|
|
|
|
} else {
|
|
|
|
|
self.struct_span_err(if_span, msg)
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
if let Some(binop_span) = binop_span {
|
|
|
|
|
err.span_help(binop_span, "maybe you forgot the right operand of the condition?");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
err
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-06 22:23:30 +00:00
|
|
|
|
fn error_missing_if_cond(&self, lo: Span, span: Span) -> P<ast::Block> {
|
|
|
|
|
let sp = self.sess.source_map().next_point(lo);
|
|
|
|
|
self.struct_span_err(sp, "missing condition for `if` expression")
|
|
|
|
|
.span_label(sp, "expected if condition here")
|
|
|
|
|
.emit();
|
2019-12-31 00:57:42 +00:00
|
|
|
|
self.mk_block_err(span)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses the condition of a `if` or `while` expression.
|
2019-08-11 11:14:30 +00:00
|
|
|
|
fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
|
|
|
|
|
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
|
|
|
|
|
|
2019-09-26 13:39:48 +00:00
|
|
|
|
if let ExprKind::Let(..) = cond.kind {
|
2019-08-11 11:14:30 +00:00
|
|
|
|
// Remove the last feature gating of a `let` expression since it's stable.
|
2019-10-30 15:38:16 +00:00
|
|
|
|
self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
Ok(cond)
|
|
|
|
|
}
|
|
|
|
|
|
2019-08-18 20:04:28 +00:00
|
|
|
|
/// Parses a `let $pat = $expr` pseudo-expression.
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// The `let` token has already been eaten.
|
2019-12-03 15:38:34 +00:00
|
|
|
|
fn parse_let_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
|
2020-02-29 11:56:15 +00:00
|
|
|
|
let lo = self.prev_token.span;
|
2021-07-13 11:18:03 +00:00
|
|
|
|
let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
|
2019-08-11 11:14:30 +00:00
|
|
|
|
self.expect(&token::Eq)?;
|
2020-10-03 18:30:32 +00:00
|
|
|
|
let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
|
|
|
|
|
})?;
|
2019-08-11 11:14:30 +00:00
|
|
|
|
let span = lo.to(expr.span);
|
2019-10-30 15:38:16 +00:00
|
|
|
|
self.sess.gated_spans.gate(sym::let_chains, span);
|
2021-08-08 14:49:13 +00:00
|
|
|
|
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span), attrs))
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses an `else { ... }` expression (`else` token already eaten).
|
2019-08-11 11:14:30 +00:00
|
|
|
|
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
|
2020-03-07 16:16:29 +00:00
|
|
|
|
let ctx_span = self.prev_token.span; // `else`
|
2021-01-22 18:28:08 +00:00
|
|
|
|
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
|
2020-03-07 16:16:29 +00:00
|
|
|
|
let expr = if self.eat_keyword(kw::If) {
|
|
|
|
|
self.parse_if_expr(AttrVec::new())?
|
2019-08-11 11:14:30 +00:00
|
|
|
|
} else {
|
|
|
|
|
let blk = self.parse_block()?;
|
2020-03-07 16:16:29 +00:00
|
|
|
|
self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new())
|
|
|
|
|
};
|
|
|
|
|
self.error_on_if_block_attrs(ctx_span, true, expr.span, &attrs);
|
|
|
|
|
Ok(expr)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
    /// Emits an error when outer attributes were written on an `if`/`else`
    /// branch (they are not allowed there), suggesting their removal.
    fn error_on_if_block_attrs(
        &self,
        ctx_span: Span,
        is_ctx_else: bool,
        branch_span: Span,
        attrs: &[ast::Attribute],
    ) {
        let (span, last) = match attrs {
            [] => return,
            // One attribute (`x0 @ xn` binds both names to the single element),
            // or the span from the first through the last attribute.
            [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
        };
        let ctx = if is_ctx_else { "else" } else { "if" };
        self.struct_span_err(last, "outer attributes are not allowed on `if` and `else` branches")
            .span_label(branch_span, "the attributes are attached to this branch")
            .span_label(ctx_span, format!("the branch belongs to this `{}`", ctx))
            .span_suggestion(
                span,
                "remove the attributes",
                String::new(),
                Applicability::MachineApplicable,
            )
            .emit();
    }
|
|
|
|
|
|
2019-12-06 21:41:10 +00:00
|
|
|
|
    /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
    fn parse_for_expr(
        &mut self,
        opt_label: Option<Label>,
        lo: Span,
        mut attrs: AttrVec,
    ) -> PResult<'a, P<Expr>> {
        // Record whether we are about to parse `for (`.
        // This is used below for recovery in case of `for ( $stuff ) $block`
        // in which case we will suggest `for $stuff $block`.
        let begin_paren = match self.token.kind {
            token::OpenDelim(token::Paren) => Some(self.token.span),
            _ => None,
        };

        let pat = self.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
        if !self.eat_keyword(kw::In) {
            // `in` missing (or `of` used instead): report but keep parsing.
            self.error_missing_in_for_loop();
        }
        // `for x in in y` typo recovery.
        self.check_for_for_in_in_typo(self.prev_token.span);
        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;

        // Recovery for a parenthesized head: `for (pat in iter) { ... }`.
        let pat = self.recover_parens_around_for_head(pat, begin_paren);

        let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);

        let kind = ExprKind::ForLoop(pat, expr, loop_block, opt_label);
        Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
    }
|
|
|
|
|
|
2020-08-09 03:53:40 +00:00
|
|
|
|
/// Emits an error for a `for` loop missing its `in` keyword, suggesting either
/// replacing a JavaScript-style `of` with `in`, or inserting `in` between the
/// pattern and the iterator expression.
fn error_missing_in_for_loop(&mut self) {
    let (span, msg, sugg) = if self.token.is_ident_named(sym::of) {
        // Possibly using JS syntax (#75311).
        let span = self.token.span;
        self.bump();
        (span, "try using `in` here instead", "in")
    } else {
        // Point at the gap between the pattern and the expression.
        (self.prev_token.span.between(self.token.span), "try adding `in` here", " in ")
    };
    self.struct_span_err(span, "missing `in` in `for` loop")
        .span_suggestion_short(
            span,
            msg,
            sugg.into(),
            // Has been misleading, at least in the past (closed Issue #48492).
            Applicability::MaybeIncorrect,
        )
        .emit();
}
|
|
|
|
|
|
|
|
|
|
/// Parses a `while` or `while let` expression (`while` token already eaten).
///
/// Inner attributes of the body block are hoisted onto the `while` expression.
fn parse_while_expr(
    &mut self,
    opt_label: Option<Label>,
    lo: Span,
    mut attrs: AttrVec,
) -> PResult<'a, P<Expr>> {
    let cond = self.parse_cond_expr()?;
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::While(cond, body, opt_label), attrs))
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses `loop { ... }` (`loop` token already eaten).
///
/// Inner attributes of the body block are hoisted onto the `loop` expression.
fn parse_loop_expr(
    &mut self,
    opt_label: Option<Label>,
    lo: Span,
    mut attrs: AttrVec,
) -> PResult<'a, P<Expr>> {
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::Loop(body, opt_label), attrs))
}
|
|
|
|
|
|
|
|
|
|
fn eat_label(&mut self) -> Option<Label> {
|
2019-12-06 21:41:10 +00:00
|
|
|
|
self.token.lifetime().map(|ident| {
|
2019-08-11 11:14:30 +00:00
|
|
|
|
self.bump();
|
2020-02-24 10:04:13 +00:00
|
|
|
|
Label { ident }
|
2019-12-06 21:41:10 +00:00
|
|
|
|
})
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses a `match ... { ... }` expression (`match` token already eaten).
///
/// On a failed arm parse, recovers by skipping to the end of the block and
/// returning the arms collected so far rather than aborting the whole `match`.
fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
    let match_span = self.prev_token.span;
    let lo = self.prev_token.span;
    // Struct literals are not allowed in the scrutinee position.
    let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
        if self.token == token::Semi {
            // `match expr;` — the `match` keyword itself is likely spurious.
            e.span_suggestion_short(
                match_span,
                "try removing this `match`",
                String::new(),
                Applicability::MaybeIncorrect, // speculative
            );
        }
        return Err(e);
    }
    attrs.extend(self.parse_inner_attributes()?);

    let mut arms: Vec<Arm> = Vec::new();
    while self.token != token::CloseDelim(token::Brace) {
        match self.parse_arm() {
            Ok(arm) => arms.push(arm),
            Err(mut e) => {
                // Recover by skipping to the end of the block.
                e.emit();
                self.recover_stmt();
                let span = lo.to(self.token.span);
                if self.token == token::CloseDelim(token::Brace) {
                    self.bump();
                }
                // Return a partial `match` so later passes can keep going.
                return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs));
            }
        }
    }
    let hi = self.token.span;
    self.bump();
    Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs))
}
|
|
|
|
|
|
2021-02-26 03:29:50 +00:00
|
|
|
|
/// Attempt to recover from match arm body with statements and no surrounding braces.
///
/// Triggered when the arm body is followed by `;`. Speculatively parses further
/// statements (cloning the parser as a snapshot so it can backtrack) to decide
/// between a `,` -> `;` typo and a brace-less multi-statement body, emitting the
/// appropriate suggestion. Returns `Some(recovered_body)` on success, or `None`
/// after restoring the parser to its starting state.
fn parse_arm_body_missing_braces(
    &mut self,
    first_expr: &P<Expr>,
    arrow_span: Span,
) -> Option<P<Expr>> {
    if self.token.kind != token::Semi {
        return None;
    }
    // Snapshot so we can fully rewind if recovery does not apply.
    let start_snapshot = self.clone();
    let semi_sp = self.token.span;
    self.bump(); // `;`
    let mut stmts =
        vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
    // Emits the diagnostic for the collected statements and builds an `Err` expr
    // spanning them, so the AST stays well-formed.
    let err = |this: &mut Parser<'_>, stmts: Vec<ast::Stmt>| {
        let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
        let mut err = this.struct_span_err(span, "`match` arm body without braces");
        let (these, s, are) =
            if stmts.len() > 1 { ("these", "s", "are") } else { ("this", "", "is") };
        err.span_label(
            span,
            &format!(
                "{these} statement{s} {are} not surrounded by a body",
                these = these,
                s = s,
                are = are
            ),
        );
        err.span_label(arrow_span, "while parsing the `match` arm starting here");
        if stmts.len() > 1 {
            // Multiple statements: the user meant a braced body.
            err.multipart_suggestion(
                &format!("surround the statement{} with a body", s),
                vec![
                    (span.shrink_to_lo(), "{ ".to_string()),
                    (span.shrink_to_hi(), " }".to_string()),
                ],
                Applicability::MachineApplicable,
            );
        } else {
            // Single statement: the `;` was most likely a `,` typo.
            err.span_suggestion(
                semi_sp,
                "use a comma to end a `match` arm expression",
                ",".to_string(),
                Applicability::MachineApplicable,
            );
        }
        err.emit();
        this.mk_expr_err(span)
    };
    // We might have either a `,` -> `;` typo, or a block without braces. We need
    // a more subtle parsing strategy.
    loop {
        if self.token.kind == token::CloseDelim(token::Brace) {
            // We have reached the closing brace of the `match` expression.
            return Some(err(self, stmts));
        }
        if self.token.kind == token::Comma {
            // A comma means normal arm syntax resumes; rewind and bail.
            *self = start_snapshot;
            return None;
        }
        let pre_pat_snapshot = self.clone();
        match self.parse_pat_no_top_alt(None) {
            Ok(_pat) => {
                if self.token.kind == token::FatArrow {
                    // Reached arm end.
                    *self = pre_pat_snapshot;
                    return Some(err(self, stmts));
                }
            }
            Err(mut err) => {
                err.cancel();
            }
        }

        *self = pre_pat_snapshot;
        match self.parse_stmt_without_recovery(true, ForceCollect::No) {
            // Consume statements for as long as possible.
            Ok(Some(stmt)) => {
                stmts.push(stmt);
            }
            Ok(None) => {
                *self = start_snapshot;
                break;
            }
            // We couldn't parse either yet another statement missing its
            // enclosing block nor the next arm's pattern or closing brace.
            Err(mut stmt_err) => {
                stmt_err.cancel();
                *self = start_snapshot;
                break;
            }
        }
    }
    None
}
|
|
|
|
|
|
2019-10-08 07:35:34 +00:00
|
|
|
|
/// Parses a single `match` arm: `pat (if guard)? => body ,?`.
///
/// Handles feature-gating of `if let` guards, recovery from `=`/`->` typos for
/// `=>`, brace-less arm bodies, and missing-comma diagnostics.
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
    // Walks a guard expression; returns (contains a `let`, is not a binary op).
    // Used to decide which feature gates apply to the guard.
    fn check_let_expr(expr: &Expr) -> (bool, bool) {
        match expr.kind {
            ExprKind::Binary(_, ref lhs, ref rhs) => {
                let lhs_rslt = check_let_expr(lhs);
                let rhs_rslt = check_let_expr(rhs);
                (lhs_rslt.0 || rhs_rslt.0, false)
            }
            ExprKind::Let(..) => (true, true),
            _ => (false, true),
        }
    }
    let attrs = self.parse_outer_attributes()?;
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let lo = this.token.span;
        let pat = this.parse_pat_allow_top_alt(None, RecoverComma::Yes, RecoverColon::Yes)?;
        let guard = if this.eat_keyword(kw::If) {
            let if_span = this.prev_token.span;
            let cond = this.parse_expr()?;
            let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
            if has_let_expr {
                if does_not_have_bin_op {
                    // Remove the last feature gating of a `let` expression since it's stable.
                    this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
                }
                let span = if_span.to(cond.span);
                this.sess.gated_spans.gate(sym::if_let_guard, span);
            }
            Some(cond)
        } else {
            None
        };
        let arrow_span = this.token.span;
        if let Err(mut err) = this.expect(&token::FatArrow) {
            // We might have a `=>` -> `=` or `->` typo (issue #89396).
            if TokenKind::FatArrow
                .similar_tokens()
                .map_or(false, |similar_tokens| similar_tokens.contains(&this.token.kind))
            {
                err.span_suggestion(
                    this.token.span,
                    "try using a fat arrow here",
                    "=>".to_string(),
                    Applicability::MaybeIncorrect,
                );
                err.emit();
                this.bump();
            } else {
                return Err(err);
            }
        }
        let arm_start_span = this.token.span;

        let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
            err.span_label(arrow_span, "while parsing the `match` arm starting here");
            err
        })?;

        // Block-like bodies (and a body immediately before `}`) need no comma.
        let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
            && this.token != token::CloseDelim(token::Brace);

        let hi = this.prev_token.span;

        if require_comma {
            let sm = this.sess.source_map();
            // `;`-terminated body: try the brace-less-body recovery first.
            if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
                let span = body.span;
                return Ok((
                    ast::Arm {
                        attrs: attrs.into(),
                        pat,
                        guard,
                        body,
                        span,
                        id: DUMMY_NODE_ID,
                        is_placeholder: false,
                    },
                    TrailingToken::None,
                ));
            }
            this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
                |mut err| {
                    match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
                        (Ok(ref expr_lines), Ok(ref arm_start_lines))
                            if arm_start_lines.lines[0].end_col
                                == expr_lines.lines[0].end_col
                                && expr_lines.lines.len() == 2
                                && this.token == token::FatArrow =>
                        {
                            // We check whether there's any trailing code in the parse span,
                            // if there isn't, we very likely have the following:
                            //
                            // X |     &Y => "y"
                            //   |        --    - missing comma
                            //   |        |
                            //   |        arrow_span
                            // X |     &X => "x"
                            //   |      - ^^ self.token.span
                            //   |      |
                            //   |      parsed until here as `"y" & X`
                            err.span_suggestion_short(
                                arm_start_span.shrink_to_hi(),
                                "missing a comma here to end this `match` arm",
                                ",".to_owned(),
                                Applicability::MachineApplicable,
                            );
                        }
                        _ => {
                            err.span_label(
                                arrow_span,
                                "while parsing the `match` arm starting here",
                            );
                        }
                    }
                    err
                },
            )?;
        } else {
            this.eat(&token::Comma);
        }

        Ok((
            ast::Arm {
                attrs: attrs.into(),
                pat,
                guard,
                body: expr,
                span: lo.to(hi),
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            },
            TrailingToken::None,
        ))
    })
}
|
|
|
|
|
|
|
|
|
|
/// Parses a `try {...}` expression (`try` token already eaten).
///
/// Rejects the pre-2018 `try { ... } catch` form with a dedicated error, and
/// feature-gates accepted `try` blocks under `try_blocks`.
fn parse_try_block(&mut self, span_lo: Span, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    if self.eat_keyword(kw::Catch) {
        let mut error = self.struct_span_err(
            self.prev_token.span,
            "keyword `catch` cannot follow a `try` block",
        );
        error.help("try using `match` on the result of the `try` block instead");
        error.emit();
        Err(error)
    } else {
        let span = span_lo.to(body.span);
        self.sess.gated_spans.gate(sym::try_blocks, span);
        Ok(self.mk_expr(span, ExprKind::TryBlock(body), attrs))
    }
}
|
|
|
|
|
|
|
|
|
|
fn is_do_catch_block(&self) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.token.is_keyword(kw::Do)
|
|
|
|
|
&& self.is_keyword_ahead(1, &[kw::Catch])
|
|
|
|
|
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
|
|
|
|
|
&& !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn is_try_block(&self) -> bool {
|
2020-04-14 15:45:00 +00:00
|
|
|
|
self.token.is_keyword(kw::Try)
|
|
|
|
|
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
|
|
|
|
|
&& self.token.uninterpolated_span().rust_2018()
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Parses an `async move? {...}` expression.
fn parse_async_block(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
    let lo = self.token.span;
    self.expect_keyword(kw::Async)?;
    // Optional `move` capture clause.
    let capture_clause = self.parse_capture_clause()?;
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    let kind = ExprKind::Async(capture_clause, DUMMY_NODE_ID, body);
    Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
}
|
|
|
|
|
|
|
|
|
|
fn is_async_block(&self) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
self.token.is_keyword(kw::Async)
|
|
|
|
|
&& ((
|
|
|
|
|
// `async move {`
|
|
|
|
|
self.is_keyword_ahead(1, &[kw::Move])
|
|
|
|
|
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
|
|
|
|
|
) || (
|
|
|
|
|
// `async {`
|
2019-08-11 11:14:30 +00:00
|
|
|
|
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
|
2019-12-22 22:42:04 +00:00
|
|
|
|
))
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-12-04 03:31:44 +00:00
|
|
|
|
fn is_certainly_not_a_block(&self) -> bool {
|
|
|
|
|
self.look_ahead(1, |t| t.is_ident())
|
|
|
|
|
&& (
|
|
|
|
|
// `{ ident, ` cannot start a block.
|
|
|
|
|
self.look_ahead(2, |t| t == &token::Comma)
|
|
|
|
|
|| self.look_ahead(2, |t| t == &token::Colon)
|
|
|
|
|
&& (
|
|
|
|
|
// `{ ident: token, ` cannot start a block.
|
|
|
|
|
self.look_ahead(4, |t| t == &token::Comma) ||
|
|
|
|
|
// `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
|
|
|
|
|
self.look_ahead(3, |t| !t.can_begin_type())
|
|
|
|
|
)
|
|
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Attempts to parse a struct literal at the current position.
///
/// Returns `None` when the `{` should instead be treated as a block; otherwise
/// returns the struct-expression parse result, emitting an error (but still
/// producing the expression) when struct literals are restricted here.
fn maybe_parse_struct_expr(
    &mut self,
    qself: Option<&ast::QSelf>,
    path: &ast::Path,
    attrs: &AttrVec,
) -> Option<PResult<'a, P<Expr>>> {
    let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
    if struct_allowed || self.is_certainly_not_a_block() {
        if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) {
            return Some(Err(err));
        }
        let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
        if let (Ok(expr), false) = (&expr, struct_allowed) {
            // This is a struct literal, but we can't accept them here.
            self.error_struct_lit_not_allowed_here(path.span, expr.span);
        }
        return Some(expr);
    }
    None
}
|
|
|
|
|
|
2019-12-04 03:24:53 +00:00
|
|
|
|
/// Emits an error for a struct literal in a restricted position, suggesting
/// parenthesizing it (`lo`/`sp` are the literal's start and full spans).
fn error_struct_lit_not_allowed_here(&self, lo: Span, sp: Span) {
    self.struct_span_err(sp, "struct literals are not allowed here")
        .multipart_suggestion(
            "surround the struct literal with parentheses",
            vec![(lo.shrink_to_lo(), "(".to_string()), (sp.shrink_to_hi(), ")".to_string())],
            Applicability::MachineApplicable,
        )
        .emit();
}
|
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
|
/// Parses the field list of a struct literal up to (but not consuming) the
/// closing delimiter.
///
/// Returns the parsed fields, the functional-update base (`..expr` / bare `..`),
/// and whether an `async`-block-on-old-edition error was recovered (in which
/// case the caller replaces the expression with `ExprKind::Err`).
/// With `recover == false`, field errors are propagated instead of emitted.
pub(super) fn parse_struct_fields(
    &mut self,
    pth: ast::Path,
    recover: bool,
    close_delim: token::DelimToken,
) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
    let mut fields = Vec::new();
    let mut base = ast::StructRest::None;
    let mut recover_async = false;

    // Decorates a field-parse error when `pth` is the `async` keyword — i.e. the
    // user wrote `async { .. }` on an edition where that parses as a struct.
    let mut async_block_err = |e: &mut Diagnostic, span: Span| {
        recover_async = true;
        e.span_label(span, "`async` blocks are only allowed in Rust 2018 or later");
        e.help(&format!("set `edition = \"{}\"` in `Cargo.toml`", LATEST_STABLE_EDITION));
        e.note("for more on editions, read https://doc.rust-lang.org/edition-guide");
    };

    while self.token != token::CloseDelim(close_delim) {
        if self.eat(&token::DotDot) {
            let exp_span = self.prev_token.span;
            // We permit `.. }` on the left-hand side of a destructuring assignment.
            if self.check(&token::CloseDelim(close_delim)) {
                base = ast::StructRest::Rest(self.prev_token.span.shrink_to_hi());
                break;
            }
            match self.parse_expr() {
                Ok(e) => base = ast::StructRest::Base(e),
                Err(mut e) if recover => {
                    e.emit();
                    self.recover_stmt();
                }
                Err(e) => return Err(e),
            }
            // `..base,` is invalid — the base must be last.
            self.recover_struct_comma_after_dotdot(exp_span);
            break;
        }

        // Salvage a field even if the value expression fails to parse.
        let recovery_field = self.find_struct_error_after_field_looking_code();
        let parsed_field = match self.parse_expr_field() {
            Ok(f) => Some(f),
            Err(mut e) => {
                if pth == kw::Async {
                    async_block_err(&mut e, pth.span);
                } else {
                    e.span_label(pth.span, "while parsing this struct");
                }
                e.emit();

                // If the next token is a comma, then try to parse
                // what comes next as additional fields, rather than
                // bailing out until next `}`.
                if self.token != token::Comma {
                    self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                    if self.token != token::Comma {
                        break;
                    }
                }
                None
            }
        };

        match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
            Ok(_) => {
                if let Some(f) = parsed_field.or(recovery_field) {
                    // Only include the field if there's no parse error for the field name.
                    fields.push(f);
                }
            }
            Err(mut e) => {
                if pth == kw::Async {
                    async_block_err(&mut e, pth.span);
                } else {
                    e.span_label(pth.span, "while parsing this struct");
                    if let Some(f) = recovery_field {
                        // Keep the recovered field and suggest the missing comma.
                        fields.push(f);
                        e.span_suggestion(
                            self.prev_token.span.shrink_to_hi(),
                            "try adding a comma",
                            ",".into(),
                            Applicability::MachineApplicable,
                        );
                    }
                }
                if !recover {
                    return Err(e);
                }
                e.emit();
                self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
                self.eat(&token::Comma);
            }
        }
    }
    Ok((fields, base, recover_async))
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
|
2021-09-07 17:45:16 +00:00
|
|
|
|
/// Precondition: already parsed the '{'.
///
/// Parses the remainder of a struct literal through the closing `}` and builds
/// the expression; collapses to `ExprKind::Err` if `parse_struct_fields`
/// recovered from an `async`-block-on-old-edition error.
pub(super) fn parse_struct_expr(
    &mut self,
    qself: Option<ast::QSelf>,
    pth: ast::Path,
    attrs: AttrVec,
    recover: bool,
) -> PResult<'a, P<Expr>> {
    let lo = pth.span;
    let (fields, base, recover_async) =
        self.parse_struct_fields(pth.clone(), recover, token::Brace)?;
    // Span is computed before consuming `}` so it covers the whole literal.
    let span = lo.to(self.token.span);
    self.expect(&token::CloseDelim(token::Brace))?;
    let expr = if recover_async {
        ExprKind::Err
    } else {
        ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
    };
    Ok(self.mk_expr(span, expr, attrs))
}
|
|
|
|
|
|
|
|
|
|
/// Use in case of error after field-looking code: `S { foo: () with a }`.
///
/// If the current token looks like a field name followed by `:`, builds a
/// placeholder `ExprField` (with an error expression) so the field can still be
/// recorded during recovery; otherwise returns `None`.
fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
    match self.token.ident() {
        Some((ident, is_raw))
            if (is_raw || !ident.is_reserved())
                && self.look_ahead(1, |t| *t == token::Colon) =>
        {
            Some(ast::ExprField {
                ident,
                span: self.token.span,
                expr: self.mk_expr_err(self.token.span),
                is_shorthand: false,
                attrs: AttrVec::new(),
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            })
        }
        _ => None,
    }
}
|
|
|
|
|
|
2019-12-04 02:31:32 +00:00
|
|
|
|
/// Emits an error and recovers when a comma follows the `..base` of a struct
/// literal (`span` is the span of the `..`); the base must be the last field.
fn recover_struct_comma_after_dotdot(&mut self, span: Span) {
    if self.token != token::Comma {
        return;
    }
    self.struct_span_err(
        span.to(self.prev_token.span),
        "cannot use a comma after the base struct",
    )
    .span_suggestion_short(
        self.token.span,
        "remove this comma",
        String::new(),
        Applicability::MachineApplicable,
    )
    .note("the base struct must always be the last field")
    .emit();
    self.recover_stmt();
}
|
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
|
/// Parses `ident (COLON expr)?`.
///
/// Handles both the `x: expr` form and the `x` shorthand (desugared here to
/// `x: x`), collecting the field's outer attributes and captured tokens.
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
    let attrs = self.parse_outer_attributes()?;
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let lo = this.token.span;

        // Check if a colon exists one ahead. This means we're parsing a fieldname.
        let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
        let (ident, expr) = if is_shorthand {
            // Mimic `x: x` for the `x` field shorthand.
            let ident = this.parse_ident_common(false)?;
            let path = ast::Path::from_ident(ident);
            (ident, this.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
        } else {
            let ident = this.parse_field_name()?;
            // `x = expr` is a common typo for `x: expr`; report it but continue.
            this.error_on_eq_field_init(ident);
            this.bump(); // `:`
            (ident, this.parse_expr()?)
        };

        Ok((
            ast::ExprField {
                ident,
                span: lo.to(expr.span),
                expr,
                is_shorthand,
                attrs: attrs.into(),
                id: DUMMY_NODE_ID,
                is_placeholder: false,
            },
            TrailingToken::MaybeComma,
        ))
    })
}
|
|
|
|
|
|
2019-12-04 02:23:20 +00:00
|
|
|
|
/// Check for `=`. This means the source incorrectly attempts to
/// initialize a field with an eq rather than a colon.
fn error_on_eq_field_init(&self, field_name: Ident) {
    if self.token != token::Eq {
        return;
    }

    self.struct_span_err(self.token.span, "expected `:`, found `=`")
        .span_suggestion(
            field_name.span.shrink_to_hi().to(self.token.span),
            "replace equals symbol with a colon",
            ":".to_string(),
            Applicability::MachineApplicable,
        )
        .emit();
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Emits an error for the obsolete `...` range syntax, suggesting `..`
/// (exclusive) or `..=` (inclusive) as replacements.
fn err_dotdotdot_syntax(&self, span: Span) {
    self.struct_span_err(span, "unexpected token: `...`")
        .span_suggestion(
            span,
            "use `..` for an exclusive range",
            "..".to_owned(),
            Applicability::MaybeIncorrect,
        )
        .span_suggestion(
            span,
            "or `..=` for an inclusive range",
            "..=".to_owned(),
            Applicability::MaybeIncorrect,
        )
        .emit();
}
|
|
|
|
|
|
2019-08-11 21:37:05 +00:00
|
|
|
|
/// Emits an error for the `<-` token (there is no left-arrow operator in Rust),
/// suggesting a space so it reads as a comparison against a negative value.
fn err_larrow_operator(&self, span: Span) {
    self.struct_span_err(span, "unexpected token: `<-`")
        .span_suggestion(
            span,
            "if you meant to write a comparison against a negative value, add a \
            space in between `<` and `-`",
            "< -".to_string(),
            Applicability::MaybeIncorrect,
        )
        .emit();
}
|
|
|
|
|
|
2019-08-11 11:14:30 +00:00
|
|
|
|
/// Builds an `ExprKind::AssignOp` node (e.g. `lhs += rhs`).
fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
    ExprKind::AssignOp(binop, lhs, rhs)
}
|
|
|
|
|
|
|
|
|
|
fn mk_range(
|
2021-06-26 02:46:41 +00:00
|
|
|
|
&mut self,
|
2019-08-11 11:14:30 +00:00
|
|
|
|
start: Option<P<Expr>>,
|
|
|
|
|
end: Option<P<Expr>>,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
limits: RangeLimits,
|
2021-02-21 11:54:49 +00:00
|
|
|
|
) -> ExprKind {
|
2019-08-11 11:14:30 +00:00
|
|
|
|
if end.is_none() && limits == RangeLimits::Closed {
|
2021-06-26 02:46:41 +00:00
|
|
|
|
self.inclusive_range_with_incorrect_end(self.prev_token.span);
|
2021-02-21 11:54:49 +00:00
|
|
|
|
ExprKind::Err
|
2019-08-11 11:14:30 +00:00
|
|
|
|
} else {
|
2021-02-21 11:54:49 +00:00
|
|
|
|
ExprKind::Range(start, end, limits)
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Unary` node (e.g. `-expr`, `!expr`, `*expr`).
fn mk_unary(&self, unop: UnOp, expr: P<Expr>) -> ExprKind {
    ExprKind::Unary(unop, expr)
}
|
|
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Binary` node (e.g. `lhs + rhs`).
fn mk_binary(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
    ExprKind::Binary(binop, lhs, rhs)
}
|
|
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Index` node (`expr[idx]`).
fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ExprKind {
    ExprKind::Index(expr, idx)
}
|
|
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Call` node (`f(args...)`).
fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind {
    ExprKind::Call(f, args)
}
|
|
|
|
|
|
2021-02-21 12:01:01 +00:00
|
|
|
|
/// Builds an `expr.await` expression spanning from `lo` through the `await`
/// token, then runs recovery for the erroneous `expr.await()` method-call form.
fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
    let span = lo.to(self.prev_token.span);
    let await_expr = self.mk_expr(span, ExprKind::Await(self_arg), AttrVec::new());
    self.recover_from_await_method_call();
    await_expr
}
|
|
|
|
|
|
2019-12-03 15:38:34 +00:00
|
|
|
|
/// Builds an expression node with the given span, kind, and attributes.
/// Token capture (`tokens`) is filled in later by attribute collection.
crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
    P(Expr { kind, span, attrs, id: DUMMY_NODE_ID, tokens: None })
}
|
2019-10-08 12:39:58 +00:00
|
|
|
|
|
|
|
|
|
/// Builds an `ExprKind::Err` placeholder expression for error recovery.
pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
    self.mk_expr(span, ExprKind::Err, AttrVec::new())
}
|
2020-10-10 04:40:27 +00:00
|
|
|
|
|
|
|
|
|
/// Create expression span ensuring the span of the parent node
|
|
|
|
|
/// is larger than the span of lhs and rhs, including the attributes.
|
|
|
|
|
fn mk_expr_sp(&self, lhs: &P<Expr>, lhs_span: Span, rhs_span: Span) -> Span {
|
|
|
|
|
lhs.attrs
|
|
|
|
|
.iter()
|
|
|
|
|
.find(|a| a.style == AttrStyle::Outer)
|
|
|
|
|
.map_or(lhs_span, |a| a.span)
|
|
|
|
|
.to(rhs_span)
|
|
|
|
|
}
|
2021-01-22 18:28:08 +00:00
|
|
|
|
|
|
|
|
|
/// Runs `f` to parse an expression while collecting its tokens (for attribute
/// processing), deciding afterwards which trailing token the capture includes.
fn collect_tokens_for_expr(
    &mut self,
    attrs: AttrWrapper,
    f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
    self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
        let res = f(this, attrs)?;
        let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
            && this.token.kind == token::Semi
        {
            TrailingToken::Semi
        } else {
            // FIXME - pass this through from the place where we know
            // we need a comma, rather than assuming that `#[attr] expr,`
            // always captures a trailing comma
            TrailingToken::MaybeComma
        };
        Ok((res, trailing))
    })
}
|
2019-08-11 11:14:30 +00:00
|
|
|
|
}
|