review comments

commit 5c5fa775e5
parent 24160171e4
@@ -627,7 +627,7 @@ mod tests {
     use super::*;

     fn same(fmt: &'static str, p: &[Piece<'static>]) {
-        let parser = Parser::new(fmt, None, vec![], false, None);
+        let parser = Parser::new(fmt, None, vec![], false);
         assert!(parser.collect::<Vec<Piece<'static>>>() == p);
     }

@@ -643,7 +643,7 @@ mod tests {
     }

     fn musterr(s: &str) {
-        let mut p = Parser::new(s, None, vec![], false, None);
+        let mut p = Parser::new(s, None, vec![], false);
         p.next();
         assert!(!p.errors.is_empty());
     }
@@ -98,7 +98,7 @@ impl<'a> StripUnconfigured<'a> {
         self.sess.span_diagnostic.struct_span_err(attr.span, "bad `cfg_attr` attribute")
             .span_label(attr.span, "missing condition and attribute")
             .note("`cfg_attr` must be of the form: \
-                   `#[cfg_attr(condition, attribute)]`")
+                   `#[cfg_attr(condition, attribute, other_attribute, ...)]`")
             .note("for more information, visit \
                    <https://doc.rust-lang.org/reference/conditional-compilation.html\
                    #the-cfg_attr-attribute>")
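
Note: the reworded diagnostic reflects that `cfg_attr` accepts any number of attributes after the condition. A minimal, self-contained illustration of that form (the attribute names here are arbitrary examples, not taken from this diff):

    // One condition can gate several attributes at once.
    #[cfg_attr(test, allow(dead_code), allow(unused_imports))]
    fn helper() {}

    fn main() {
        helper();
    }
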
@@ -11,7 +11,7 @@ use crate::parse::{self, parser, DirectoryOwnership};
 use crate::parse::token;
 use crate::ptr::P;
 use crate::symbol::{kw, sym, Ident, Symbol};
-use crate::ThinVec;
+use crate::{ThinVec, MACRO_ARGUMENTS};
 use crate::tokenstream::{self, TokenStream};

 use errors::{DiagnosticBuilder, DiagnosticId};
@@ -850,11 +850,7 @@ impl<'a> ExtCtxt<'a> {
     }

     pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
-        parse::stream_to_parser(
-            self.parse_sess,
-            tts.iter().cloned().collect(),
-            Some("macro arguments"),
-        )
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
     }
     pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
@@ -664,7 +664,7 @@ pub fn parse(
         directory,
         recurse_into_modules,
         true,
-        Some("macro arguments"),
+        crate::MACRO_ARGUMENTS,
     );

     // A queue of possible matcher positions. We initialize it with the matcher position in which
@@ -31,6 +31,8 @@ pub use rustc_data_structures::thin_vec::ThinVec;
 use ast::AttrId;
 use syntax_pos::edition::Edition;

+const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
+
 // A variant of 'try!' that panics on an Err. This is used as a crutch on the
 // way towards a non-panic!-prone parser. It should be used for fatal parsing
 // errors; eventually we plan to convert all code using panictry to just use
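
Note: the new crate-level constant replaces the `Some("macro arguments")` literal that was previously repeated at each call site. A minimal sketch of the same pattern outside the compiler (names other than MACRO_ARGUMENTS are illustrative):

    // A named constant documents intent and keeps the repeated Option literal
    // in one place; call sites read as `describe(MACRO_ARGUMENTS)` instead of
    // `describe(Some("macro arguments"))`.
    const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");

    fn describe(subparser_name: Option<&'static str>) -> String {
        match subparser_name {
            Some(name) => format!("parsing {}", name),
            None => "parsing a source file".to_string(),
        }
    }

    fn main() {
        println!("{}", describe(MACRO_ARGUMENTS));
        println!("{}", describe(None));
    }
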
@@ -13,7 +13,7 @@ use crate::symbol::kw;
 use crate::ThinVec;
 use errors::{Applicability, DiagnosticBuilder};
 use log::debug;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};

 pub trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {

         let mut path = ast::Path {
             segments: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
         };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
         path.span = ty_span.to(self.prev_span);
@@ -267,6 +267,58 @@ impl<'a> Parser<'a> {
         }
     }

+    /// Create a `DiagnosticBuilder` for an unexpected token `t` and try to recover if it is a
+    /// closing delimiter.
+    pub fn unexpected_try_recover(
+        &mut self,
+        t: &token::Token,
+    ) -> PResult<'a, bool /* recovered */> {
+        let token_str = pprust::token_to_string(t);
+        let this_token_str = self.this_token_descr();
+        let (prev_sp, sp) = match (&self.token, self.subparser_name) {
+            // Point at the end of the macro call when reaching end of macro arguments.
+            (token::Token::Eof, Some(_)) => {
+                let sp = self.sess.source_map().next_point(self.span);
+                (sp, sp)
+            }
+            // We don't want to point at the following span after DUMMY_SP.
+            // This happens when the parser finds an empty TokenStream.
+            _ if self.prev_span == DUMMY_SP => (self.span, self.span),
+            // EOF, don't want to point at the following char, but rather the last token.
+            (token::Token::Eof, None) => (self.prev_span, self.span),
+            _ => (self.sess.source_map().next_point(self.prev_span), self.span),
+        };
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token, self.subparser_name) {
+                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                _ => this_token_str,
+            },
+        );
+        let mut err = self.struct_span_err(sp, &msg);
+        let label_exp = format!("expected `{}`", token_str);
+        match self.recover_closing_delimiter(&[t.clone()], err) {
+            Err(e) => err = e,
+            Ok(recovered) => {
+                return Ok(recovered);
+            }
+        }
+        let cm = self.sess.source_map();
+        match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
+            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                // When the spans are in the same line, it means that the only content
+                // between them is whitespace, point only at the found token.
+                err.span_label(sp, label_exp);
+            }
+            _ => {
+                err.span_label(prev_sp, label_exp);
+                err.span_label(sp, "unexpected token");
+            }
+        }
+        Err(err)
+    }
+
     /// Consume alternative await syntaxes like `await <expr>`, `await? <expr>`, `await(<expr>)`
     /// and `await { <expr> }`.
     crate fn parse_incorrect_await_syntax(
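
Note: the new helper uses `subparser_name` to word the EOF case as "end of macro arguments" instead of a bare end-of-file. A stand-alone sketch of just that message-building step (simplified; the real method also chooses spans and attempts closing-delimiter recovery):

    // Simplified analogue of the `format!` call above: when the parser runs out
    // of tokens inside a named subparser, report the end of that construct.
    fn expected_found_msg(
        expected: &str,
        found: &str,
        at_eof: bool,
        subparser_name: Option<&'static str>,
    ) -> String {
        let found = match (at_eof, subparser_name) {
            (true, Some(origin)) => format!("end of {}", origin),
            _ => found.to_string(),
        };
        format!("expected `{}`, found {}", expected, found)
    }

    fn main() {
        assert_eq!(
            expected_found_msg(")", "`;`", true, Some("macro arguments")),
            "expected `)`, found end of macro arguments"
        );
        assert_eq!(
            expected_found_msg(")", "`;`", false, None),
            "expected `)`, found `;`"
        );
    }
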
@@ -248,7 +248,7 @@ fn maybe_source_file_to_parser(
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
-    stream_to_parser(sess, tts.into_iter().collect(), Some("macro arguments"))
+    stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
 }


@@ -331,9 +331,9 @@ pub fn maybe_file_to_stream(
 pub fn stream_to_parser<'a>(
     sess: &'a ParseSess,
     stream: TokenStream,
-    is_subparser: Option<&'static str>,
+    subparser_name: Option<&'static str>,
 ) -> Parser<'a> {
-    Parser::new(sess, stream, None, true, false, is_subparser)
+    Parser::new(sess, stream, None, true, false, subparser_name)
 }

 /// Given stream, the `ParseSess` and the base directory, produces a parser.
@@ -233,8 +233,8 @@ pub struct Parser<'a> {
     /// error.
     crate unclosed_delims: Vec<UnmatchedBrace>,
     last_unexpected_token_span: Option<Span>,
-    /// If `true`, this `Parser` is not parsing Rust code but rather a macro call.
-    is_subparser: Option<&'static str>,
+    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
+    crate subparser_name: Option<&'static str>,
 }

 impl<'a> Drop for Parser<'a> {
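
Note: the rename and the "If present" doc comment reflect that the field is an `Option<&'static str>`, not a `bool`: it answers both "is this a subparser?" and "what is it parsing?". An illustrative stand-alone sketch (not the compiler's API):

    // One Option field carries the flag and the label at the same time.
    struct Parser {
        subparser_name: Option<&'static str>,
    }

    impl Parser {
        fn is_subparser(&self) -> bool {
            self.subparser_name.is_some()
        }
    }

    fn main() {
        let macro_args = Parser { subparser_name: Some("macro arguments") };
        let top_level = Parser { subparser_name: None };
        assert!(macro_args.is_subparser());
        assert!(!top_level.is_subparser());
    }
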
@@ -541,7 +541,7 @@ impl<'a> Parser<'a> {
         directory: Option<Directory<'a>>,
         recurse_into_file_modules: bool,
         desugar_doc_comments: bool,
-        is_subparser: Option<&'static str>,
+        subparser_name: Option<&'static str>,
     ) -> Self {
         let mut parser = Parser {
             sess,
@@ -572,7 +572,7 @@ impl<'a> Parser<'a> {
             max_angle_bracket_count: 0,
             unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
-            is_subparser,
+            subparser_name,
         };

         let tok = parser.next_tok();
@@ -636,56 +636,13 @@ impl<'a> Parser<'a> {
     }

     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
     pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
                 Ok(false)
             } else {
-                let token_str = pprust::token_to_string(t);
-                let this_token_str = self.this_token_descr();
-                let (prev_sp, sp) = match (&self.token, self.is_subparser) {
-                    // Point at the end of the macro call when reaching end of macro arguments.
-                    (token::Token::Eof, Some(_)) => {
-                        let sp = self.sess.source_map().next_point(self.span);
-                        (sp, sp)
-                    }
-                    // We don't want to point at the following span after DUMMY_SP.
-                    // This happens when the parser finds an empty TokenStream.
-                    _ if self.prev_span == DUMMY_SP => (self.span, self.span),
-                    // EOF, don't want to point at the following char, but rather the last token.
-                    (token::Token::Eof, None) => (self.prev_span, self.span),
-                    _ => (self.sess.source_map().next_point(self.prev_span), self.span),
-                };
-                let msg = format!(
-                    "expected `{}`, found {}",
-                    token_str,
-                    match (&self.token, self.is_subparser) {
-                        (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
-                        _ => this_token_str,
-                    },
-                );
-                let mut err = self.struct_span_err(sp, &msg);
-                let label_exp = format!("expected `{}`", token_str);
-                match self.recover_closing_delimiter(&[t.clone()], err) {
-                    Err(e) => err = e,
-                    Ok(recovered) => {
-                        return Ok(recovered);
-                    }
-                }
-                let cm = self.sess.source_map();
-                match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
-                    (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                        // When the spans are in the same line, it means that the only content
-                        // between them is whitespace, point only at the found token.
-                        err.span_label(sp, label_exp);
-                    }
-                    _ => {
-                        err.span_label(prev_sp, label_exp);
-                        err.span_label(sp, "unexpected token");
-                    }
-                }
-                Err(err)
+                self.unexpected_try_recover(t)
             }
         } else {
             self.expect_one_of(slice::from_ref(t), &[])
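
Note: this hunk is a pure extraction: the long inline error-construction branch moves into `unexpected_try_recover` so other recovery paths can share it. A schematic sketch of the shape of that refactor, with types simplified so the example is stand-alone (none of these names beyond `expect` and `unexpected_try_recover` come from the diff):

    // Before: `expect` built the error inline. After: it delegates, so the same
    // error/recovery path can be reused from other call sites.
    struct Parser {
        tokens: Vec<char>,
        pos: usize,
    }

    impl Parser {
        fn expect(&mut self, t: char) -> Result<bool, String> {
            if self.tokens.get(self.pos) == Some(&t) {
                self.pos += 1;
                Ok(false) // not recovered; the token was simply present
            } else {
                self.unexpected_try_recover(t)
            }
        }

        fn unexpected_try_recover(&mut self, t: char) -> Result<bool, String> {
            // Shared error path, factored out of `expect`.
            Err(format!("expected `{}`, found {:?}", t, self.tokens.get(self.pos)))
        }
    }

    fn main() {
        let mut p = Parser { tokens: vec!['(', ')'], pos: 0 };
        assert_eq!(p.expect('('), Ok(false));
        assert!(p.expect(']').is_err());
    }
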
@@ -2644,7 +2601,7 @@ impl<'a> Parser<'a> {
             }
             Err(mut err) => {
                 self.cancel(&mut err);
-                let (span, msg) = match (&self.token, self.is_subparser) {
+                let (span, msg) = match (&self.token, self.subparser_name) {
                     (&token::Token::Eof, Some(origin)) => {
                         let sp = self.sess.source_map().next_point(self.span);
                         (sp, format!("expected expression, found end of {}", origin))
@@ -4,7 +4,7 @@ error: bad `cfg_attr` attribute
 LL | #[cfg_attr]
    | ^^^^^^^^^^^ missing condition and attribute
    |
-   = note: `cfg_attr` must be of the form: `#[cfg_attr(condition, attribute)]`
+   = note: `cfg_attr` must be of the form: `#[cfg_attr(condition, attribute, other_attribute, ...)]`
    = note: for more information, visit <https://doc.rust-lang.org/reference/conditional-compilation.html#the-cfg_attr-attribute>

error: expected `(`, found `=`
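
Note: the `LL | #[cfg_attr]` line in the output above corresponds to a bare `cfg_attr` with no arguments. Roughly this kind of input (intentionally rejected by the compiler; a guess at the shape of the test, not the actual test file) produces the diagnostic:

    // Compile-fail illustration: `cfg_attr` without a condition and attribute
    // triggers "bad `cfg_attr` attribute ... missing condition and attribute".
    #[cfg_attr]
    struct S;

    fn main() {}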