no need to return unmatched_delims from tokentrees

This commit is contained in:
yukang 2023-02-22 06:09:57 +00:00
parent 9ce7472db4
commit 88de2e1115
2 changed files with 6 additions and 11 deletions

View File

@ -43,7 +43,6 @@ pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str), ps.source_map().new_source_file(PathBuf::from("bogofile").into(), source_str),
None, None,
) )
.0
} }
/// Parses a string, returns a crate. /// Parses a string, returns a crate.

View File

@ -30,7 +30,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
#[macro_use] #[macro_use]
pub mod parser; pub mod parser;
use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser}; use parser::{make_unclosed_delims_error, Parser};
pub mod lexer; pub mod lexer;
pub mod validate_attr; pub mod validate_attr;
@ -96,10 +96,7 @@ pub fn parse_stream_from_source_str(
sess: &ParseSess, sess: &ParseSess,
override_span: Option<Span>, override_span: Option<Span>,
) -> TokenStream { ) -> TokenStream {
let (stream, mut errors) = source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
emit_unclosed_delims(&mut errors, &sess);
stream
} }
/// Creates a new parser from a source string. /// Creates a new parser from a source string.
@ -135,9 +132,8 @@ fn maybe_source_file_to_parser(
source_file: Lrc<SourceFile>, source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diagnostic>> { ) -> Result<Parser<'_>, Vec<Diagnostic>> {
let end_pos = source_file.end_pos; let end_pos = source_file.end_pos;
let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; let stream = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None); let mut parser = stream_to_parser(sess, stream, None);
parser.unclosed_delims = unclosed_delims;
if parser.token == token::Eof { if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None); parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
} }
@ -182,7 +178,7 @@ pub fn source_file_to_stream(
sess: &ParseSess, sess: &ParseSess,
source_file: Lrc<SourceFile>, source_file: Lrc<SourceFile>,
override_span: Option<Span>, override_span: Option<Span>,
) -> (TokenStream, Vec<lexer::UnmatchedDelim>) { ) -> TokenStream {
panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span)) panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
} }
@ -192,7 +188,7 @@ pub fn maybe_file_to_stream(
sess: &ParseSess, sess: &ParseSess,
source_file: Lrc<SourceFile>, source_file: Lrc<SourceFile>,
override_span: Option<Span>, override_span: Option<Span>,
) -> Result<(TokenStream, Vec<lexer::UnmatchedDelim>), Vec<Diagnostic>> { ) -> Result<TokenStream, Vec<Diagnostic>> {
let src = source_file.src.as_ref().unwrap_or_else(|| { let src = source_file.src.as_ref().unwrap_or_else(|| {
sess.span_diagnostic.bug(&format!( sess.span_diagnostic.bug(&format!(
"cannot lex `source_file` without source: {}", "cannot lex `source_file` without source: {}",
@ -204,7 +200,7 @@ pub fn maybe_file_to_stream(
lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span); lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
match token_trees { match token_trees {
Ok(stream) if unmatched_delims.is_empty() => Ok((stream, unmatched_delims)), Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
_ => { _ => {
// Return error if there are unmatched delimiters or unclosing delimiters. // Return error if there are unmatched delimiters or unclosing delimiters.
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch