internal: make name consistent with usage

This commit is contained in:
Aleksey Kladov 2021-09-06 18:34:03 +03:00
parent 7d9eb4fd73
commit 104cd0ce88
14 changed files with 105 additions and 89 deletions

View File

@ -695,7 +695,8 @@ impl Attr {
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::FragmentKind::MetaItem).ok()?;
let (parse, _) =
mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::MetaItem).ok()?;
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)

View File

@ -72,7 +72,7 @@ struct BasicAdtInfo {
}
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::Items)?; // ParserEntryPoint::Items doesn't parse attrs?
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
debug!("derive node didn't parse");
mbe::ExpandError::UnexpectedToken

View File

@ -444,12 +444,12 @@ fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> Result<(Parse<SyntaxNode>, mbe::TokenMap), ExpandError> {
let fragment = match expand_to {
ExpandTo::Statements => mbe::FragmentKind::Statements,
ExpandTo::Items => mbe::FragmentKind::Items,
ExpandTo::Pattern => mbe::FragmentKind::Pattern,
ExpandTo::Type => mbe::FragmentKind::Type,
ExpandTo::Expr => mbe::FragmentKind::Expr,
let entry_point = match expand_to {
ExpandTo::Statements => mbe::ParserEntryPoint::Statements,
ExpandTo::Items => mbe::ParserEntryPoint::Items,
ExpandTo::Pattern => mbe::ParserEntryPoint::Pattern,
ExpandTo::Type => mbe::ParserEntryPoint::Type,
ExpandTo::Expr => mbe::ParserEntryPoint::Expr,
};
mbe::token_tree_to_syntax_node(tt, fragment)
mbe::token_tree_to_syntax_node(tt, entry_point)
}

View File

@ -131,7 +131,7 @@ pub fn expand_eager_macro(
let arg_file_id = arg_id;
let parsed_args = diagnostic_sink
.result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::FragmentKind::Expr))?
.result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::ParserEntryPoint::Expr))?
.0;
let result = eager_macro_recur(
db,

View File

@ -69,7 +69,7 @@ use crate::{
};
use super::ExpandResult;
use parser::FragmentKind::*;
use parser::ParserEntryPoint::*;
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;

View File

@ -24,7 +24,7 @@ use crate::{
};
// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use ::parser::FragmentKind;
pub use ::parser::ParserEntryPoint;
pub use tt::{Delimiter, DelimiterKind, Punct};
#[derive(Debug, PartialEq, Eq)]

View File

@ -13,7 +13,7 @@ use syntax::{
use tt::buffer::{Cursor, TokenBuffer};
use crate::{
subtree_source::SubtreeTokenSource, tt_iter::TtIter, ExpandError, FragmentKind, TokenMap,
subtree_source::SubtreeTokenSource, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
};
/// Convert the syntax node to a `TokenTree` (what macro
@ -49,7 +49,7 @@ pub fn syntax_node_to_token_tree_censored(
pub fn token_tree_to_syntax_node(
tt: &tt::Subtree,
fragment_kind: FragmentKind,
entry_point: ParserEntryPoint,
) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
let buffer = match tt {
tt::Subtree { delimiter: None, token_trees } => {
@ -59,7 +59,7 @@ pub fn token_tree_to_syntax_node(
};
let mut token_source = SubtreeTokenSource::new(&buffer);
let mut tree_sink = TtTreeSink::new(buffer.begin());
parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
parser::parse(&mut token_source, &mut tree_sink, entry_point);
if tree_sink.roots.len() != 1 {
return Err(ExpandError::ConversionError);
}
@ -100,7 +100,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
let mut res = Vec::new();
while iter.peek_n(0).is_some() {
let expanded = iter.expect_fragment(FragmentKind::Expr);
let expanded = iter.expect_fragment(ParserEntryPoint::Expr);
if expanded.err.is_some() {
break;
}
@ -790,7 +790,7 @@ mod tests {
"#,
)
.expand_tt("stmts!();");
assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
assert!(token_tree_to_syntax_node(&expansion, ParserEntryPoint::Expr).is_err());
}
#[test]
@ -823,7 +823,7 @@ mod tests {
let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
let tt = syntax_node_to_token_tree(struct_def.syntax()).0;
token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
token_tree_to_syntax_node(&tt, ParserEntryPoint::Item).unwrap();
}
#[test]

View File

@ -6,7 +6,7 @@ use std::fmt::Write;
use syntax::{ast, AstNode, NodeOrToken, SyntaxNode, WalkEvent};
use test_utils::assert_eq_text;
use crate::FragmentKind;
use crate::ParserEntryPoint;
use super::*;
@ -44,13 +44,16 @@ macro_rules! impl_fixture {
#[allow(unused)]
fn expand_items(&self, invocation: &str) -> SyntaxNode {
let expanded = self.expand_tt(invocation);
token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node()
token_tree_to_syntax_node(&expanded, ParserEntryPoint::Items)
.unwrap()
.0
.syntax_node()
}
#[allow(unused)]
fn expand_statements(&self, invocation: &str) -> SyntaxNode {
let expanded = self.expand_tt(invocation);
token_tree_to_syntax_node(&expanded, FragmentKind::Statements)
token_tree_to_syntax_node(&expanded, ParserEntryPoint::Statements)
.unwrap()
.0
.syntax_node()
@ -59,7 +62,10 @@ macro_rules! impl_fixture {
#[allow(unused)]
fn expand_expr(&self, invocation: &str) -> SyntaxNode {
let expanded = self.expand_tt(invocation);
token_tree_to_syntax_node(&expanded, FragmentKind::Expr).unwrap().0.syntax_node()
token_tree_to_syntax_node(&expanded, ParserEntryPoint::Expr)
.unwrap()
.0
.syntax_node()
}
#[allow(unused)]
@ -76,17 +82,17 @@ macro_rules! impl_fixture {
}
fn assert_expand_items(&self, invocation: &str, expected: &str) -> &$name {
self.assert_expansion(FragmentKind::Items, invocation, expected);
self.assert_expansion(ParserEntryPoint::Items, invocation, expected);
self
}
#[allow(unused)]
fn assert_expand_statements(&self, invocation: &str, expected: &str) -> &$name {
self.assert_expansion(FragmentKind::Statements, invocation, expected);
self.assert_expansion(ParserEntryPoint::Statements, invocation, expected);
self
}
fn assert_expansion(&self, kind: FragmentKind, invocation: &str, expected: &str) {
fn assert_expansion(&self, kind: ParserEntryPoint, invocation: &str, expected: &str) {
let expanded = self.expand_tt(invocation);
assert_eq!(expanded.to_string(), expected);

View File

@ -1,4 +1,4 @@
use ::parser::FragmentKind;
use ::parser::ParserEntryPoint;
use syntax::{
SyntaxKind::{ERROR, IDENT},
T,
@ -55,7 +55,7 @@ macro_rules! foobar {
)
.expand_tt("foobar!(baz);");
let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
let (node, token_map) = token_tree_to_syntax_node(&expanded, ParserEntryPoint::Items).unwrap();
let content = node.syntax_node().to_string();
let get_text = |id, kind| -> String {

View File

@ -1,7 +1,7 @@
//! A "Parser" structure for token trees. We use this when parsing a declarative
//! macro definition into a list of patterns and templates.
use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult};
use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult, ParserEntryPoint};
use parser::TreeSink;
use syntax::SyntaxKind;
@ -92,7 +92,7 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
fragment_kind: parser::FragmentKind,
entry_point: ParserEntryPoint,
) -> ExpandResult<Option<tt::TokenTree>> {
struct OffsetTokenSink<'a> {
cursor: Cursor<'a>,
@ -119,10 +119,10 @@ impl<'a> TtIter<'a> {
let mut src = SubtreeTokenSource::new(&buffer);
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
parser::parse_fragment(&mut src, &mut sink, fragment_kind);
parser::parse(&mut src, &mut sink, entry_point);
let mut err = if !sink.cursor.is_root() || sink.error {
Some(err!("expected {:?}", fragment_kind))
Some(err!("expected {:?}", entry_point))
} else {
None
};

View File

@ -44,20 +44,23 @@ use crate::{
TokenSet,
};
pub(crate) fn root(p: &mut Parser) {
let m = p.start();
p.eat(SHEBANG);
items::mod_contents(p, false);
m.complete(p, SOURCE_FILE);
}
/// Various pieces of syntax that can be parsed by macros by example
pub(crate) mod fragments {
pub(crate) mod entry_points {
use super::*;
pub(crate) use super::{
expressions::block_expr, paths::type_path as path, patterns::pattern_single, types::type_,
};
pub(crate) fn source_file(p: &mut Parser) {
let m = p.start();
p.eat(SHEBANG);
items::mod_contents(p, false);
m.complete(p, SOURCE_FILE);
}
pub(crate) use expressions::block_expr;
pub(crate) use paths::type_path as path;
pub(crate) use patterns::pattern_single as pattern;
pub(crate) use types::type_;
pub(crate) fn expr(p: &mut Parser) {
let _ = expressions::expr_with_attrs(p);
@ -71,8 +74,8 @@ pub(crate) mod fragments {
expressions::stmt(p, expressions::StmtWithSemi::Optional, false)
}
pub(crate) fn opt_visibility(p: &mut Parser) {
let _ = super::opt_visibility(p);
pub(crate) fn visibility(p: &mut Parser) {
let _ = opt_visibility(p);
}
// Parse a meta item, excluding the surrounding brackets, e.g. the MetaItem in #[ MetaItem ]

View File

@ -71,23 +71,13 @@ pub trait TreeSink {
fn error(&mut self, error: ParseError);
}
fn parse_from_tokens<F>(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
where
F: FnOnce(&mut parser::Parser),
{
let mut p = parser::Parser::new(token_source);
f(&mut p);
let events = p.finish();
event::process(tree_sink, events);
}
/// Parse given tokens into the given sink as a rust file.
pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
parse_from_tokens(token_source, tree_sink, grammar::root);
}
/// rust-analyzer parser allows you to choose one of the possible entry points.
///
/// The primary consumer of this API are declarative macros, `$x:expr` matchers
/// are implemented by calling into the parser with non-standard entry point.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum FragmentKind {
pub enum ParserEntryPoint {
SourceFile,
Path,
Expr,
Statement,
@ -103,27 +93,37 @@ pub enum FragmentKind {
Attr,
}
pub fn parse_fragment(
/// Parse given tokens into the given sink as a rust file.
pub fn parse_source_file(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
parse(token_source, tree_sink, ParserEntryPoint::SourceFile);
}
pub fn parse(
token_source: &mut dyn TokenSource,
tree_sink: &mut dyn TreeSink,
fragment_kind: FragmentKind,
entry_point: ParserEntryPoint,
) {
let parser: fn(&'_ mut parser::Parser) = match fragment_kind {
FragmentKind::Path => grammar::fragments::path,
FragmentKind::Expr => grammar::fragments::expr,
FragmentKind::Type => grammar::fragments::type_,
FragmentKind::Pattern => grammar::fragments::pattern_single,
FragmentKind::Item => grammar::fragments::item,
FragmentKind::Block => grammar::fragments::block_expr,
FragmentKind::Visibility => grammar::fragments::opt_visibility,
FragmentKind::MetaItem => grammar::fragments::meta_item,
FragmentKind::Statement => grammar::fragments::stmt,
FragmentKind::StatementOptionalSemi => grammar::fragments::stmt_optional_semi,
FragmentKind::Items => grammar::fragments::macro_items,
FragmentKind::Statements => grammar::fragments::macro_stmts,
FragmentKind::Attr => grammar::fragments::attr,
let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
ParserEntryPoint::Path => grammar::entry_points::path,
ParserEntryPoint::Expr => grammar::entry_points::expr,
ParserEntryPoint::Type => grammar::entry_points::type_,
ParserEntryPoint::Pattern => grammar::entry_points::pattern,
ParserEntryPoint::Item => grammar::entry_points::item,
ParserEntryPoint::Block => grammar::entry_points::block_expr,
ParserEntryPoint::Visibility => grammar::entry_points::visibility,
ParserEntryPoint::MetaItem => grammar::entry_points::meta_item,
ParserEntryPoint::Statement => grammar::entry_points::stmt,
ParserEntryPoint::StatementOptionalSemi => grammar::entry_points::stmt_optional_semi,
ParserEntryPoint::Items => grammar::entry_points::macro_items,
ParserEntryPoint::Statements => grammar::entry_points::macro_stmts,
ParserEntryPoint::Attr => grammar::entry_points::attr,
};
parse_from_tokens(token_source, tree_sink, parser)
let mut p = parser::Parser::new(token_source);
entry_point(&mut p);
let events = p.finish();
event::process(tree_sink, events);
}
/// A parsing function for a specific braced-block.

View File

@ -173,52 +173,58 @@ impl SourceFile {
}
}
// FIXME: `parse` functions shouldn't hang directly from AST nodes, and they
// shouldn't return `Result`.
//
// We need a dedicated module for parser entry points, and they should always
// return `Parse`.
impl ast::Path {
/// Returns `text`, parsed as a path, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Path)
parsing::parse_text_as(text, parser::ParserEntryPoint::Path)
}
}
impl ast::Pat {
/// Returns `text`, parsed as a pattern, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Pattern)
parsing::parse_text_as(text, parser::ParserEntryPoint::Pattern)
}
}
impl ast::Expr {
/// Returns `text`, parsed as an expression, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Expr)
parsing::parse_text_as(text, parser::ParserEntryPoint::Expr)
}
}
impl ast::Item {
/// Returns `text`, parsed as an item, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Item)
parsing::parse_text_as(text, parser::ParserEntryPoint::Item)
}
}
impl ast::Type {
/// Returns `text`, parsed as a type reference, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Type)
parsing::parse_text_as(text, parser::ParserEntryPoint::Type)
}
}
impl ast::Attr {
/// Returns `text`, parsed as an attribute, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::Attr)
parsing::parse_text_as(text, parser::ParserEntryPoint::Attr)
}
}
impl ast::Stmt {
/// Returns `text`, parsed as statement, but only if it has no errors.
pub fn parse(text: &str) -> Result<Self, ()> {
parsing::parse_text_fragment(text, parser::FragmentKind::StatementOptionalSemi)
parsing::parse_text_as(text, parser::ParserEntryPoint::StatementOptionalSemi)
}
}

View File

@ -20,7 +20,7 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
let mut token_source = TextTokenSource::new(text, &tokens);
let mut tree_sink = TextTreeSink::new(text, &tokens);
parser::parse(&mut token_source, &mut tree_sink);
parser::parse_source_file(&mut token_source, &mut tree_sink);
let (tree, mut parser_errors) = tree_sink.finish();
parser_errors.extend(lexer_errors);
@ -29,9 +29,9 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
}
/// Returns `text` parsed as a `T` provided there are no parse errors.
pub(crate) fn parse_text_fragment<T: AstNode>(
pub(crate) fn parse_text_as<T: AstNode>(
text: &str,
fragment_kind: parser::FragmentKind,
entry_point: parser::ParserEntryPoint,
) -> Result<T, ()> {
let (tokens, lexer_errors) = tokenize(text);
if !lexer_errors.is_empty() {
@ -45,7 +45,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
// tokens. We arbitrarily give it a SourceFile.
use parser::TreeSink;
tree_sink.start_node(SyntaxKind::SOURCE_FILE);
parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
parser::parse(&mut token_source, &mut tree_sink, entry_point);
tree_sink.finish_node();
let (tree, parser_errors) = tree_sink.finish();