1148: Add token_tree_to_xxx functions r=matklad a=edwin0cheng

~~As discussed in PR #1147, this PR added an `mbe::MacroKind`. Currently only two kinds of macros are supported, `SourceFile` and `Block`.~~

Added the following functions for converting between `tt::TokenTree` and `ast::Node` (see the usage sketch below):

* token_tree_to_expr
* token_tree_to_pat
* token_tree_to_ty
* token_tree_to_macro_stmts
* token_tree_to_macro_items

Also added two new syntax kinds:

* MACRO_ITEMS
* MACRO_STMTS
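
To make the intended use concrete, here is a minimal usage sketch (not part of the diff). Only the `token_tree_to_*` signatures and the new `ExpandError::ConversionError` variant come from this PR; the crate paths (`ra_mbe`, `ra_tt`, `ra_syntax`) and the helper names `as_expr` / `as_items` are assumptions for illustration.

```rust
// Usage sketch only: crate paths and helper names are assumptions; the
// `token_tree_to_*` signatures match the diff below.
use ra_mbe::{token_tree_to_expr, token_tree_to_macro_items, ExpandError};
use ra_syntax::{ast, TreeArc};
use ra_tt::Subtree;

/// Re-parse an already expanded token tree as a single expression.
fn as_expr(expanded: &Subtree) -> Result<TreeArc<ast::Expr>, ExpandError> {
    // Returns `ExpandError::ConversionError` when the expansion does not
    // form a well-shaped expression, instead of panicking.
    token_tree_to_expr(expanded)
}

/// Re-parse an already expanded token tree as a list of items,
/// rooted at the new MACRO_ITEMS syntax kind.
fn as_items(expanded: &Subtree) -> Result<TreeArc<ast::MacroItems>, ExpandError> {
    token_tree_to_macro_items(expanded)
}
```

Each converter re-parses the token tree through a dedicated `ra_parser` entry point (`parse_expr`, `parse_macro_items`, ...) and casts the resulting node, so a failed cast surfaces as `ConversionError` rather than a panic.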

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
bors[bot] 2019-04-19 08:56:39 +00:00
commit ab0a96586f
8 changed files with 295 additions and 20 deletions

View File

@ -37,9 +37,19 @@ pub enum ExpandError {
NoMatchingRule,
UnexpectedToken,
BindingError(String),
ConversionError,
}
pub use crate::syntax_bridge::{ast_to_token_tree, token_tree_to_ast_item_list, syntax_node_to_token_tree};
pub use crate::syntax_bridge::{
ast_to_token_tree,
token_tree_to_ast_item_list,
syntax_node_to_token_tree,
token_tree_to_expr,
token_tree_to_pat,
token_tree_to_ty,
token_tree_to_macro_items,
token_tree_to_macro_stmts,
};
/// This struct contains AST for a single `macro_rules` definition. What might
/// be very confusing is that AST has almost exactly the same shape as
@ -192,23 +202,26 @@ impl_froms!(TokenTree: Leaf, Subtree);
pub(crate) fn expand_to_syntax(
rules: &MacroRules,
invocation: &str,
) -> ra_syntax::TreeArc<ast::SourceFile> {
) -> ra_syntax::TreeArc<ast::MacroItems> {
let expanded = expand(rules, invocation);
token_tree_to_ast_item_list(&expanded)
token_tree_to_macro_items(&expanded).unwrap()
}
pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
let expanded = expand(rules, invocation);
assert_eq!(expanded.to_string(), expansion);
let tree = token_tree_to_ast_item_list(&expanded);
let tree = token_tree_to_macro_items(&expanded);
// Eat all whitespace by parsing it back and forth
let expansion = ast::SourceFile::parse(expansion);
let expansion = syntax_node_to_token_tree(expansion.syntax()).unwrap().0;
let file = token_tree_to_ast_item_list(&expansion);
let file = token_tree_to_macro_items(&expansion);
assert_eq!(tree.syntax().debug_dump().trim(), file.syntax().debug_dump().trim());
assert_eq!(
tree.unwrap().syntax().debug_dump().trim(),
file.unwrap().syntax().debug_dump().trim()
);
}
#[test]
@ -346,11 +359,11 @@ impl_froms!(TokenTree: Leaf, Subtree);
",
);
let expansion = expand(&rules, "structs!(Foo, Bar)");
let tree = token_tree_to_ast_item_list(&expansion);
let tree = token_tree_to_macro_items(&expansion);
assert_eq!(
tree.syntax().debug_dump().trim(),
tree.unwrap().syntax().debug_dump().trim(),
r#"
SOURCE_FILE@[0; 40)
MACRO_ITEMS@[0; 40)
STRUCT_DEF@[0; 20)
STRUCT_KW@[0; 6) "struct"
NAME@[6; 9)
@ -444,6 +457,59 @@ SOURCE_FILE@[0; 40)
assert_expansion(&rules, "foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}");
}
#[test]
fn test_tt_to_stmts() {
let rules = create_rules(
r#"
macro_rules! foo {
() => {
let a = 0;
a = 10 + 1;
a
}
}
"#,
);
let expanded = expand(&rules, "foo!{}");
let stmts = token_tree_to_macro_stmts(&expanded);
assert_eq!(
stmts.unwrap().syntax().debug_dump().trim(),
r#"MACRO_STMTS@[0; 15)
LET_STMT@[0; 7)
LET_KW@[0; 3) "let"
BIND_PAT@[3; 4)
NAME@[3; 4)
IDENT@[3; 4) "a"
EQ@[4; 5) "="
LITERAL@[5; 6)
INT_NUMBER@[5; 6) "0"
SEMI@[6; 7) ";"
EXPR_STMT@[7; 14)
BIN_EXPR@[7; 13)
PATH_EXPR@[7; 8)
PATH@[7; 8)
PATH_SEGMENT@[7; 8)
NAME_REF@[7; 8)
IDENT@[7; 8) "a"
EQ@[8; 9) "="
BIN_EXPR@[9; 13)
LITERAL@[9; 11)
INT_NUMBER@[9; 11) "10"
PLUS@[11; 12) "+"
LITERAL@[12; 13)
INT_NUMBER@[12; 13) "1"
SEMI@[13; 14) ";"
EXPR_STMT@[14; 15)
PATH_EXPR@[14; 15)
PATH@[14; 15)
PATH_SEGMENT@[14; 15)
NAME_REF@[14; 15)
IDENT@[14; 15) "a""#,
);
}
// The following tests are ported directly from intellij-rust
// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt
@ -527,7 +593,7 @@ SOURCE_FILE@[0; 40)
assert_eq!(
expand_to_syntax(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(),
r#"SOURCE_FILE@[0; 15)
r#"MACRO_ITEMS@[0; 15)
FN_DEF@[0; 15)
FN_KW@[0; 2) "fn"
NAME@[2; 5)

View File

@ -5,6 +5,7 @@ use ra_syntax::{
};
use crate::subtree_source::{SubtreeTokenSource, Querier};
use crate::ExpandError;
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
@ -30,6 +31,71 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
Some((tt, token_map))
}
// The following items are what a `rustc` macro can be parsed into:
// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
// * Expr(P<ast::Expr>) -> token_tree_to_expr
// * Pat(P<ast::Pat>) -> token_tree_to_pat
// * Ty(P<ast::Ty>) -> token_tree_to_ty
// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_macro_stmts
// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_macro_items
//
// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
// * ImplItems(SmallVec<[ast::ImplItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)
//
//
/// Parses the token tree (result of macro expansion) to an expression
pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<TreeArc<ast::Expr>, ExpandError> {
let token_source = SubtreeTokenSource::new(tt);
let mut tree_sink = TtTreeSink::new(token_source.querier());
ra_parser::parse_expr(&token_source, &mut tree_sink);
let syntax = tree_sink.inner.finish();
ast::Expr::cast(&syntax)
.map(|m| m.to_owned())
.ok_or_else(|| crate::ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) to a Pattern
pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<TreeArc<ast::Pat>, ExpandError> {
let token_source = SubtreeTokenSource::new(tt);
let mut tree_sink = TtTreeSink::new(token_source.querier());
ra_parser::parse_pat(&token_source, &mut tree_sink);
let syntax = tree_sink.inner.finish();
ast::Pat::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) to a Type
pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<TreeArc<ast::TypeRef>, ExpandError> {
let token_source = SubtreeTokenSource::new(tt);
let mut tree_sink = TtTreeSink::new(token_source.querier());
ra_parser::parse_ty(&token_source, &mut tree_sink);
let syntax = tree_sink.inner.finish();
ast::TypeRef::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of stmts
pub fn token_tree_to_macro_stmts(
tt: &tt::Subtree,
) -> Result<TreeArc<ast::MacroStmts>, ExpandError> {
let token_source = SubtreeTokenSource::new(tt);
let mut tree_sink = TtTreeSink::new(token_source.querier());
ra_parser::parse_macro_stmts(&token_source, &mut tree_sink);
let syntax = tree_sink.inner.finish();
ast::MacroStmts::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of items
pub fn token_tree_to_macro_items(
tt: &tt::Subtree,
) -> Result<TreeArc<ast::MacroItems>, ExpandError> {
let token_source = SubtreeTokenSource::new(tt);
let mut tree_sink = TtTreeSink::new(token_source.querier());
ra_parser::parse_macro_items(&token_source, &mut tree_sink);
let syntax = tree_sink.inner.finish();
ast::MacroItems::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
}
/// Parses the token tree (result of macro expansion) as a sequence of items
pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
let token_source = SubtreeTokenSource::new(tt);

View File

@ -49,6 +49,27 @@ pub(crate) fn root(p: &mut Parser) {
m.complete(p, SOURCE_FILE);
}
pub(crate) fn macro_items(p: &mut Parser) {
let m = p.start();
items::mod_contents(p, false);
m.complete(p, MACRO_ITEMS);
}
pub(crate) fn macro_stmts(p: &mut Parser) {
let m = p.start();
while !p.at(EOF) {
if p.current() == SEMI {
p.bump();
continue;
}
expressions::stmt(p, expressions::StmtWithSemi::Optional);
}
m.complete(p, MACRO_STMTS);
}
pub(crate) fn path(p: &mut Parser) {
paths::type_path(p);
}
@ -66,6 +87,11 @@ pub(crate) fn pattern(p: &mut Parser) {
}
pub(crate) fn stmt(p: &mut Parser, with_semi: bool) {
let with_semi = match with_semi {
true => expressions::StmtWithSemi::Yes,
false => expressions::StmtWithSemi::No,
};
expressions::stmt(p, with_semi)
}

View File

@ -4,6 +4,12 @@ pub(crate) use self::atom::match_arm_list;
pub(super) use self::atom::{literal, LITERAL_FIRST};
use super::*;
pub(super) enum StmtWithSemi {
Yes,
No,
Optional,
}
const EXPR_FIRST: TokenSet = LHS_FIRST;
pub(super) fn expr(p: &mut Parser) -> BlockLike {
@ -48,7 +54,7 @@ fn is_expr_stmt_attr_allowed(kind: SyntaxKind) -> bool {
}
}
pub(super) fn stmt(p: &mut Parser, with_semi: bool) {
pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) {
// test block_items
// fn a() { fn b() {} }
let m = p.start();
@ -111,13 +117,23 @@ pub(super) fn stmt(p: &mut Parser, with_semi: bool) {
// }
// test!{}
// }
if with_semi {
if blocklike.is_block() {
p.eat(SEMI);
} else {
p.expect(SEMI);
match with_semi {
StmtWithSemi::Yes => {
if blocklike.is_block() {
p.eat(SEMI);
} else {
p.expect(SEMI);
}
}
StmtWithSemi::No => {}
StmtWithSemi::Optional => {
if p.at(SEMI) {
p.eat(SEMI);
}
}
}
m.complete(p, EXPR_STMT);
}
@ -128,7 +144,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: bool) {
// let c = 92;
// let d: i32 = 92;
// }
fn let_stmt(p: &mut Parser, m: Marker, with_semi: bool) {
fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) {
assert!(p.at(LET_KW));
p.bump();
patterns::pattern(p);
@ -139,8 +155,16 @@ pub(super) fn stmt(p: &mut Parser, with_semi: bool) {
expressions::expr(p);
}
if with_semi {
p.expect(SEMI);
match with_semi {
StmtWithSemi::Yes => {
p.expect(SEMI);
}
StmtWithSemi::No => {}
StmtWithSemi::Optional => {
if p.at(SEMI) {
p.eat(SEMI);
}
}
}
m.complete(p, LET_STMT);
}
@ -160,7 +184,7 @@ pub(crate) fn expr_block_contents(p: &mut Parser) {
continue;
}
stmt(p, true)
stmt(p, StmtWithSemi::Yes)
}
}

View File

@ -98,6 +98,14 @@ pub fn parse_item(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink)
parse_from_tokens(token_source, tree_sink, grammar::item);
}
pub fn parse_macro_items(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
parse_from_tokens(token_source, tree_sink, grammar::macro_items);
}
pub fn parse_macro_stmts(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
parse_from_tokens(token_source, tree_sink, grammar::macro_stmts);
}
/// A parsing function for a specific braced-block.
pub struct Reparser(fn(&mut parser::Parser));

View File

@ -233,6 +233,8 @@ pub enum SyntaxKind {
ARG_LIST,
TYPE_BOUND,
TYPE_BOUND_LIST,
MACRO_ITEMS,
MACRO_STMTS,
// Technical kind so that we can cast from u16 safely
#[doc(hidden)]
__LAST,
@ -592,6 +594,8 @@ impl SyntaxKind {
ARG_LIST => &SyntaxInfo { name: "ARG_LIST" },
TYPE_BOUND => &SyntaxInfo { name: "TYPE_BOUND" },
TYPE_BOUND_LIST => &SyntaxInfo { name: "TYPE_BOUND_LIST" },
MACRO_ITEMS => &SyntaxInfo { name: "MACRO_ITEMS" },
MACRO_STMTS => &SyntaxInfo { name: "MACRO_STMTS" },
TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" },
EOF => &SyntaxInfo { name: "EOF" },
__LAST => &SyntaxInfo { name: "__LAST" },

View File

@ -1770,6 +1770,72 @@ impl MacroCall {
}
}
// MacroItems
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct MacroItems {
pub(crate) syntax: SyntaxNode,
}
unsafe impl TransparentNewType for MacroItems {
type Repr = rowan::SyntaxNode;
}
impl AstNode for MacroItems {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
match syntax.kind() {
MACRO_ITEMS => Some(MacroItems::from_repr(syntax.into_repr())),
_ => None,
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl ToOwned for MacroItems {
type Owned = TreeArc<MacroItems>;
fn to_owned(&self) -> TreeArc<MacroItems> { TreeArc::cast(self.syntax.to_owned()) }
}
impl ast::ModuleItemOwner for MacroItems {}
impl ast::FnDefOwner for MacroItems {}
impl MacroItems {}
// MacroStmts
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct MacroStmts {
pub(crate) syntax: SyntaxNode,
}
unsafe impl TransparentNewType for MacroStmts {
type Repr = rowan::SyntaxNode;
}
impl AstNode for MacroStmts {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
match syntax.kind() {
MACRO_STMTS => Some(MacroStmts::from_repr(syntax.into_repr())),
_ => None,
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl ToOwned for MacroStmts {
type Owned = TreeArc<MacroStmts>;
fn to_owned(&self) -> TreeArc<MacroStmts> { TreeArc::cast(self.syntax.to_owned()) }
}
impl MacroStmts {
pub fn statements(&self) -> impl Iterator<Item = &Stmt> {
super::children(self)
}
pub fn expr(&self) -> Option<&Expr> {
super::child_opt(self)
}
}
// MatchArm
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]

View File

@ -247,6 +247,10 @@ Grammar(
"ARG_LIST",
"TYPE_BOUND",
"TYPE_BOUND_LIST",
// macro related
"MACRO_ITEMS",
"MACRO_STMTS",
],
ast: {
"SourceFile": (
@ -668,5 +672,16 @@ Grammar(
"TypeArg": (options: ["TypeRef"]),
"AssocTypeArg": (options: ["NameRef", "TypeRef"]),
"LifetimeArg": (),
"MacroItems": (
traits: [ "ModuleItemOwner", "FnDefOwner" ],
),
"MacroStmts" : (
options: [ "Expr" ],
collections: [
["statements", "Stmt"],
],
)
},
)