mirror of
https://github.com/rust-lang/rust.git
synced 2024-12-13 00:56:14 +00:00
Implement APIs for parsing expressions, types, paths, patterns and items
This commit is contained in:
parent
902a9c6da7
commit
bc99e95d7d
@ -168,6 +168,41 @@ impl SourceFile {
|
||||
}
|
||||
}
|
||||
|
||||
impl ast::Path {
|
||||
/// Returns `text`, parsed as a path, but only if it has no errors.
|
||||
pub fn parse(text: &str) -> Result<Self, ()> {
|
||||
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Path)
|
||||
}
|
||||
}
|
||||
|
||||
impl ast::Pat {
|
||||
/// Returns `text`, parsed as a pattern, but only if it has no errors.
|
||||
pub fn parse(text: &str) -> Result<Self, ()> {
|
||||
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Pattern)
|
||||
}
|
||||
}
|
||||
|
||||
impl ast::Expr {
|
||||
/// Returns `text`, parsed as an expression, but only if it has no errors.
|
||||
pub fn parse(text: &str) -> Result<Self, ()> {
|
||||
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Expr)
|
||||
}
|
||||
}
|
||||
|
||||
impl ast::ModuleItem {
|
||||
/// Returns `text`, parsed as an item, but only if it has no errors.
|
||||
pub fn parse(text: &str) -> Result<Self, ()> {
|
||||
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Item)
|
||||
}
|
||||
}
|
||||
|
||||
impl ast::TypeRef {
|
||||
/// Returns `text`, parsed as an type reference, but only if it has no errors.
|
||||
pub fn parse(text: &str) -> Result<Self, ()> {
|
||||
parsing::parse_text_fragment(text, ra_parser::FragmentKind::Type)
|
||||
}
|
||||
}
|
||||
|
||||
/// Matches a `SyntaxNode` against an `ast` type.
|
||||
///
|
||||
/// # Example:
|
||||
|
@ -6,13 +6,14 @@ mod text_token_source;
|
||||
mod text_tree_sink;
|
||||
mod reparsing;
|
||||
|
||||
use crate::{syntax_node::GreenNode, SyntaxError};
|
||||
use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
|
||||
use text_token_source::TextTokenSource;
|
||||
use text_tree_sink::TextTreeSink;
|
||||
|
||||
pub use lexer::*;
|
||||
|
||||
pub(crate) use self::reparsing::incremental_reparse;
|
||||
use ra_parser::SyntaxKind;
|
||||
|
||||
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
|
||||
let (tokens, lexer_errors) = tokenize(&text);
|
||||
@ -27,3 +28,32 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
|
||||
|
||||
(tree, parser_errors)
|
||||
}
|
||||
|
||||
/// Returns `text` parsed as a `T` provided there are no parse errors.
|
||||
pub(crate) fn parse_text_fragment<T: AstNode>(
|
||||
text: &str,
|
||||
fragment_kind: ra_parser::FragmentKind,
|
||||
) -> Result<T, ()> {
|
||||
let (tokens, lexer_errors) = tokenize(&text);
|
||||
if !lexer_errors.is_empty() {
|
||||
return Err(());
|
||||
}
|
||||
|
||||
let mut token_source = TextTokenSource::new(text, &tokens);
|
||||
let mut tree_sink = TextTreeSink::new(text, &tokens);
|
||||
|
||||
// TextTreeSink assumes that there's at least some root node to which it can attach errors and
|
||||
// tokens. We arbitrarily give it a SourceFile.
|
||||
use ra_parser::TreeSink;
|
||||
tree_sink.start_node(SyntaxKind::SOURCE_FILE);
|
||||
ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
|
||||
tree_sink.finish_node();
|
||||
|
||||
let (tree, parser_errors) = tree_sink.finish();
|
||||
use ra_parser::TokenSource;
|
||||
if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
|
||||
return Err(());
|
||||
}
|
||||
|
||||
SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
|
||||
}
|
||||
|
@ -54,6 +54,51 @@ fn parser_tests() {
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
fn expr_parser_tests() {
    // Expression fragments: `ok` fixtures must parse, `err` fixtures must not.
    fragment_parser_dir_test(
        &["parser/fragments/expr/ok"],
        &["parser/fragments/expr/err"],
        |text| crate::ast::Expr::parse(text),
    );
}
|
||||
|
||||
#[test]
fn path_parser_tests() {
    // Path fragments: `ok` fixtures must parse, `err` fixtures must not.
    fragment_parser_dir_test(
        &["parser/fragments/path/ok"],
        &["parser/fragments/path/err"],
        |text| crate::ast::Path::parse(text),
    );
}
|
||||
|
||||
#[test]
fn pattern_parser_tests() {
    // Pattern fragments: `ok` fixtures must parse, `err` fixtures must not.
    fragment_parser_dir_test(
        &["parser/fragments/pattern/ok"],
        &["parser/fragments/pattern/err"],
        |text| crate::ast::Pat::parse(text),
    );
}
|
||||
|
||||
#[test]
fn item_parser_tests() {
    // Item fragments: `ok` fixtures must parse, `err` fixtures must not.
    fragment_parser_dir_test(
        &["parser/fragments/item/ok"],
        &["parser/fragments/item/err"],
        |text| crate::ast::ModuleItem::parse(text),
    );
}
|
||||
|
||||
#[test]
fn type_parser_tests() {
    // Type fragments: `ok` fixtures must parse, `err` fixtures must not.
    fragment_parser_dir_test(
        &["parser/fragments/type/ok"],
        &["parser/fragments/type/err"],
        |text| crate::ast::TypeRef::parse(text),
    );
}
|
||||
|
||||
#[test]
|
||||
fn parser_fuzz_tests() {
|
||||
for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
|
||||
@ -134,3 +179,24 @@ fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str)
|
||||
}
|
||||
acc
|
||||
}
|
||||
|
||||
fn fragment_parser_dir_test<T, F>(ok_paths: &[&str], err_paths: &[&str], f: F)
|
||||
where
|
||||
T: crate::AstNode,
|
||||
F: Fn(&str) -> Result<T, ()>,
|
||||
{
|
||||
dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| {
|
||||
if let Ok(node) = f(text) {
|
||||
format!("{:#?}", crate::ast::AstNode::syntax(&node))
|
||||
} else {
|
||||
panic!("Failed to parse '{:?}'", path);
|
||||
}
|
||||
});
|
||||
dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
|
||||
if let Ok(_) = f(text) {
|
||||
panic!("'{:?}' successfully parsed when it should have errored", path);
|
||||
} else {
|
||||
"ERROR\n".to_owned()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
1 +
|
@ -0,0 +1,8 @@
|
||||
BIN_EXPR@0..5
|
||||
LITERAL@0..1
|
||||
INT_NUMBER@0..1 "1"
|
||||
WHITESPACE@1..2 " "
|
||||
PLUS@2..3 "+"
|
||||
WHITESPACE@3..4 " "
|
||||
LITERAL@4..5
|
||||
INT_NUMBER@4..5 "2"
|
@ -0,0 +1 @@
|
||||
1 + 2
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
fn fn foo() {}
|
@ -0,0 +1,12 @@
|
||||
FN_DEF@0..11
|
||||
FN_KW@0..2 "fn"
|
||||
WHITESPACE@2..3 " "
|
||||
NAME@3..6
|
||||
IDENT@3..6 "foo"
|
||||
PARAM_LIST@6..8
|
||||
L_PAREN@6..7 "("
|
||||
R_PAREN@7..8 ")"
|
||||
WHITESPACE@8..9 " "
|
||||
BLOCK_EXPR@9..11
|
||||
L_CURLY@9..10 "{"
|
||||
R_CURLY@10..11 "}"
|
@ -0,0 +1 @@
|
||||
fn foo() {}
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
struct
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
a + b
|
@ -0,0 +1,4 @@
|
||||
PATH@0..3
|
||||
PATH_SEGMENT@0..3
|
||||
NAME_REF@0..3
|
||||
IDENT@0..3 "foo"
|
@ -0,0 +1 @@
|
||||
foo
|
@ -0,0 +1,14 @@
|
||||
PATH@0..13
|
||||
PATH@0..8
|
||||
PATH@0..3
|
||||
PATH_SEGMENT@0..3
|
||||
NAME_REF@0..3
|
||||
IDENT@0..3 "foo"
|
||||
COLON2@3..5 "::"
|
||||
PATH_SEGMENT@5..8
|
||||
NAME_REF@5..8
|
||||
IDENT@5..8 "bar"
|
||||
COLON2@8..10 "::"
|
||||
PATH_SEGMENT@10..13
|
||||
NAME_REF@10..13
|
||||
IDENT@10..13 "baz"
|
@ -0,0 +1 @@
|
||||
foo::bar::baz
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
fn
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
Some(x
|
@ -0,0 +1,10 @@
|
||||
TUPLE_STRUCT_PAT@0..7
|
||||
PATH@0..4
|
||||
PATH_SEGMENT@0..4
|
||||
NAME_REF@0..4
|
||||
IDENT@0..4 "Some"
|
||||
L_PAREN@4..5 "("
|
||||
BIND_PAT@5..6
|
||||
NAME@5..6
|
||||
IDENT@5..6 "x"
|
||||
R_PAREN@6..7 ")"
|
@ -0,0 +1 @@
|
||||
Some(x)
|
@ -0,0 +1 @@
|
||||
ERROR
|
@ -0,0 +1 @@
|
||||
Result<Foo, Bar
|
@ -0,0 +1,22 @@
|
||||
PATH_TYPE@0..16
|
||||
PATH@0..16
|
||||
PATH_SEGMENT@0..16
|
||||
NAME_REF@0..6
|
||||
IDENT@0..6 "Result"
|
||||
TYPE_ARG_LIST@6..16
|
||||
L_ANGLE@6..7 "<"
|
||||
TYPE_ARG@7..10
|
||||
PATH_TYPE@7..10
|
||||
PATH@7..10
|
||||
PATH_SEGMENT@7..10
|
||||
NAME_REF@7..10
|
||||
IDENT@7..10 "Foo"
|
||||
COMMA@10..11 ","
|
||||
WHITESPACE@11..12 " "
|
||||
TYPE_ARG@12..15
|
||||
PATH_TYPE@12..15
|
||||
PATH@12..15
|
||||
PATH_SEGMENT@12..15
|
||||
NAME_REF@12..15
|
||||
IDENT@12..15 "Bar"
|
||||
R_ANGLE@15..16 ">"
|
@ -0,0 +1 @@
|
||||
Result<Foo, Bar>
|
Loading…
Reference in New Issue
Block a user