move tests

Commit f4ee0d736c by Aleksey Kladov, 2021-10-09 14:38:57 +03:00
Parent commit: 1abe3f8275
2 changed files; 66 insertions(+), 92 deletions(-)

View File

@ -16,7 +16,8 @@ use expect_test::{expect, Expect};
use hir_expand::{db::AstDatabase, InFile, MacroFile};
use stdx::format_to;
use syntax::{
ast, AstNode,
ast::{self, edit::IndentLevel},
AstNode,
SyntaxKind::{self, IDENT},
SyntaxNode, T,
};
@ -63,6 +64,8 @@ fn check(ra_fixture: &str, expect: Expect) {
}
if let Some((parse, _token_map)) = exp.value {
let pp = pretty_print_macro_expansion(parse.syntax_node());
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
format_to!(expn_text, "{}", pp);
}
let range = call.syntax().text_range();
@ -73,23 +76,38 @@ fn check(ra_fixture: &str, expect: Expect) {
expect.assert_eq(&expanded_text);
}
/// Re-indents a multi-line pretty-printed expansion so it lines up with
/// the macro call site: every line after the first gets `indent`
/// prepended, except whitespace-only lines, which are kept as-is.
/// Single-line input is returned unchanged.
fn reindent(indent: IndentLevel, pp: String) -> String {
    if !pp.contains('\n') {
        return pp;
    }
    let mut chunks = pp.split_inclusive('\n');
    // The first line keeps its original (call-site) indentation.
    let head = chunks.next().unwrap().to_string();
    chunks.fold(head, |mut acc, chunk| {
        if chunk.trim().is_empty() {
            acc.push_str(chunk);
        } else {
            format_to!(acc, "{}{}", indent, chunk);
        }
        acc
    })
}
fn pretty_print_macro_expansion(expn: SyntaxNode) -> String {
let mut res = String::new();
let mut prev_kind = SyntaxKind::EOF;
for token in iter::successors(expn.first_token(), |t| t.next_token()) {
let curr_kind = token.kind();
let needs_space = match (prev_kind, curr_kind) {
_ if prev_kind.is_trivia() || curr_kind.is_trivia() => false,
(T![=], _) | (_, T![=]) => true,
(IDENT, IDENT) => true,
(IDENT, _) => curr_kind.is_keyword(),
(_, IDENT) => prev_kind.is_keyword(),
_ => false,
let space = match (prev_kind, curr_kind) {
_ if prev_kind.is_trivia() || curr_kind.is_trivia() => "",
(T![=], _) | (_, T![=]) => " ",
(T![;], _) => "\n",
(IDENT, IDENT) => " ",
(IDENT, _) if curr_kind.is_keyword() => " ",
(_, IDENT) if prev_kind.is_keyword() => " ",
_ => "",
};
if needs_space {
res.push(' ')
}
res.push_str(space);
prev_kind = curr_kind;
format_to!(res, "{}", token)
}
@ -148,10 +166,45 @@ macro_rules! m {
() => { type qual: ::T = qual::T; }
}
type qual: ::T = qual::T;
"#]],
"#]],
)
}
// Checks that literal tokens (char, int, float with exponent, string)
// survive macro expansion with their text intact.
//
// NOTE(review): the expected expansion shows `let_` with no space —
// the pretty-printer only inserts a space after a keyword when the next
// token is an IDENT, and `_` is not an IDENT. Presumably intentional;
// confirm against the pretty-printer's separator rules.
#[test]
fn round_trips_literals() {
check(
r#"
macro_rules! m {
() => {
let _ = 'c';
let _ = 1000;
let _ = 12E+99_f64;
let _ = "rust1";
}
}
fn f() {
m!()
}
"#,
expect![[r#"
macro_rules! m {
() => {
let _ = 'c';
let _ = 1000;
let _ = 12E+99_f64;
let _ = "rust1";
}
}
fn f() {
let_ = 'c';
let_ = 1000;
let_ = 12E+99_f64;
let_ = "rust1";
}
"#]],
);
}
#[test]
fn broken_parenthesis_sequence() {
check(
@ -168,6 +221,6 @@ macro_rules! m2 { ($x:ident) => {} }
/* error: Failed to find macro definition */
/* error: Failed to lower macro args to token tree */
"#]],
"#]],
)
}

View File

@ -743,82 +743,3 @@ impl<'a> TreeSink for TtTreeSink<'a> {
self.inner.error(error, self.text_pos)
}
}
// Unit tests for the token-tree <-> token-source conversion layer.
// (This module is the removed side of the commit: these tests were
// being relocated to the crate-level expansion test suite.)
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::parse_macro;
use parser::TokenSource;
use syntax::{
ast::{make, AstNode},
ted,
};
use test_utils::assert_eq_text;
// Expands a macro producing one literal of each kind and walks the
// resulting SubtreeTokenSource, checking that each literal's kind and
// text survive the round trip.
#[test]
fn convert_tt_token_source() {
let expansion = parse_macro(
r#"
macro_rules! literals {
($i:ident) => {
{
let a = 'c';
let c = 1000;
let f = 12E+99_f64;
let s = "rust1";
}
}
}
"#,
)
.expand_tt("literals!(foo);");
let tts = &[expansion.into()];
let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
let mut tt_src = SubtreeTokenSource::new(&buffer);
let mut tokens = vec![];
// Drain the token source into (kind, text) pairs for inspection.
while tt_src.current().kind != EOF {
tokens.push((tt_src.current().kind, tt_src.text()));
tt_src.bump();
}
// The index arithmetic below skips 2 leading tokens, then each `let`
// statement contributes 5 tokens: [let] [ident] [=] [literal] [;],
// so the literal of statement k sits at index 2 + 5*k + 3.
// [${]
// [let] [a] [=] ['c'] [;]
assert_eq!(tokens[2 + 3].1, "'c'");
assert_eq!(tokens[2 + 3].0, CHAR);
// [let] [c] [=] [1000] [;]
assert_eq!(tokens[2 + 5 + 3].1, "1000");
assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
// [let] [f] [=] [12E+99_f64] [;]
assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);
// [let] [s] [=] ["rust1"] [;]
assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
assert_eq!(tokens[2 + 15 + 3].0, STRING);
}
// Appending trailing whitespace to a `{}` token tree must not change
// the delimiter kind detected by syntax_node_to_token_tree.
#[test]
fn test_token_tree_last_child_is_white_space() {
let source_file = ast::SourceFile::parse("f!{}").ok().unwrap();
let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let token_tree = macro_call.token_tree().unwrap();
// Token Tree now is :
// TokenTree
// - TokenTree
// - T!['{']
// - T!['}']
let token_tree = token_tree.clone_for_update();
ted::append_child(token_tree.syntax(), make::tokens::single_space());
let token_tree = token_tree.clone_subtree();
// Token Tree now is :
// TokenTree
// - T!['{']
// - T!['}']
// - WHITE_SPACE
let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
}
}