Simplify tests

This commit is contained in:
Aleksey Kladov 2018-08-11 10:03:03 +03:00
parent b18d2882f4
commit ce898183b8
5 changed files with 36 additions and 53 deletions

View File

@ -12,4 +12,4 @@ drop_bomb = "0.1.4"
parking_lot = "0.6.0"
[dev-dependencies]
testutils = { path = "./tests/testutils" }
difference = "2.0.0"

View File

@ -1,28 +0,0 @@
extern crate libsyntax2;
extern crate testutils;
use std::fmt::Write;
use libsyntax2::{tokenize, Token};
use testutils::dir_tests;
#[test]
fn lexer_tests() {
    // For every fixture under tests/data/lexer, tokenize the input and
    // compare the rendered token dump against the expected `.txt` file.
    dir_tests(&["lexer"], |text| dump_tokens(&tokenize(text), text))
}
/// Renders a token stream as one line per token in the form
/// `KIND len "text"`, used as the golden-file format for lexer tests.
///
/// `text` must be the exact source the tokens were produced from: each
/// token's text is sliced out of it by accumulating token lengths.
fn dump_tokens(tokens: &[Token], text: &str) -> String {
    let mut acc = String::new();
    let mut offset = 0;
    for token in tokens {
        // token.len converts to u32; widen to usize for slicing.
        let len: u32 = token.len.into();
        let len = len as usize;
        let token_text = &text[offset..offset + len];
        offset += len;
        // writeln! instead of write! with an embedded "\n"
        // (clippy::write_with_newline). Writing to a String cannot fail.
        writeln!(acc, "{:?} {} {:?}", token.kind, token.len, token_text).unwrap()
    }
    acc
}

View File

@ -1,14 +0,0 @@
extern crate libsyntax2;
extern crate testutils;
use libsyntax2::parse;
use libsyntax2::utils::dump_tree;
use testutils::dir_tests;
#[test]
fn parser_tests() {
    // Run the parser over every fixture in the three corpora (inline,
    // well-formed, and erroneous) and compare the dumped syntax tree.
    let corpora = ["parser/inline", "parser/ok", "parser/err"];
    dir_tests(&corpora, |text| {
        let file = parse(text);
        dump_tree(&file)
    })
}

View File

@ -1,12 +1,31 @@
extern crate libsyntax2;
extern crate difference;
use std::{
fs,
path::{Path, PathBuf},
fmt::Write,
};
use difference::Changeset;
#[test]
fn lexer_tests() {
    // Tokenize every fixture under tests/data/lexer and compare the
    // rendered token dump against the expected output file.
    dir_tests(&["lexer"], |text| {
        dump_tokens(&libsyntax2::tokenize(text), text)
    })
}
#[test]
fn parser_tests() {
    // Parse every fixture from the three corpora (inline, well-formed,
    // erroneous) and compare the dumped syntax tree with the golden file.
    let corpora = ["parser/inline", "parser/ok", "parser/err"];
    dir_tests(&corpora, |text| {
        libsyntax2::utils::dump_tree(&libsyntax2::parse(text))
    })
}
/// Read file and normalize newlines.
///
/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
@ -23,7 +42,7 @@ fn read_text(path: &Path) -> String {
}
pub fn dir_tests<F>(paths: &[&str], f: F)
where
where
F: Fn(&str) -> String,
{
for path in collect_tests(paths) {
@ -107,5 +126,18 @@ fn project_dir() -> PathBuf {
}
fn test_data_dir() -> PathBuf {
project_dir().join("tests/data")
project_dir().join("crates/libsyntax2/tests/data")
}
/// Renders a token stream as one line per token in the form
/// `KIND len "text"`, used as the golden-file format for lexer tests.
///
/// `text` must be the exact source the tokens were produced from: each
/// token's text is sliced out of it by accumulating token lengths.
fn dump_tokens(tokens: &[libsyntax2::Token], text: &str) -> String {
    let mut acc = String::new();
    let mut offset = 0;
    for token in tokens {
        // token.len converts to u32; widen to usize for slicing.
        let len: u32 = token.len.into();
        let len = len as usize;
        let token_text = &text[offset..offset + len];
        offset += len;
        // writeln! instead of write! with an embedded "\n"
        // (clippy::write_with_newline). Writing to a String cannot fail.
        writeln!(acc, "{:?} {} {:?}", token.kind, token.len, token_text).unwrap()
    }
    acc
}

View File

@ -1,7 +0,0 @@
[package]
name = "testutils"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
difference = "2.0.0"