Move some tests from rustc_expand to rustc_parse
There are some test cases involving `parse`, `tokenstream`, and `mut_visit` that are located in `rustc_expand`, because constructing a `ParseSess` used to require the involvement of `rustc_expand`. Since #64197 merged (a long time ago), however, `rustc_expand` no longer needs to be involved. This commit moves the tests into `rustc_parse`. That is the optimal place for the `parse` tests. It's not ideal for the `tokenstream` and `mut_visit` tests -- they would be better in `rustc_ast` -- but they rely on parsing, which is not available in `rustc_ast`. Still, `rustc_parse` is lower in the crate graph and closer to `rustc_ast` than `rustc_expand`, so it's an improvement for them too. The exact renaming is as follows:

- rustc_expand/src/mut_visit/tests.rs -> rustc_parse/src/parser/mut_visit/tests.rs
- rustc_expand/src/tokenstream/tests.rs -> rustc_parse/src/parser/tokenstream/tests.rs
- rustc_expand/src/tests.rs + rustc_expand/src/parse/tests.rs -> compiler/rustc_parse/src/parser/tests.rs

The latter two test files are combined because there's no need for them to be separate, and having a `rustc_parse::parser::parse` module would be weird. This also means some `pub(crate)`s can be removed.
This commit is contained in:
parent 9c9b568792
commit 2acbe9c743
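The move leans on Rust's out-of-line nested test modules: declaring `mod tests;` inside an inline `mod foo { ... }` block makes the compiler look for `foo/tests.rs`, with no extra `mod.rs` needed. A minimal sketch of the declarations this commit adds to compiler/rustc_parse/src/parser/mod.rs (the actual hunk appears in the diff below; the path comments are illustrative):

    // In compiler/rustc_parse/src/parser/mod.rs, per the hunk below.
    #[cfg(test)]
    mod tests; // parser/tests.rs: the combined rustc_expand/src/{tests.rs, parse/tests.rs}

    // These would ideally be in `rustc_ast`, but they depend on having a parser.
    #[cfg(test)]
    mod tokenstream {
        mod tests; // resolves to parser/tokenstream/tests.rs
    }
    #[cfg(test)]
    mod mut_visit {
        mod tests; // resolves to parser/mut_visit/tests.rs
    }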
Cargo.lock
@@ -3976,7 +3976,6 @@ dependencies = [
  "rustc_session",
  "rustc_span",
  "smallvec",
- "termcolor",
  "thin-vec",
  "tracing",
 ]
@@ -4464,6 +4463,7 @@ dependencies = [
  "rustc_macros",
  "rustc_session",
  "rustc_span",
+ "termcolor",
  "thin-vec",
  "tracing",
  "unicode-normalization",
compiler/rustc_expand/Cargo.toml
@@ -25,7 +25,6 @@ rustc_serialize = { path = "../rustc_serialize" }
 rustc_session = { path = "../rustc_session" }
 rustc_span = { path = "../rustc_span" }
 smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-termcolor = "1.2"
 thin-vec = "0.2.12"
 tracing = "0.1"
 # tidy-alphabetical-end
compiler/rustc_expand/src/lib.rs
@@ -34,24 +34,4 @@ pub mod module;
 #[allow(rustc::untranslatable_diagnostic)]
 pub mod proc_macro;
 
-// HACK(Centril, #64197): These shouldn't really be here.
-// Rather, they should be with their respective modules which are defined in other crates.
-// However, since for now constructing a `ParseSess` sorta requires `config` from this crate,
-// these tests will need to live here in the interim.
-
-#[cfg(test)]
-mod tests;
-#[cfg(test)]
-mod parse {
-    mod tests;
-}
-#[cfg(test)]
-mod tokenstream {
-    mod tests;
-}
-#[cfg(test)]
-mod mut_visit {
-    mod tests;
-}
-
 rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
compiler/rustc_expand/src/parse/tests.rs (deleted)
@@ -1,382 +0,0 @@
(The entire file is deleted. Its tests move, unchanged, into
compiler/rustc_parse/src/parser/tests.rs -- see the additions to that file
below -- and its imports are merged into that file's import list.)
compiler/rustc_parse/Cargo.toml
@@ -21,3 +21,7 @@ tracing = "0.1"
 unicode-normalization = "0.1.11"
 unicode-width = "0.1.4"
 # tidy-alphabetical-end
+
+[dev-dependencies]
+termcolor = "1.2"
+
compiler/rustc_parse/src/parser/mod.rs
@@ -45,6 +45,20 @@ use crate::errors::{
     self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
 };
 
+#[cfg(test)]
+mod tests;
+
+// Ideally, these tests would be in `rustc_ast`. But they depend on having a
+// parser, so they are here.
+#[cfg(test)]
+mod tokenstream {
+    mod tests;
+}
+#[cfg(test)]
+mod mut_visit {
+    mod tests;
+}
+
 bitflags::bitflags! {
     #[derive(Clone, Copy)]
     struct Restrictions: u8 {
compiler/rustc_parse/src/parser/mut_visit/tests.rs (renamed from compiler/rustc_expand/src/mut_visit/tests.rs)
@@ -1,5 +1,4 @@
-use crate::tests::{matches_codepattern, string_to_crate};
-
+use crate::parser::tests::{matches_codepattern, string_to_crate};
 use rustc_ast as ast;
 use rustc_ast::mut_visit::MutVisitor;
 use rustc_ast_pretty::pprust;
compiler/rustc_parse/src/parser/tests.rs (renamed from compiler/rustc_expand/src/tests.rs)
@@ -1,25 +1,30 @@
-use rustc_ast as ast;
-use rustc_ast::tokenstream::TokenStream;
-use rustc_parse::{new_parser_from_source_str, parser::Parser, source_file_to_stream};
-use rustc_session::parse::ParseSess;
-use rustc_span::create_default_session_globals_then;
-use rustc_span::source_map::{FilePathMapping, SourceMap};
-use rustc_span::{BytePos, Span};
+use crate::parser::ForceCollect;
+use crate::{new_parser_from_source_str, parser::Parser, source_file_to_stream};
+use ast::token::IdentIsRaw;
+use rustc_ast::ptr::P;
+use rustc_ast::token::{self, Delimiter, Token};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::visit;
+use rustc_ast::{self as ast, PatKind};
+use rustc_ast_pretty::pprust::item_to_string;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::emitter::HumanEmitter;
 use rustc_errors::{DiagCtxt, MultiSpan, PResult};
-use termcolor::WriteColor;
+use rustc_session::parse::ParseSess;
+use rustc_span::create_default_session_globals_then;
+use rustc_span::source_map::{FilePathMapping, SourceMap};
+use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::{BytePos, FileName, Pos, Span};
 use std::io;
 use std::io::prelude::*;
 use std::iter::Peekable;
 use std::path::{Path, PathBuf};
 use std::str;
 use std::sync::{Arc, Mutex};
+use termcolor::WriteColor;
 
-pub(crate) fn psess() -> ParseSess {
-    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE])
+fn psess() -> ParseSess {
+    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE])
 }
 
 /// Map string to parser (via tts).
@@ -31,7 +36,7 @@ fn create_test_handler() -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
     let output = Arc::new(Mutex::new(Vec::new()));
     let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
     let fallback_bundle = rustc_errors::fallback_fluent_bundle(
-        vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
+        vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE],
         false,
     );
     let emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), fallback_bundle)
@@ -44,7 +49,7 @@ fn create_test_handler() -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
 /// Returns the result of parsing the given string via the given callback.
 ///
 /// If there are any errors, this will panic.
-pub(crate) fn with_error_checking_parse<'a, T, F>(s: String, psess: &'a ParseSess, f: F) -> T
+fn with_error_checking_parse<'a, T, F>(s: String, psess: &'a ParseSess, f: F) -> T
 where
     F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
 {
@@ -56,7 +61,7 @@ where
 
 /// Verifies that parsing the given string using the given callback will
 /// generate an error that contains the given text.
-pub(crate) fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
+fn with_expected_parse_error<T, F>(source_str: &str, expected_output: &str, f: F)
 where
     F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
 {
@@ -150,8 +155,8 @@ struct SpanLabel {
     label: &'static str,
 }
 
-pub(crate) struct Shared<T: Write> {
-    pub data: Arc<Mutex<T>>,
+struct Shared<T: Write> {
+    data: Arc<Mutex<T>>,
 }
 
 impl<T: Write> WriteColor for Shared<T> {
@@ -1053,3 +1058,365 @@ error: foo
 "#,
     );
 }
+
+/// Parses an item.
+///
+/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
+/// when a syntax error occurred.
+fn parse_item_from_source_str(
+    name: FileName,
+    source: String,
+    psess: &ParseSess,
+) -> PResult<'_, Option<P<ast::Item>>> {
+    new_parser_from_source_str(psess, name, source).parse_item(ForceCollect::No)
+}
+
+// Produces a `rustc_span::span`.
+fn sp(a: u32, b: u32) -> Span {
+    Span::with_root_ctxt(BytePos(a), BytePos(b))
+}
+
+/// Parses a string, return an expression.
+fn string_to_expr(source_str: String) -> P<ast::Expr> {
+    with_error_checking_parse(source_str, &psess(), |p| p.parse_expr())
+}
+
+/// Parses a string, returns an item.
+fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
+    with_error_checking_parse(source_str, &psess(), |p| p.parse_item(ForceCollect::No))
+}
+
+#[test]
+fn bad_path_expr_1() {
+    // This should trigger error: expected identifier, found keyword `return`
+    create_default_session_globals_then(|| {
+        with_expected_parse_error(
+            "::abc::def::return",
+            "expected identifier, found keyword `return`",
+            |p| p.parse_expr(),
+        );
+    })
+}
+
+// Checks the token-tree-ization of macros.
+#[test]
+fn string_to_tts_macro() {
+    create_default_session_globals_then(|| {
+        let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
+        let tts = &stream.trees().collect::<Vec<_>>()[..];
+
+        match tts {
+            [
+                TokenTree::Token(
+                    Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
+                    _,
+                ),
+                TokenTree::Token(Token { kind: token::Not, .. }, _),
+                TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
+                TokenTree::Delimited(.., macro_delim, macro_tts),
+            ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
+                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                match &tts[..] {
+                    [
+                        TokenTree::Delimited(.., first_delim, first_tts),
+                        TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
+                        TokenTree::Delimited(.., second_delim, second_tts),
+                    ] if macro_delim == &Delimiter::Parenthesis => {
+                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
+                            }
+                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
+                        }
+                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        match &tts[..] {
+                            [
+                                TokenTree::Token(Token { kind: token::Dollar, .. }, _),
+                                TokenTree::Token(
+                                    Token { kind: token::Ident(name, IdentIsRaw::No), .. },
+                                    _,
+                                ),
+                            ] if second_delim == &Delimiter::Parenthesis
+                                && name.as_str() == "a" => {}
+                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
+                        }
+                    }
+                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
+                }
+            }
+            _ => panic!("value: {:?}", tts),
+        }
+    })
+}
+
+#[test]
+fn string_to_tts_1() {
+    create_default_session_globals_then(|| {
+        let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
+
+        let expected = TokenStream::new(vec![
+            TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
+            TokenTree::token_joint_hidden(
+                token::Ident(Symbol::intern("a"), IdentIsRaw::No),
+                sp(3, 4),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
+                // `JointHidden` because the `(` is followed immediately by
+                // `b`, `Alone` because the `)` is followed by whitespace.
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
+                Delimiter::Parenthesis,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(5, 6),
+                    ),
+                    TokenTree::token_alone(token::Colon, sp(6, 7)),
+                    // `JointHidden` because the `i32` is immediately followed by the `)`.
+                    TokenTree::token_joint_hidden(
+                        token::Ident(sym::i32, IdentIsRaw::No),
+                        sp(8, 11),
+                    ),
+                ])
+                .into(),
+            ),
+            TokenTree::Delimited(
+                DelimSpan::from_pair(sp(13, 14), sp(18, 19)),
+                // First `Alone` because the `{` is followed by whitespace,
+                // second `Alone` because the `}` is followed immediately by
+                // EOF.
+                DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+                Delimiter::Brace,
+                TokenStream::new(vec![
+                    TokenTree::token_joint(
+                        token::Ident(Symbol::intern("b"), IdentIsRaw::No),
+                        sp(15, 16),
+                    ),
+                    // `Alone` because the `;` is followed by whitespace.
+                    TokenTree::token_alone(token::Semi, sp(16, 17)),
+                ])
+                .into(),
+            ),
+        ]);
+
+        assert_eq!(tts, expected);
+    })
+}
+
+#[test]
+fn parse_use() {
+    create_default_session_globals_then(|| {
+        let use_s = "use foo::bar::baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+
+        let use_s = "use foo::bar as baz;";
+        let vitem = string_to_item(use_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], use_s);
+    })
+}
+
+#[test]
+fn parse_extern_crate() {
+    create_default_session_globals_then(|| {
+        let ex_s = "extern crate foo;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+
+        let ex_s = "extern crate foo as bar;";
+        let vitem = string_to_item(ex_s.to_string()).unwrap();
+        let vitem_s = item_to_string(&vitem);
+        assert_eq!(&vitem_s[..], ex_s);
+    })
+}
+
+fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
+    let item = string_to_item(src.to_string()).unwrap();
+
+    struct PatIdentVisitor {
+        spans: Vec<Span>,
+    }
+    impl<'a> visit::Visitor<'a> for PatIdentVisitor {
+        fn visit_pat(&mut self, p: &'a ast::Pat) {
+            match &p.kind {
+                PatKind::Ident(_, ident, _) => {
+                    self.spans.push(ident.span);
+                }
+                _ => {
+                    visit::walk_pat(self, p);
+                }
+            }
+        }
+    }
+    let mut v = PatIdentVisitor { spans: Vec::new() };
+    visit::walk_item(&mut v, &item);
+    return v.spans;
+}
+
+#[test]
+fn span_of_self_arg_pat_idents_are_correct() {
+    create_default_session_globals_then(|| {
+        let srcs = [
+            "impl z { fn a (&self, &myarg: i32) {} }",
+            "impl z { fn a (&mut self, &myarg: i32) {} }",
+            "impl z { fn a (&'a self, &myarg: i32) {} }",
+            "impl z { fn a (self, &myarg: i32) {} }",
+            "impl z { fn a (self: Foo, &myarg: i32) {} }",
+        ];
+
+        for src in srcs {
+            let spans = get_spans_of_pat_idents(src);
+            let (lo, hi) = (spans[0].lo(), spans[0].hi());
+            assert!(
+                "self" == &src[lo.to_usize()..hi.to_usize()],
+                "\"{}\" != \"self\". src=\"{}\"",
+                &src[lo.to_usize()..hi.to_usize()],
+                src
+            )
+        }
+    })
+}
+
+#[test]
+fn parse_exprs() {
+    create_default_session_globals_then(|| {
+        // just make sure that they parse....
+        string_to_expr("3 + 4".to_string());
+        string_to_expr("a::z.froob(b,&(987+3))".to_string());
+    })
+}
+
+#[test]
+fn attrs_fix_bug() {
+    create_default_session_globals_then(|| {
+        string_to_item(
+            "pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+                -> Result<Box<Writer>, String> {
+    #[cfg(windows)]
+    fn wb() -> c_int {
+        (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
+    }
+
+    #[cfg(unix)]
+    fn wb() -> c_int { O_WRONLY as c_int }
+
+    let mut fflags: c_int = wb();
+}"
+            .to_string(),
+        );
+    })
+}
+
+#[test]
+fn crlf_doc_comments() {
+    create_default_session_globals_then(|| {
+        let psess = psess();
+
+        let name_1 = FileName::Custom("crlf_source_1".to_string());
+        let source = "/// doc comment\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_1, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment");
+
+        let name_2 = FileName::Custom("crlf_source_2".to_string());
+        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_2, source, &psess).unwrap().unwrap();
+        let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
+        let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")];
+        assert_eq!(&docs[..], b);
+
+        let name_3 = FileName::Custom("clrf_source_3".to_string());
+        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
+        let item = parse_item_from_source_str(name_3, source, &psess).unwrap().unwrap();
+        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
+        assert_eq!(doc.as_str(), " doc comment\n * with CRLF ");
+    });
+}
+
+#[test]
+fn ttdelim_span() {
+    fn parse_expr_from_source_str(
+        name: FileName,
+        source: String,
+        psess: &ParseSess,
+    ) -> PResult<'_, P<ast::Expr>> {
+        new_parser_from_source_str(psess, name, source).parse_expr()
+    }
+
+    create_default_session_globals_then(|| {
+        let psess = psess();
+        let expr = parse_expr_from_source_str(
+            PathBuf::from("foo").into(),
+            "foo!( fn main() { body } )".to_string(),
+            &psess,
+        )
+        .unwrap();
+
+        let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
+        let span = mac.args.tokens.trees().last().unwrap().span();
+
+        match psess.source_map().span_to_snippet(span) {
+            Ok(s) => assert_eq!(&s[..], "{ body }"),
+            Err(_) => panic!("could not get snippet"),
+        }
+    });
+}
+
+// This tests that when parsing a string (rather than a file) we don't try
+// and read in a file for a module declaration and just parse a stub.
+// See `recurse_into_file_modules` in the parser.
+#[test]
+fn out_of_line_mod() {
+    create_default_session_globals_then(|| {
+        let item = parse_item_from_source_str(
+            PathBuf::from("foo").into(),
+            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
+            &psess(),
+        )
+        .unwrap()
+        .unwrap();
+
+        let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
+        assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
+    });
+}
+
+#[test]
+fn eqmodws() {
+    assert_eq!(matches_codepattern("", ""), true);
+    assert_eq!(matches_codepattern("", "a"), false);
+    assert_eq!(matches_codepattern("a", ""), false);
+    assert_eq!(matches_codepattern("a", "a"), true);
+    assert_eq!(matches_codepattern("a b", "a   \n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b ", "a   \n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b", "a   \n\t\r  b "), false);
+    assert_eq!(matches_codepattern("a   b", "a b"), true);
+    assert_eq!(matches_codepattern("ab", "a b"), false);
+    assert_eq!(matches_codepattern("a   b", "ab"), true);
+    assert_eq!(matches_codepattern(" a   b", "ab"), true);
+}
+
+#[test]
+fn pattern_whitespace() {
+    assert_eq!(matches_codepattern("", "\x0C"), false);
+    assert_eq!(matches_codepattern("a b ", "a   \u{0085}\n\t\r  b"), true);
+    assert_eq!(matches_codepattern("a b", "a   \u{0085}\n\t\r  b "), false);
+}
+
+#[test]
+fn non_pattern_whitespace() {
+    // These have the property 'White_Space' but not 'Pattern_White_Space'
+    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("a   b", "a\u{2002}b"), false);
+    assert_eq!(matches_codepattern("\u{205F}a   b", "ab"), false);
+    assert_eq!(matches_codepattern("a \u{3000}b", "ab"), false);
+}
compiler/rustc_parse/src/parser/tokenstream/tests.rs (renamed from compiler/rustc_expand/src/tokenstream/tests.rs)
@@ -1,5 +1,4 @@
-use crate::tests::string_to_stream;
-
+use crate::parser::tests::string_to_stream;
 use rustc_ast::token::{self, IdentIsRaw};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_span::create_default_session_globals_then;