From 106cd6d188d3a35eca8aba1e70a038e937341c00 Mon Sep 17 00:00:00 2001
From: LaughingMan
Date: Sun, 5 Jul 2020 08:35:50 +0200
Subject: [PATCH] WGSL front end: Add basic lexer tests

---
 src/front/wgsl.rs | 29 ++++++++++++++++++++++++++++-
 1 file changed, 28 insertions(+), 1 deletion(-)

diff --git a/src/front/wgsl.rs b/src/front/wgsl.rs
index 3b5b1a500..eeaffc331 100644
--- a/src/front/wgsl.rs
+++ b/src/front/wgsl.rs
@@ -7,7 +7,7 @@ use crate::{
     FastHashMap,
 };
 
-#[derive(Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq)]
 pub enum Token<'a> {
     Separator(char),
     DoubleColon,
@@ -1482,6 +1482,8 @@ pub fn parse_str(source: &str) -> Result {
 
 #[cfg(test)]
 mod tests {
+    use crate::front::wgsl::{Lexer, Token};
+
     #[test]
     fn check_constant_type_scalar_ok() {
         let wgsl = "const a : i32 = 2;";
@@ -1493,4 +1495,29 @@ mod tests {
         let wgsl = "const a : i32 = 2.0;";
         assert!(super::parse_str(wgsl).is_err());
     }
+
+    #[test]
+    fn check_lexer() {
+        use Token::{End, Number, String, Unknown, Word};
+        let data = vec![
+            ("id123_OK", vec![Word("id123_OK"), End]),
+            ("92No", vec![Number("92"), Word("No"), End]),
+            ("æNoø", vec![Unknown('æ'), Word("No"), Unknown('ø'), End]),
+            ("No¾", vec![Word("No"), Unknown('¾'), End]),
+            ("No好", vec![Word("No"), Unknown('好'), End]),
+            ("\"\u{2}ПЀ\u{0}\"", vec![String("\u{2}ПЀ\u{0}"), End]), // https://github.com/gfx-rs/naga/issues/90
+        ];
+        for (x, expected) in data {
+            let mut lex = Lexer::new(x);
+            let mut results = vec![];
+            loop {
+                let result = lex.next();
+                results.push(result);
+                if result == Token::End {
+                    break;
+                }
+            }
+            assert_eq!(expected, results);
+        }
+    }
 }