mirror of
https://github.com/gfx-rs/wgpu.git
synced 2026-04-22 03:02:01 -04:00
[wgsl] extend and refactor lexer tests
This commit is contained in:
committed by
Dzmitry Malyshau
parent
aa7005fdf3
commit
0aa91927b2
@@ -286,3 +286,51 @@ impl<'a> Lexer<'a> {
|
||||
source.len() - self.input.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// Feeds `source` through the lexer and asserts that it yields exactly
/// `expected_tokens`, terminated by `Token::End`.
#[cfg(test)]
fn sub_test(source: &str, expected_tokens: &[Token]) {
    let mut lexer = Lexer::new(source);
    // Walk the expected tokens with the terminating `End` appended, comparing
    // each against the next token the lexer produces.
    for &expected in expected_tokens.iter().chain(std::iter::once(&Token::End)) {
        assert_eq!(lexer.next(), expected);
    }
}
|
||||
|
||||
#[test]
fn test_tokens() {
    // Table of (input, expected token stream) pairs; `sub_test` also checks
    // the trailing `Token::End` for every case.
    let cases: &[(&str, &[Token])] = &[
        ("id123_OK", &[Token::Word("id123_OK")]),
        ("92No", &[Token::Number("92"), Token::Word("No")]),
        (
            "æNoø",
            &[Token::Unknown('æ'), Token::Word("No"), Token::Unknown('ø')],
        ),
        ("No¾", &[Token::Word("No"), Token::Unknown('¾')]),
        ("No好", &[Token::Word("No"), Token::Unknown('好')]),
        // https://github.com/gfx-rs/naga/issues/90
        ("\"\u{2}ПЀ\u{0}\"", &[Token::String("\u{2}ПЀ\u{0}")]),
    ];
    for &(source, expected) in cases {
        sub_test(source, expected);
    }
}
|
||||
|
||||
#[test]
fn test_variable_decl() {
    // A decorated uniform texture declaration with irregular spacing around
    // the brackets and angle brackets.
    let source = "[[ group(0 )]] var< uniform> texture: texture_multisampled_2d <f32 >;";
    let expected = [
        Token::DoubleParen('['),
        Token::Word("group"),
        Token::Paren('('),
        Token::Number("0"),
        Token::Paren(')'),
        Token::DoubleParen(']'),
        Token::Word("var"),
        Token::Paren('<'),
        Token::Word("uniform"),
        Token::Paren('>'),
        Token::Word("texture"),
        Token::Separator(':'),
        Token::Word("texture_multisampled_2d"),
        Token::Paren('<'),
        Token::Word("f32"),
        Token::Paren('>'),
        Token::Separator(';'),
    ];
    sub_test(source, &expected);
}
|
||||
|
||||
@@ -1830,44 +1830,8 @@ pub fn parse_str(source: &str) -> Result<crate::Module, ParseError> {
|
||||
Parser::new().parse(source)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::front::wgsl::{Lexer, Token};
|
||||
|
||||
#[test]
fn check_constant_type_scalar_ok() {
    // An i32 constant initialized with an integer literal parses cleanly.
    assert!(super::parse_str("const a : i32 = 2;").is_ok());
}
|
||||
|
||||
#[test]
fn check_constant_type_scalar_err() {
    // A float initializer for an i32 constant must be rejected.
    assert!(super::parse_str("const a : i32 = 2.0;").is_err());
}
|
||||
|
||||
#[test]
fn check_lexer() {
    use Token::{End, Number, String, Unknown, Word};
    // Each case pairs a source string with its full token stream, including
    // the terminating `End`.
    let cases = vec![
        ("id123_OK", vec![Word("id123_OK"), End]),
        ("92No", vec![Number("92"), Word("No"), End]),
        ("æNoø", vec![Unknown('æ'), Word("No"), Unknown('ø'), End]),
        ("No¾", vec![Word("No"), Unknown('¾'), End]),
        ("No好", vec![Word("No"), Unknown('好'), End]),
        // https://github.com/gfx-rs/naga/issues/90
        ("\"\u{2}ПЀ\u{0}\"", vec![String("\u{2}ПЀ\u{0}"), End]),
    ];
    for (source, expected) in cases {
        let mut lex = Lexer::new(source);
        let mut tokens = Vec::new();
        // Pull tokens until the lexer reports `End`; `End` itself is kept in
        // the collected stream so it is compared too.
        while tokens.last() != Some(&End) {
            tokens.push(lex.next());
        }
        assert_eq!(expected, tokens);
    }
}
|
||||
#[test]
fn parse_types() {
    // `parse_str` lives in the parent module; the visible `use` for this test
    // module only imports `Lexer` and `Token`, so qualify the call with
    // `super::` the way the sibling tests do (the bare `parse_str` would not
    // resolve otherwise).
    // A scalar constant whose initializer matches its declared type parses.
    assert!(super::parse_str("const a : i32 = 2;").is_ok());
    // A float initializer for an `i32` constant must be rejected.
    assert!(super::parse_str("const a : i32 = 2.0;").is_err());
}
|
||||
|
||||
Reference in New Issue
Block a user