diff --git a/cli/src/main.rs b/cli/src/main.rs index 0339becfb4..0aa384c45d 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -238,9 +238,9 @@ fn run() -> Result<(), Box<dyn std::error::Error>> { }, &input, ) - .unwrap_or_else(|err| { + .unwrap_or_else(|errors| { let filename = input_path.file_name().and_then(std::ffi::OsStr::to_str); - emit_glsl_parser_error(err, filename.unwrap_or("glsl"), &input); + emit_glsl_parser_error(errors, filename.unwrap_or("glsl"), &input); std::process::exit(1); }) } @@ -409,16 +409,23 @@ use codespan_reporting::{ }, }; -pub fn emit_glsl_parser_error(err: naga::front::glsl::ParseError, filename: &str, source: &str) { - let diagnostic = match err.kind.metadata() { - Some(metadata) => Diagnostic::error() - .with_message(err.kind.to_string()) - .with_labels(vec![Label::primary((), metadata.start..metadata.end)]), - None => Diagnostic::error().with_message(err.kind.to_string()), - }; - +pub fn emit_glsl_parser_error( + errors: Vec<naga::front::glsl::ParseError>, + filename: &str, + source: &str, +) { let files = SimpleFile::new(filename, source); let config = codespan_reporting::term::Config::default(); let writer = StandardStream::stderr(ColorChoice::Auto); - term::emit(&mut writer.lock(), &config, &files, &diagnostic).expect("cannot write error"); + + for err in errors { + let diagnostic = match err.kind.metadata() { + Some(metadata) => Diagnostic::error() + .with_message(err.kind.to_string()) + .with_labels(vec![Label::primary((), metadata.start..metadata.end)]), + None => Diagnostic::error().with_message(err.kind.to_string()), + }; + + term::emit(&mut writer.lock(), &config, &files, &diagnostic).expect("cannot write error"); + } } diff --git a/src/front/glsl/error.rs b/src/front/glsl/error.rs index 5d2c62681b..eb9f5395e2 100644 --- a/src/front/glsl/error.rs +++ b/src/front/glsl/error.rs @@ -2,6 +2,7 @@ use super::{ constants::ConstantSolvingError, token::{SourceMetadata, Token, TokenValue}, }; +use pp_rs::token::PreprocessorError; use std::borrow::Cow; use thiserror::Error; @@ -73,6 
+74,8 @@ pub enum ErrorKind { VariableAlreadyDeclared(SourceMetadata, String), #[error("{1}")] SemanticError(SourceMetadata, Cow<'static, str>), + #[error("{1:?}")] + PreprocessorError(SourceMetadata, PreprocessorError), } impl ErrorKind { @@ -116,6 +119,7 @@ impl From<(SourceMetadata, ConstantSolvingError)> for ErrorKind { #[derive(Debug, Error)] #[error("{kind}")] +#[cfg_attr(test, derive(PartialEq))] pub struct ParseError { pub kind: ErrorKind, } diff --git a/src/front/glsl/lex.rs b/src/front/glsl/lex.rs index 07c4d45071..e054217a70 100644 --- a/src/front/glsl/lex.rs +++ b/src/front/glsl/lex.rs @@ -1,18 +1,31 @@ use super::{ ast::Precision, - token::{SourceMetadata, Token, TokenValue}, + token::{Directive, DirectiveKind, SourceMetadata, Token, TokenValue}, types::parse_type, }; use crate::{FastHashMap, StorageAccess}; use pp_rs::{ pp::Preprocessor, - token::{Punct, Token as PPToken, TokenValue as PPTokenValue}, + token::{PreprocessorError, Punct, TokenValue as PPTokenValue}, }; -use std::collections::VecDeque; + +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub struct LexerResult { + pub kind: LexerResultKind, + pub meta: SourceMetadata, +} + +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub enum LexerResultKind { + Token(Token), + Directive(Directive), + Error(PreprocessorError), +} pub struct Lexer<'a> { pp: Preprocessor<'a>, - tokens: VecDeque<PPToken>, } impl<'a> Lexer<'a> { @@ -21,40 +34,33 @@ impl<'a> Lexer<'a> { for (define, value) in defines { pp.add_define(define, value).unwrap(); //TODO: handle error } - Lexer { - pp, - tokens: Default::default(), - } + Lexer { pp } } } impl<'a> Iterator for Lexer<'a> { - type Item = Token; + type Item = LexerResult; fn next(&mut self) -> Option<Self::Item> { - let mut meta = SourceMetadata::default(); - let pp_token = match self.tokens.pop_front() { - Some(t) => t, - None => match self.pp.next()? 
{ - Ok(t) => t, - Err((err, loc)) => { - meta.start = loc.start as usize; - meta.end = loc.end as usize; - return Some(Token { - value: TokenValue::Unknown(err), - meta, - }); - } - }, + let pp_token = match self.pp.next()? { + Ok(t) => t, + Err((err, loc)) => { + return Some(LexerResult { + kind: LexerResultKind::Error(err), + meta: loc.into(), + }); + } }; - meta.start = pp_token.location.start as usize; - meta.end = pp_token.location.end as usize; + let meta = pp_token.location.into(); let value = match pp_token.value { PPTokenValue::Extension(extension) => { - for t in extension.tokens { - self.tokens.push_back(t); - } - TokenValue::Extension + return Some(LexerResult { + kind: LexerResultKind::Directive(Directive { + kind: DirectiveKind::Extension, + tokens: extension.tokens, + }), + meta, + }) } PPTokenValue::Float(float) => TokenValue::FloatConstant(float), PPTokenValue::Ident(ident) => { @@ -162,30 +168,41 @@ impl<'a> Iterator for Lexer<'a> { Punct::Question => TokenValue::Question, }, PPTokenValue::Pragma(pragma) => { - for t in pragma.tokens { - self.tokens.push_back(t); - } - TokenValue::Pragma + return Some(LexerResult { + kind: LexerResultKind::Directive(Directive { + kind: DirectiveKind::Pragma, + tokens: pragma.tokens, + }), + meta, + }) } PPTokenValue::Version(version) => { - for t in version.tokens { - self.tokens.push_back(t); - } - TokenValue::Version + return Some(LexerResult { + kind: LexerResultKind::Directive(Directive { + kind: DirectiveKind::Version { + is_first_directive: version.is_first_directive, + }, + tokens: version.tokens, + }), + meta, + }) } }; - Some(Token { value, meta }) + Some(LexerResult { + kind: LexerResultKind::Token(Token { value, meta }), + meta, + }) } } #[cfg(test)] mod tests { - use pp_rs::token::Integer; + use pp_rs::token::{Integer, Location, Token as PPToken, TokenValue as PPTokenValue}; use super::{ - super::token::{SourceMetadata, Token, TokenValue}, - Lexer, + super::token::{Directive, DirectiveKind, 
SourceMetadata, Token, TokenValue}, + Lexer, LexerResult, LexerResultKind, }; #[test] @@ -194,63 +211,85 @@ mod tests { // line comments let mut lex = Lexer::new("#version 450\nvoid main () {}", &defines); + let mut location = Location::default(); + location.start = 9; + location.end = 12; assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::Version, + LexerResult { + kind: LexerResultKind::Directive(Directive { + kind: DirectiveKind::Version { + is_first_directive: true + }, + tokens: vec![PPToken { + value: PPTokenValue::Integer(Integer { + signed: true, + value: 450, + width: 32 + }), + location + }] + }), meta: SourceMetadata { start: 1, end: 8 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::IntConstant(Integer { - signed: true, - value: 450, - width: 32 + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::Void, + meta: SourceMetadata { start: 13, end: 17 } }), - meta: SourceMetadata { start: 9, end: 12 }, - } - ); - assert_eq!( - lex.next().unwrap(), - Token { - value: TokenValue::Void, meta: SourceMetadata { start: 13, end: 17 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::Identifier("main".into()), + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::Identifier("main".into()), + meta: SourceMetadata { start: 18, end: 22 } + }), meta: SourceMetadata { start: 18, end: 22 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::LeftParen, + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::LeftParen, + meta: SourceMetadata { start: 23, end: 24 } + }), meta: SourceMetadata { start: 23, end: 24 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::RightParen, + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::RightParen, + meta: SourceMetadata { start: 24, end: 25 } + }), meta: SourceMetadata { start: 24, end: 25 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: 
TokenValue::LeftBrace, + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::LeftBrace, + meta: SourceMetadata { start: 26, end: 27 } + }), meta: SourceMetadata { start: 26, end: 27 } } ); assert_eq!( lex.next().unwrap(), - Token { - value: TokenValue::RightBrace, + LexerResult { + kind: LexerResultKind::Token(Token { + value: TokenValue::RightBrace, + meta: SourceMetadata { start: 27, end: 28 } + }), meta: SourceMetadata { start: 27, end: 28 } } ); diff --git a/src/front/glsl/mod.rs b/src/front/glsl/mod.rs index 8e487b8159..2d9f0725d9 100644 --- a/src/front/glsl/mod.rs +++ b/src/front/glsl/mod.rs @@ -2,7 +2,7 @@ pub use ast::Profile; pub use error::{ErrorKind, ParseError}; pub use token::{SourceMetadata, Token}; -use crate::{FastHashMap, Handle, Module, ShaderStage, Type}; +use crate::{FastHashMap, FastHashSet, Handle, Module, ShaderStage, Type}; use ast::{EntryArg, FunctionDeclaration, GlobalLookup}; use parser::ParsingContext; @@ -36,7 +36,7 @@ pub struct ShaderMetadata { pub workgroup_size: [u32; 3], pub early_fragment_tests: bool, - pub extensions: FastHashMap<String, String>, + pub extensions: FastHashSet<String>, } impl ShaderMetadata { @@ -58,7 +58,7 @@ impl Default for ShaderMetadata { stage: ShaderStage::Vertex, workgroup_size: [0; 3], early_fragment_tests: false, - extensions: FastHashMap::default(), + extensions: FastHashSet::default(), } } } @@ -74,6 +74,8 @@ pub struct Parser { entry_args: Vec<EntryArg>, + errors: Vec<ParseError>, + module: Module, } @@ -95,17 +97,25 @@ impl Parser { &mut self, options: &Options, source: &str, - ) -> std::result::Result<Module, ParseError> { + ) -> std::result::Result<Module, Vec<ParseError>> { self.reset(options.stage); let lexer = lex::Lexer::new(source, &options.defines); let mut ctx = ParsingContext::new(lexer); - ctx.parse(self).map_err(|kind| ParseError { kind })?; + if let Err(kind) = ctx.parse(self) { + self.errors.push(ParseError { kind }); + } - let mut module = Module::default(); - std::mem::swap(&mut self.module, &mut module); - Ok(module) + if self.errors.is_empty() { + 
let mut module = Module::default(); + std::mem::swap(&mut self.module, &mut module); + Ok(module) + } else { + let mut errors = Vec::new(); + std::mem::swap(&mut self.errors, &mut errors); + Err(errors) + } } pub fn metadata(&self) -> &ShaderMetadata { diff --git a/src/front/glsl/parser.rs b/src/front/glsl/parser.rs index 56982391c6..09752df984 100644 --- a/src/front/glsl/parser.rs +++ b/src/front/glsl/parser.rs @@ -5,14 +5,16 @@ use crate::{ context::Context, error::ErrorKind, error::ExpectedToken, - lex::Lexer, + lex::{Lexer, LexerResultKind}, + token::{Directive, DirectiveKind}, token::{SourceMetadata, Token, TokenValue}, variables::{GlobalOrConstant, VarDeclaration}, - Parser, Result, + ParseError, Parser, Result, }, Block, Constant, ConstantInner, Expression, ScalarValue, Type, }; use core::convert::TryFrom; +use pp_rs::token::{PreprocessorError, Token as PPToken, TokenValue as PPTokenValue}; use std::iter::Peekable; mod declarations; @@ -53,8 +55,20 @@ impl<'source> ParsingContext<'source> { } } - pub fn next(&mut self, _parser: &mut Parser) -> Option<Token> { - self.lexer.next() + pub fn next(&mut self, parser: &mut Parser) -> Option<Token> { + loop { + let res = self.lexer.next()?; + + match res.kind { + LexerResultKind::Token(token) => break Some(token), + LexerResultKind::Directive(directive) => { + parser.handle_directive(directive, res.meta) + } + LexerResultKind::Error(error) => parser.errors.push(ParseError { + kind: ErrorKind::PreprocessorError(res.meta, error), + }), + } + } } pub fn bump(&mut self, parser: &mut Parser) -> Result<Token> { @@ -70,8 +84,32 @@ impl<'source> ParsingContext<'source> { } } - pub fn peek(&mut self, _parser: &mut Parser) -> Option<&Token> { - self.lexer.peek() + pub fn peek(&mut self, parser: &mut Parser) -> Option<&Token> { + match self.lexer.peek()?.kind { + LexerResultKind::Token(_) => { + let res = self.lexer.peek()?; + + match res.kind { + LexerResultKind::Token(ref token) => Some(token), + _ => unreachable!(), + } + } + 
LexerResultKind::Error(_) | LexerResultKind::Directive(_) => { + let res = self.lexer.next()?; + + match res.kind { + LexerResultKind::Directive(directive) => { + parser.handle_directive(directive, res.meta) + } + LexerResultKind::Error(error) => parser.errors.push(ParseError { + kind: ErrorKind::PreprocessorError(res.meta, error), + }), + _ => unreachable!(), + } + + self.peek(parser) + } + } } pub fn expect_peek(&mut self, parser: &mut Parser) -> Result<&Token> { @@ -79,8 +117,6 @@ impl<'source> ParsingContext<'source> { } pub fn parse(&mut self, parser: &mut Parser) -> Result<()> { - self.parse_version(parser)?; - // Body and expression arena for global initialization let mut body = Block::new(); let mut ctx = Context::new(parser, &mut body); @@ -107,42 +143,6 @@ impl<'source> ParsingContext<'source> { Ok(()) } - fn parse_version(&mut self, parser: &mut Parser) -> Result<()> { - self.expect(parser, TokenValue::Version)?; - - let version = self.bump(parser)?; - match version.value { - TokenValue::IntConstant(i) => match i.value { - 440 | 450 | 460 => parser.meta.version = i.value as u16, - _ => return Err(ErrorKind::InvalidVersion(version.meta, i.value)), - }, - _ => { - return Err(ErrorKind::InvalidToken( - version, - vec![ExpectedToken::IntLiteral], - )) - } - } - - let profile = self.lexer.peek(); - parser.meta.profile = match profile { - Some(&Token { - value: TokenValue::Identifier(_), - .. 
- }) => { - let (name, meta) = self.expect_ident(parser)?; - - match name.as_str() { - "core" => Profile::Core, - _ => return Err(ErrorKind::InvalidProfile(meta, name)), - } - } - _ => Profile::Core, - }; - - Ok(()) - } - fn parse_uint_constant(&mut self, parser: &mut Parser) -> Result<(u32, SourceMetadata)> { let (value, meta) = self.parse_constant_expression(parser)?; @@ -181,6 +181,171 @@ impl<'source> ParsingContext<'source> { } } +impl Parser { + fn handle_directive(&mut self, directive: Directive, meta: SourceMetadata) { + let mut tokens = directive.tokens.into_iter(); + + match directive.kind { + DirectiveKind::Version { is_first_directive } => { + if !is_first_directive { + self.errors.push(ParseError { + kind: ErrorKind::SemanticError( + meta, + "#version must occur first in shader".into(), + ), + }) + } + + match tokens.next() { + Some(PPToken { + value: PPTokenValue::Integer(int), + location, + }) => match int.value { + 440 | 450 | 460 => self.meta.version = int.value as u16, + _ => self.errors.push(ParseError { + kind: ErrorKind::InvalidVersion(location.into(), int.value), + }), + }, + Some(PPToken { value, location }) => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }), + None => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + meta, + PreprocessorError::UnexpectedNewLine, + ), + }), + }; + + match tokens.next() { + Some(PPToken { + value: PPTokenValue::Ident(name), + location, + }) => match name.as_str() { + "core" => self.meta.profile = Profile::Core, + _ => self.errors.push(ParseError { + kind: ErrorKind::InvalidProfile(location.into(), name), + }), + }, + Some(PPToken { value, location }) => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }), + None => {} + }; + + if let Some(PPToken { value, location }) = tokens.next() { + 
self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }) + } + } + DirectiveKind::Extension => { + // TODO: Proper extension handling + // - Checking for extension support in the compiler + // - Handle behaviors such as warn + // - Handle the all extension + let name = match tokens.next() { + Some(PPToken { + value: PPTokenValue::Ident(name), + .. + }) => Some(name), + Some(PPToken { value, location }) => { + self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }); + + None + } + None => { + self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + meta, + PreprocessorError::UnexpectedNewLine, + ), + }); + + None + } + }; + + match tokens.next() { + Some(PPToken { + value: PPTokenValue::Punct(pp_rs::token::Punct::Colon), + .. + }) => {} + Some(PPToken { value, location }) => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }), + None => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + meta, + PreprocessorError::UnexpectedNewLine, + ), + }), + }; + + match tokens.next() { + Some(PPToken { + value: PPTokenValue::Ident(behavior), + location, + }) => match behavior.as_str() { + "require" | "enable" | "warn" | "disable" => { + if let Some(name) = name { + self.meta.extensions.insert(name); + } + } + _ => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(PPTokenValue::Ident(behavior)), + ), + }), + }, + Some(PPToken { value, location }) => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }), + None => self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + meta, + PreprocessorError::UnexpectedNewLine, + ), + 
}), + } + + if let Some(PPToken { value, location }) = tokens.next() { + self.errors.push(ParseError { + kind: ErrorKind::PreprocessorError( + location.into(), + PreprocessorError::UnexpectedToken(value), + ), + }) + } + } + DirectiveKind::Pragma => { + // TODO: handle some common pragmas? + } + } + } +} + pub struct DeclarationContext<'ctx> { qualifiers: Vec<(TypeQualifier, SourceMetadata)>, external: bool, diff --git a/src/front/glsl/parser_tests.rs b/src/front/glsl/parser_tests.rs index aec3824805..282f0c0424 100644 --- a/src/front/glsl/parser_tests.rs +++ b/src/front/glsl/parser_tests.rs @@ -1,19 +1,19 @@ -use pp_rs::token::PreprocessorError; - use crate::{ front::glsl::{ - ast::Profile, error::ErrorKind, error::ExpectedToken, token::TokenValue, Options, Parser, - SourceMetadata, Token, + ast::Profile, + error::ExpectedToken, + error::{ErrorKind, ParseError}, + token::TokenValue, + Options, Parser, SourceMetadata, Token, }, Module, ShaderStage, }; +use pp_rs::token::PreprocessorError; -fn parse(parser: &mut Parser, source: &str, stage: ShaderStage) -> Result { +fn parse(parser: &mut Parser, source: &str, stage: ShaderStage) -> Result> { let defines = crate::FastHashMap::default(); - parser - .parse(&Options { stage, defines }, source) - .map_err(|e| e.kind) + parser.parse(&Options { stage, defines }, source) } #[test] @@ -22,41 +22,69 @@ fn version() { // invalid versions assert_eq!( - parse(&mut parser, "#version 99000", ShaderStage::Vertex) - .err() - .unwrap(), - ErrorKind::InvalidVersion(SourceMetadata { start: 9, end: 14 }, 99000), - ); - - assert_eq!( - parse(&mut parser, "#version 449", ShaderStage::Vertex) - .err() - .unwrap(), - ErrorKind::InvalidVersion(SourceMetadata { start: 9, end: 12 }, 449) - ); - - assert_eq!( - parse(&mut parser, "#version 450 smart", ShaderStage::Vertex) - .err() - .unwrap(), - ErrorKind::InvalidProfile(SourceMetadata { start: 13, end: 18 }, "smart".into()) + parse( + &mut parser, + "#version 99000\n void main(){}", + 
ShaderStage::Vertex + ) + .err() + .unwrap(), + vec![ParseError { + kind: ErrorKind::InvalidVersion(SourceMetadata { start: 9, end: 14 }, 99000) + }], ); assert_eq!( parse( &mut parser, - "#version 450\nvoid f(){} #version 450", + "#version 449\n void main(){}", ShaderStage::Vertex ) .err() .unwrap(), - ErrorKind::InvalidToken( - Token { - value: TokenValue::Unknown(PreprocessorError::UnexpectedHash), - meta: SourceMetadata { start: 24, end: 25 } - }, - vec![ExpectedToken::Eof] + vec![ParseError { + kind: ErrorKind::InvalidVersion(SourceMetadata { start: 9, end: 12 }, 449) + }] + ); + + assert_eq!( + parse( + &mut parser, + "#version 450 smart\n void main(){}", + ShaderStage::Vertex ) + .err() + .unwrap(), + vec![ParseError { + kind: ErrorKind::InvalidProfile(SourceMetadata { start: 13, end: 18 }, "smart".into()) + }] + ); + + assert_eq!( + parse( + &mut parser, + "#version 450\nvoid main(){} #version 450", + ShaderStage::Vertex + ) + .err() + .unwrap(), + vec![ + ParseError { + kind: ErrorKind::PreprocessorError( + SourceMetadata { start: 27, end: 28 }, + PreprocessorError::UnexpectedHash, + ) + }, + ParseError { + kind: ErrorKind::InvalidToken( + Token { + value: TokenValue::Identifier("version".into()), + meta: SourceMetadata { start: 28, end: 35 } + }, + vec![ExpectedToken::Eof] + ) + } + ] ); // valid versions @@ -427,13 +455,15 @@ fn functions() { ) .err() .unwrap(), - ErrorKind::SemanticError( - SourceMetadata { - start: 134, - end: 152 - }, - "Function already defined".into() - ) + vec![ParseError { + kind: ErrorKind::SemanticError( + SourceMetadata { + start: 134, + end: 152 + }, + "Function already defined".into() + ) + }] ); println!(); @@ -586,13 +616,15 @@ fn implicit_conversions() { ) .err() .unwrap(), - ErrorKind::SemanticError( - SourceMetadata { - start: 156, - end: 165 - }, - "Unknown function \'test\'".into() - ) + vec![ParseError { + kind: ErrorKind::SemanticError( + SourceMetadata { + start: 156, + end: 165 + }, + "Unknown function 
\'test\'".into() + ) + }] ); assert_eq!( @@ -611,13 +643,15 @@ ) .err() .unwrap(), - ErrorKind::SemanticError( - SourceMetadata { - start: 158, - end: 165 - }, - "Ambiguous best function for \'test\'".into() - ) + vec![ParseError { + kind: ErrorKind::SemanticError( + SourceMetadata { + start: 158, + end: 165 + }, + "Ambiguous best function for \'test\'".into() + ) + }] ); } diff --git a/src/front/glsl/token.rs b/src/front/glsl/token.rs index 616c7d2d3e..bcff732f93 100644 --- a/src/front/glsl/token.rs +++ b/src/front/glsl/token.rs @@ -1,4 +1,5 @@ -pub use pp_rs::token::{Float, Integer, PreprocessorError}; +use pp_rs::token::Location; +pub use pp_rs::token::{Float, Integer, PreprocessorError, Token as PPToken}; use super::ast::Precision; use crate::{Interpolation, Sampling, Type}; @@ -23,6 +24,15 @@ impl SourceMetadata { } } +impl From<Location> for SourceMetadata { + fn from(loc: Location) -> Self { + SourceMetadata { + start: loc.start as usize, + end: loc.end as usize, + } + } +} + impl From<SourceMetadata> for Range<usize> { fn from(meta: SourceMetadata) -> Self { meta.start..meta.end } } @@ -38,13 +48,8 @@ pub struct Token { #[derive(Debug, PartialEq)] pub enum TokenValue { - Unknown(PreprocessorError), Identifier(String), - Extension, - Version, - Pragma, - FloatConstant(Float), IntConstant(Integer), BoolConstant(bool), @@ -141,3 +146,18 @@ impl fmt::Display for Token { write!(f, "{:?}", self.value) } } + +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub struct Directive { + pub kind: DirectiveKind, + pub tokens: Vec<PPToken>, +} + +#[derive(Debug)] +#[cfg_attr(test, derive(PartialEq))] +pub enum DirectiveKind { + Version { is_first_directive: bool }, + Extension, + Pragma, +}