Convert glsl module line endings to Unix

This commit is contained in:
Dzmitry Malyshau 2020-06-16 22:41:20 -04:00 committed by Dzmitry Malyshau
parent d55cd79a21
commit 8e0ae1cf06
7 changed files with 3689 additions and 3689 deletions

View File

@ -1,285 +1,285 @@
use crate::{Arena, ImageFlags, ScalarKind, Type, TypeInner, VectorSize};
use glsl::syntax::{BinaryOp, TypeSpecifierNonArray};
use spirv::Dim;
pub fn glsl_to_spirv_binary_op(op: BinaryOp) -> crate::BinaryOperator {
match op {
BinaryOp::Or => crate::BinaryOperator::LogicalOr,
BinaryOp::Xor => todo!(),
BinaryOp::And => crate::BinaryOperator::LogicalAnd,
BinaryOp::BitOr => crate::BinaryOperator::InclusiveOr,
BinaryOp::BitXor => crate::BinaryOperator::ExclusiveOr,
BinaryOp::BitAnd => crate::BinaryOperator::And,
BinaryOp::Equal => crate::BinaryOperator::Equal,
BinaryOp::NonEqual => crate::BinaryOperator::NotEqual,
BinaryOp::LT => crate::BinaryOperator::Less,
BinaryOp::GT => crate::BinaryOperator::Greater,
BinaryOp::LTE => crate::BinaryOperator::LessEqual,
BinaryOp::GTE => crate::BinaryOperator::GreaterEqual,
BinaryOp::LShift => crate::BinaryOperator::ShiftLeftLogical,
BinaryOp::RShift => crate::BinaryOperator::ShiftRightArithmetic,
BinaryOp::Add => crate::BinaryOperator::Add,
BinaryOp::Sub => crate::BinaryOperator::Subtract,
BinaryOp::Mult => crate::BinaryOperator::Multiply,
BinaryOp::Div => crate::BinaryOperator::Divide,
BinaryOp::Mod => crate::BinaryOperator::Modulo,
}
}
/// Lowers a GLSL type specifier into the IR's `TypeInner`.
///
/// Returns `None` for `void`. `types` is used to intern the sampled scalar
/// type of texture images. Unhandled specifiers hit `unimplemented!()`.
pub fn glsl_to_spirv_type(ty: TypeSpecifierNonArray, types: &mut Arena<Type>) -> Option<TypeInner> {
    use TypeSpecifierNonArray::*;
    // Tiny constructors keeping the mapping table below readable. Every GLSL
    // matrix type is float-based, so `matrix` hard-codes the kind.
    let scalar = |kind, width| TypeInner::Scalar { kind, width };
    let vector = |size, kind, width| TypeInner::Vector { size, kind, width };
    let matrix = |columns, rows, width| TypeInner::Matrix {
        columns,
        rows,
        kind: ScalarKind::Float,
        width,
    };
    Some(match ty {
        Void => return None,
        Bool => scalar(ScalarKind::Bool, 1),
        Int => scalar(ScalarKind::Sint, 32),
        UInt => scalar(ScalarKind::Uint, 32),
        Float => scalar(ScalarKind::Float, 32),
        Double => scalar(ScalarKind::Float, 64),
        Vec2 => vector(VectorSize::Bi, ScalarKind::Float, 32),
        Vec3 => vector(VectorSize::Tri, ScalarKind::Float, 32),
        Vec4 => vector(VectorSize::Quad, ScalarKind::Float, 32),
        DVec2 => vector(VectorSize::Bi, ScalarKind::Float, 64),
        DVec3 => vector(VectorSize::Tri, ScalarKind::Float, 64),
        DVec4 => vector(VectorSize::Quad, ScalarKind::Float, 64),
        BVec2 => vector(VectorSize::Bi, ScalarKind::Bool, 1),
        BVec3 => vector(VectorSize::Tri, ScalarKind::Bool, 1),
        BVec4 => vector(VectorSize::Quad, ScalarKind::Bool, 1),
        IVec2 => vector(VectorSize::Bi, ScalarKind::Sint, 32),
        IVec3 => vector(VectorSize::Tri, ScalarKind::Sint, 32),
        IVec4 => vector(VectorSize::Quad, ScalarKind::Sint, 32),
        UVec2 => vector(VectorSize::Bi, ScalarKind::Uint, 32),
        UVec3 => vector(VectorSize::Tri, ScalarKind::Uint, 32),
        UVec4 => vector(VectorSize::Quad, ScalarKind::Uint, 32),
        // Float matrices; `MatCR` carries C columns and R rows.
        Mat2 => matrix(VectorSize::Bi, VectorSize::Bi, 32),
        Mat3 => matrix(VectorSize::Tri, VectorSize::Tri, 32),
        Mat4 => matrix(VectorSize::Quad, VectorSize::Quad, 32),
        Mat23 => matrix(VectorSize::Bi, VectorSize::Tri, 32),
        Mat24 => matrix(VectorSize::Bi, VectorSize::Quad, 32),
        Mat32 => matrix(VectorSize::Tri, VectorSize::Bi, 32),
        Mat34 => matrix(VectorSize::Tri, VectorSize::Quad, 32),
        Mat42 => matrix(VectorSize::Quad, VectorSize::Bi, 32),
        Mat43 => matrix(VectorSize::Quad, VectorSize::Tri, 32),
        // Double matrices.
        DMat2 => matrix(VectorSize::Bi, VectorSize::Bi, 64),
        DMat3 => matrix(VectorSize::Tri, VectorSize::Tri, 64),
        DMat4 => matrix(VectorSize::Quad, VectorSize::Quad, 64),
        DMat23 => matrix(VectorSize::Bi, VectorSize::Tri, 64),
        DMat24 => matrix(VectorSize::Bi, VectorSize::Quad, 64),
        DMat32 => matrix(VectorSize::Tri, VectorSize::Bi, 64),
        DMat34 => matrix(VectorSize::Tri, VectorSize::Quad, 64),
        DMat42 => matrix(VectorSize::Quad, VectorSize::Bi, 64),
        DMat43 => matrix(VectorSize::Quad, VectorSize::Tri, 64),
        TypeName(ty_name) => {
            let name = ty_name.0.as_str();
            if let Some(t_pos) = name.find("texture") {
                // The prefix before "texture" selects the sampled kind:
                // "" -> float, "i" -> signed int, "u" -> unsigned int.
                let scalar_kind = match &name[..t_pos] {
                    "" => ScalarKind::Float,
                    "i" => ScalarKind::Sint,
                    "u" => ScalarKind::Uint,
                    _ => panic!(),
                };
                let base = types.fetch_or_append(Type {
                    name: None,
                    inner: scalar(scalar_kind, 32),
                });
                // The suffix after "texture" encodes dimensionality plus
                // arrayed/multisampled flags.
                let (dim, flags) = match &name[(t_pos + 7)..] {
                    "1D" => (Dim::Dim1D, ImageFlags::SAMPLED),
                    "2D" => (Dim::Dim2D, ImageFlags::SAMPLED),
                    "3D" => (Dim::Dim3D, ImageFlags::SAMPLED),
                    "1DArray" => (Dim::Dim1D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "2DArray" => (Dim::Dim2D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "3DArray" => (Dim::Dim3D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "2DMS" => (Dim::Dim2D, ImageFlags::SAMPLED | ImageFlags::MULTISAMPLED),
                    "2DMSArray" => (
                        Dim::Dim2D,
                        ImageFlags::SAMPLED | ImageFlags::ARRAYED | ImageFlags::MULTISAMPLED,
                    ),
                    "2DRect" => (Dim::DimRect, ImageFlags::SAMPLED),
                    "Cube" => (Dim::DimCube, ImageFlags::SAMPLED),
                    "CubeArray" => (Dim::DimCube, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "Buffer" => (Dim::DimBuffer, ImageFlags::SAMPLED),
                    _ => panic!(),
                };
                return Some(TypeInner::Image { base, dim, flags });
            }
            match name {
                "sampler" => TypeInner::Sampler { comparison: false },
                "samplerShadow" => TypeInner::Sampler { comparison: true },
                _ => unimplemented!(),
            }
        }
        _ => unimplemented!(),
    })
}
use crate::{Arena, ImageFlags, ScalarKind, Type, TypeInner, VectorSize};
use glsl::syntax::{BinaryOp, TypeSpecifierNonArray};
use spirv::Dim;
pub fn glsl_to_spirv_binary_op(op: BinaryOp) -> crate::BinaryOperator {
match op {
BinaryOp::Or => crate::BinaryOperator::LogicalOr,
BinaryOp::Xor => todo!(),
BinaryOp::And => crate::BinaryOperator::LogicalAnd,
BinaryOp::BitOr => crate::BinaryOperator::InclusiveOr,
BinaryOp::BitXor => crate::BinaryOperator::ExclusiveOr,
BinaryOp::BitAnd => crate::BinaryOperator::And,
BinaryOp::Equal => crate::BinaryOperator::Equal,
BinaryOp::NonEqual => crate::BinaryOperator::NotEqual,
BinaryOp::LT => crate::BinaryOperator::Less,
BinaryOp::GT => crate::BinaryOperator::Greater,
BinaryOp::LTE => crate::BinaryOperator::LessEqual,
BinaryOp::GTE => crate::BinaryOperator::GreaterEqual,
BinaryOp::LShift => crate::BinaryOperator::ShiftLeftLogical,
BinaryOp::RShift => crate::BinaryOperator::ShiftRightArithmetic,
BinaryOp::Add => crate::BinaryOperator::Add,
BinaryOp::Sub => crate::BinaryOperator::Subtract,
BinaryOp::Mult => crate::BinaryOperator::Multiply,
BinaryOp::Div => crate::BinaryOperator::Divide,
BinaryOp::Mod => crate::BinaryOperator::Modulo,
}
}
/// Lowers a GLSL type specifier into the IR's `TypeInner`.
///
/// Returns `None` for `void`. `types` is used to intern the sampled scalar
/// type of texture images. Unhandled specifiers hit `unimplemented!()`.
pub fn glsl_to_spirv_type(ty: TypeSpecifierNonArray, types: &mut Arena<Type>) -> Option<TypeInner> {
    use TypeSpecifierNonArray::*;
    // Tiny constructors keeping the mapping table below readable. Every GLSL
    // matrix type is float-based, so `matrix` hard-codes the kind.
    let scalar = |kind, width| TypeInner::Scalar { kind, width };
    let vector = |size, kind, width| TypeInner::Vector { size, kind, width };
    let matrix = |columns, rows, width| TypeInner::Matrix {
        columns,
        rows,
        kind: ScalarKind::Float,
        width,
    };
    Some(match ty {
        Void => return None,
        Bool => scalar(ScalarKind::Bool, 1),
        Int => scalar(ScalarKind::Sint, 32),
        UInt => scalar(ScalarKind::Uint, 32),
        Float => scalar(ScalarKind::Float, 32),
        Double => scalar(ScalarKind::Float, 64),
        Vec2 => vector(VectorSize::Bi, ScalarKind::Float, 32),
        Vec3 => vector(VectorSize::Tri, ScalarKind::Float, 32),
        Vec4 => vector(VectorSize::Quad, ScalarKind::Float, 32),
        DVec2 => vector(VectorSize::Bi, ScalarKind::Float, 64),
        DVec3 => vector(VectorSize::Tri, ScalarKind::Float, 64),
        DVec4 => vector(VectorSize::Quad, ScalarKind::Float, 64),
        BVec2 => vector(VectorSize::Bi, ScalarKind::Bool, 1),
        BVec3 => vector(VectorSize::Tri, ScalarKind::Bool, 1),
        BVec4 => vector(VectorSize::Quad, ScalarKind::Bool, 1),
        IVec2 => vector(VectorSize::Bi, ScalarKind::Sint, 32),
        IVec3 => vector(VectorSize::Tri, ScalarKind::Sint, 32),
        IVec4 => vector(VectorSize::Quad, ScalarKind::Sint, 32),
        UVec2 => vector(VectorSize::Bi, ScalarKind::Uint, 32),
        UVec3 => vector(VectorSize::Tri, ScalarKind::Uint, 32),
        UVec4 => vector(VectorSize::Quad, ScalarKind::Uint, 32),
        // Float matrices; `MatCR` carries C columns and R rows.
        Mat2 => matrix(VectorSize::Bi, VectorSize::Bi, 32),
        Mat3 => matrix(VectorSize::Tri, VectorSize::Tri, 32),
        Mat4 => matrix(VectorSize::Quad, VectorSize::Quad, 32),
        Mat23 => matrix(VectorSize::Bi, VectorSize::Tri, 32),
        Mat24 => matrix(VectorSize::Bi, VectorSize::Quad, 32),
        Mat32 => matrix(VectorSize::Tri, VectorSize::Bi, 32),
        Mat34 => matrix(VectorSize::Tri, VectorSize::Quad, 32),
        Mat42 => matrix(VectorSize::Quad, VectorSize::Bi, 32),
        Mat43 => matrix(VectorSize::Quad, VectorSize::Tri, 32),
        // Double matrices.
        DMat2 => matrix(VectorSize::Bi, VectorSize::Bi, 64),
        DMat3 => matrix(VectorSize::Tri, VectorSize::Tri, 64),
        DMat4 => matrix(VectorSize::Quad, VectorSize::Quad, 64),
        DMat23 => matrix(VectorSize::Bi, VectorSize::Tri, 64),
        DMat24 => matrix(VectorSize::Bi, VectorSize::Quad, 64),
        DMat32 => matrix(VectorSize::Tri, VectorSize::Bi, 64),
        DMat34 => matrix(VectorSize::Tri, VectorSize::Quad, 64),
        DMat42 => matrix(VectorSize::Quad, VectorSize::Bi, 64),
        DMat43 => matrix(VectorSize::Quad, VectorSize::Tri, 64),
        TypeName(ty_name) => {
            let name = ty_name.0.as_str();
            if let Some(t_pos) = name.find("texture") {
                // The prefix before "texture" selects the sampled kind:
                // "" -> float, "i" -> signed int, "u" -> unsigned int.
                let scalar_kind = match &name[..t_pos] {
                    "" => ScalarKind::Float,
                    "i" => ScalarKind::Sint,
                    "u" => ScalarKind::Uint,
                    _ => panic!(),
                };
                let base = types.fetch_or_append(Type {
                    name: None,
                    inner: scalar(scalar_kind, 32),
                });
                // The suffix after "texture" encodes dimensionality plus
                // arrayed/multisampled flags.
                let (dim, flags) = match &name[(t_pos + 7)..] {
                    "1D" => (Dim::Dim1D, ImageFlags::SAMPLED),
                    "2D" => (Dim::Dim2D, ImageFlags::SAMPLED),
                    "3D" => (Dim::Dim3D, ImageFlags::SAMPLED),
                    "1DArray" => (Dim::Dim1D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "2DArray" => (Dim::Dim2D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "3DArray" => (Dim::Dim3D, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "2DMS" => (Dim::Dim2D, ImageFlags::SAMPLED | ImageFlags::MULTISAMPLED),
                    "2DMSArray" => (
                        Dim::Dim2D,
                        ImageFlags::SAMPLED | ImageFlags::ARRAYED | ImageFlags::MULTISAMPLED,
                    ),
                    "2DRect" => (Dim::DimRect, ImageFlags::SAMPLED),
                    "Cube" => (Dim::DimCube, ImageFlags::SAMPLED),
                    "CubeArray" => (Dim::DimCube, ImageFlags::SAMPLED | ImageFlags::ARRAYED),
                    "Buffer" => (Dim::DimBuffer, ImageFlags::SAMPLED),
                    _ => panic!(),
                };
                return Some(TypeInner::Image { base, dim, flags });
            }
            match name {
                "sampler" => TypeInner::Sampler { comparison: false },
                "samplerShadow" => TypeInner::Sampler { comparison: true },
                _ => unimplemented!(),
            }
        }
        _ => unimplemented!(),
    })
}

File diff suppressed because it is too large. (Load Diff)

View File

@ -1,246 +1,246 @@
#![allow(clippy::all)]
#![allow(dead_code)]
use super::{Error, ErrorKind};
use crate::FastHashMap;
use std::{
fmt,
iter::Peekable,
ops::{Deref, Range},
vec::IntoIter,
};
pub mod lex;
#[cfg(feature = "glsl_preprocessor")]
#[path = "./preprocessor.rs"]
pub mod preprocessor;
type Tokens = Peekable<IntoIter<TokenMetadata>>;
/// A [`Token`] together with its position in the source, used for error
/// reporting and for re-printing the preprocessed output.
#[derive(Debug, Clone)]
pub struct TokenMetadata {
    /// The token itself.
    pub token: Token,
    /// Zero-based line the token was found on.
    pub line: usize,
    /// Half-open column range of the token within its line.
    pub chars: Range<usize>,
}
/// Lets a `TokenMetadata` be used wherever a `&Token` is expected.
impl Deref for TokenMetadata {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
Separator(char),
DoubleColon,
Paren(char),
Integral(usize),
Float(f32),
Double(f64),
Word(String),
Operation(char),
OpAssign(char),
LogicalOperation(char),
ShiftOperation(char),
Unknown(char),
LineComment,
MultiLineCommentOpen,
MultiLineCommentClose,
Preprocessor,
End,
Selection,
Sufix(char),
TokenPasting,
}
impl Token {
    /// Returns a human-readable rendering of the token (its class name for
    /// literals/words), used when reporting which tokens were expected.
    pub fn type_to_string(&self) -> String {
        match self {
            Token::Separator(separator) => separator.to_string(),
            Token::DoubleColon => ":".to_string(),
            Token::Paren(paren) => paren.to_string(),
            Token::Integral(_) => "integer".to_string(),
            Token::Float(_) => "float".to_string(),
            Token::Double(_) => "double".to_string(),
            Token::Word(_) => "word".to_string(),
            Token::Operation(op) => op.to_string(),
            Token::OpAssign(op) => format!("{}=", op),
            // The lexer stores `&&`/`||` as a single `&`/`|`, so those must be
            // doubled; `<=`, `>=`, `!=`, `==` are the stored char plus `=`.
            // (Previously `&&`/`||` were wrongly rendered as `&=`/`|=`.)
            Token::LogicalOperation(op) => match op {
                '&' | '|' => format!("{0}{0}", op),
                _ => format!("{}=", op),
            },
            Token::ShiftOperation(op) => format!("{0}{0}", op),
            Token::Unknown(_) => "unknown".to_string(),
            Token::LineComment => "//".to_string(),
            Token::MultiLineCommentOpen => "/*".to_string(),
            Token::MultiLineCommentClose => "*/".to_string(),
            Token::Preprocessor => "#".to_string(),
            Token::End => "EOF".to_string(),
            Token::Selection => "?".to_string(),
            Token::Sufix(op) => format!("{0}{0}", op),
            Token::TokenPasting => "##".to_string(),
        }
    }
}
/// Prints the token exactly as it appears in source; used to re-emit the
/// preprocessed text.
impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::Separator(sep) => write!(f, "{}", sep),
            Token::DoubleColon => write!(f, ":"),
            Token::Paren(paren) => write!(f, "{}", paren),
            Token::Integral(int) => write!(f, "{}", int),
            Token::Float(float) => write!(f, "{}", float),
            Token::Double(double) => write!(f, "{}", double),
            Token::Word(word) => write!(f, "{}", word),
            Token::Operation(op) => write!(f, "{}", op),
            Token::OpAssign(op) => write!(f, "{}=", op),
            // The lexer stores `&&`/`||` as a single `&`/`|`, so those must be
            // doubled on output; `<=`, `>=`, `!=`, `==` are the stored char
            // plus `=`. (Previously `&&`/`||` re-emitted as `&=`/`|=`,
            // corrupting round-tripped source.)
            Token::LogicalOperation(op) => match op {
                '&' | '|' => write!(f, "{0}{0}", op),
                _ => write!(f, "{0}=", op),
            },
            Token::ShiftOperation(op) => write!(f, "{0}{0}", op),
            Token::Unknown(unknown) => write!(f, "{}", unknown),
            Token::LineComment => write!(f, "//"),
            Token::MultiLineCommentOpen => write!(f, "/*"),
            Token::MultiLineCommentClose => write!(f, "*/"),
            Token::Preprocessor => write!(f, "#"),
            Token::End => write!(f, ""),
            Token::Selection => write!(f, "?"),
            Token::Sufix(op) => write!(f, "{0}{0}", op),
            Token::TokenPasting => write!(f, "##"),
        }
    }
}
/// A node of the (currently minimal) preprocessor expression tree.
#[derive(Debug)]
pub enum Node {
    /// A plain identifier.
    Ident(String),
    /// A literal constant.
    Const(Literal),
}
/// A literal value appearing in a preprocessor expression.
#[derive(Debug, Copy, Clone)]
pub enum Literal {
    Double(f64),
    Float(f32),
    Uint(usize),
    Sint(isize),
    Bool(bool),
}
/// Parses a primary expression: an identifier, a literal, or (eventually) a
/// parenthesised sub-expression. Errors on end of input or any other token.
fn parse_primary_expression(tokens: &mut Tokens) -> Result<Node, Error> {
    let token = tokens.next().ok_or(Error {
        kind: ErrorKind::EOF,
    })?;
    match token.token {
        Token::Word(ident) => {
            // `true`/`false` lex as words but are boolean literals.
            let node = match ident.as_str() {
                "true" => Node::Const(Literal::Bool(true)),
                "false" => Node::Const(Literal::Bool(false)),
                _ => Node::Ident(ident),
            };
            Ok(node)
        }
        Token::Integral(value) => Ok(Node::Const(Literal::Uint(value))),
        Token::Float(value) => Ok(Node::Const(Literal::Float(value))),
        Token::Double(value) => Ok(Node::Const(Literal::Double(value))),
        Token::Paren('(') => todo!(), /* parse_expression */
        _ => Err(Error {
            kind: ErrorKind::UnexpectedToken {
                expected: vec![
                    Token::Word(String::new()),
                    Token::Integral(0),
                    Token::Double(0.0),
                    Token::Float(0.0),
                    Token::Paren('('),
                ],
                got: token,
            },
        }),
    }
}
/// Runs the lexer to completion, discarding `//` and `/* ... */` comments and
/// collecting every remaining token (including the terminating `End`).
pub(self) fn parse_comments(mut lexer: lex::Lexer) -> Result<Vec<TokenMetadata>, Error> {
    let mut tokens = Vec::new();
    loop {
        let token = lexer.next();
        match token.token {
            Token::MultiLineCommentOpen => {
                // Discard everything up to the matching `*/`; reaching the end
                // of input first means the comment was never closed.
                loop {
                    let inner = lexer.next();
                    if inner.token == Token::MultiLineCommentClose {
                        break;
                    }
                    if inner.token == Token::End {
                        return Err(Error {
                            kind: ErrorKind::EOF,
                        });
                    }
                }
            }
            Token::LineComment => {
                // Discard the rest of the current line.
                while token.line == lexer.peek().line && Token::End != lexer.peek().token {
                    let _ = lexer.next();
                }
            }
            Token::End => {
                tokens.push(token);
                break;
            }
            _ => tokens.push(token),
        }
    }
    Ok(tokens)
}
/// Strips comments, runs the (feature-gated) preprocessor, and re-emits the
/// source as a `String`, preserving each token's recorded line and column.
pub fn parse(input: &str) -> Result<String, Error> {
    let lexer = lex::Lexer::new(input);
    let tokens = parse_comments(lexer)?;
    // Built-in macro definitions made visible to the preprocessor.
    let mut macros = FastHashMap::default();
    for name in &["GL_SPIRV", "VULKAN"] {
        macros.insert(
            String::from(*name),
            vec![TokenMetadata {
                token: Token::Integral(100),
                line: 0,
                chars: 0..1,
            }],
        );
    }
    log::trace!("------GLSL COMMENT STRIPPED------");
    log::trace!("\n{:#?}", tokens);
    log::trace!("---------------------------------");
    #[cfg(feature = "glsl_preprocessor")]
    let tokens = preprocessor::preprocess(&mut tokens.into_iter().peekable(), &mut macros)?;
    // Re-print the tokens, restoring line breaks and column padding from the
    // positions recorded by the lexer.
    let mut output = String::new();
    let mut line = 0;
    let mut start = 0;
    for token in tokens {
        if token.line != line {
            output.push_str(&"\n".repeat(token.line - line));
            start = 0;
            line = token.line;
        }
        output.push_str(&" ".repeat(token.chars.start - start));
        output.push_str(&token.token.to_string());
        start = token.chars.end;
    }
    Ok(output)
}
#![allow(clippy::all)]
#![allow(dead_code)]
use super::{Error, ErrorKind};
use crate::FastHashMap;
use std::{
fmt,
iter::Peekable,
ops::{Deref, Range},
vec::IntoIter,
};
pub mod lex;
#[cfg(feature = "glsl_preprocessor")]
#[path = "./preprocessor.rs"]
pub mod preprocessor;
type Tokens = Peekable<IntoIter<TokenMetadata>>;
/// A [`Token`] together with its position in the source, used for error
/// reporting and for re-printing the preprocessed output.
#[derive(Debug, Clone)]
pub struct TokenMetadata {
    /// The token itself.
    pub token: Token,
    /// Zero-based line the token was found on.
    pub line: usize,
    /// Half-open column range of the token within its line.
    pub chars: Range<usize>,
}
/// Lets a `TokenMetadata` be used wherever a `&Token` is expected.
impl Deref for TokenMetadata {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
Separator(char),
DoubleColon,
Paren(char),
Integral(usize),
Float(f32),
Double(f64),
Word(String),
Operation(char),
OpAssign(char),
LogicalOperation(char),
ShiftOperation(char),
Unknown(char),
LineComment,
MultiLineCommentOpen,
MultiLineCommentClose,
Preprocessor,
End,
Selection,
Sufix(char),
TokenPasting,
}
impl Token {
    /// Returns a human-readable rendering of the token (its class name for
    /// literals/words), used when reporting which tokens were expected.
    pub fn type_to_string(&self) -> String {
        match self {
            Token::Separator(separator) => separator.to_string(),
            Token::DoubleColon => ":".to_string(),
            Token::Paren(paren) => paren.to_string(),
            Token::Integral(_) => "integer".to_string(),
            Token::Float(_) => "float".to_string(),
            Token::Double(_) => "double".to_string(),
            Token::Word(_) => "word".to_string(),
            Token::Operation(op) => op.to_string(),
            Token::OpAssign(op) => format!("{}=", op),
            // The lexer stores `&&`/`||` as a single `&`/`|`, so those must be
            // doubled; `<=`, `>=`, `!=`, `==` are the stored char plus `=`.
            // (Previously `&&`/`||` were wrongly rendered as `&=`/`|=`.)
            Token::LogicalOperation(op) => match op {
                '&' | '|' => format!("{0}{0}", op),
                _ => format!("{}=", op),
            },
            Token::ShiftOperation(op) => format!("{0}{0}", op),
            Token::Unknown(_) => "unknown".to_string(),
            Token::LineComment => "//".to_string(),
            Token::MultiLineCommentOpen => "/*".to_string(),
            Token::MultiLineCommentClose => "*/".to_string(),
            Token::Preprocessor => "#".to_string(),
            Token::End => "EOF".to_string(),
            Token::Selection => "?".to_string(),
            Token::Sufix(op) => format!("{0}{0}", op),
            Token::TokenPasting => "##".to_string(),
        }
    }
}
/// Prints the token exactly as it appears in source; used to re-emit the
/// preprocessed text.
impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::Separator(sep) => write!(f, "{}", sep),
            Token::DoubleColon => write!(f, ":"),
            Token::Paren(paren) => write!(f, "{}", paren),
            Token::Integral(int) => write!(f, "{}", int),
            Token::Float(float) => write!(f, "{}", float),
            Token::Double(double) => write!(f, "{}", double),
            Token::Word(word) => write!(f, "{}", word),
            Token::Operation(op) => write!(f, "{}", op),
            Token::OpAssign(op) => write!(f, "{}=", op),
            // The lexer stores `&&`/`||` as a single `&`/`|`, so those must be
            // doubled on output; `<=`, `>=`, `!=`, `==` are the stored char
            // plus `=`. (Previously `&&`/`||` re-emitted as `&=`/`|=`,
            // corrupting round-tripped source.)
            Token::LogicalOperation(op) => match op {
                '&' | '|' => write!(f, "{0}{0}", op),
                _ => write!(f, "{0}=", op),
            },
            Token::ShiftOperation(op) => write!(f, "{0}{0}", op),
            Token::Unknown(unknown) => write!(f, "{}", unknown),
            Token::LineComment => write!(f, "//"),
            Token::MultiLineCommentOpen => write!(f, "/*"),
            Token::MultiLineCommentClose => write!(f, "*/"),
            Token::Preprocessor => write!(f, "#"),
            Token::End => write!(f, ""),
            Token::Selection => write!(f, "?"),
            Token::Sufix(op) => write!(f, "{0}{0}", op),
            Token::TokenPasting => write!(f, "##"),
        }
    }
}
/// A node of the (currently minimal) preprocessor expression tree.
#[derive(Debug)]
pub enum Node {
    /// A plain identifier.
    Ident(String),
    /// A literal constant.
    Const(Literal),
}
/// A literal value appearing in a preprocessor expression.
#[derive(Debug, Copy, Clone)]
pub enum Literal {
    Double(f64),
    Float(f32),
    Uint(usize),
    Sint(isize),
    Bool(bool),
}
/// Parses a primary expression: an identifier, a literal, or (eventually) a
/// parenthesised sub-expression. Errors on end of input or any other token.
fn parse_primary_expression(tokens: &mut Tokens) -> Result<Node, Error> {
    let token = tokens.next().ok_or(Error {
        kind: ErrorKind::EOF,
    })?;
    match token.token {
        Token::Word(ident) => {
            // `true`/`false` lex as words but are boolean literals.
            let node = match ident.as_str() {
                "true" => Node::Const(Literal::Bool(true)),
                "false" => Node::Const(Literal::Bool(false)),
                _ => Node::Ident(ident),
            };
            Ok(node)
        }
        Token::Integral(value) => Ok(Node::Const(Literal::Uint(value))),
        Token::Float(value) => Ok(Node::Const(Literal::Float(value))),
        Token::Double(value) => Ok(Node::Const(Literal::Double(value))),
        Token::Paren('(') => todo!(), /* parse_expression */
        _ => Err(Error {
            kind: ErrorKind::UnexpectedToken {
                expected: vec![
                    Token::Word(String::new()),
                    Token::Integral(0),
                    Token::Double(0.0),
                    Token::Float(0.0),
                    Token::Paren('('),
                ],
                got: token,
            },
        }),
    }
}
/// Runs the lexer to completion, discarding `//` and `/* ... */` comments and
/// collecting every remaining token (including the terminating `End`).
pub(self) fn parse_comments(mut lexer: lex::Lexer) -> Result<Vec<TokenMetadata>, Error> {
    let mut tokens = Vec::new();
    loop {
        let token = lexer.next();
        match token.token {
            Token::MultiLineCommentOpen => {
                // Discard everything up to the matching `*/`; reaching the end
                // of input first means the comment was never closed.
                loop {
                    let inner = lexer.next();
                    if inner.token == Token::MultiLineCommentClose {
                        break;
                    }
                    if inner.token == Token::End {
                        return Err(Error {
                            kind: ErrorKind::EOF,
                        });
                    }
                }
            }
            Token::LineComment => {
                // Discard the rest of the current line.
                while token.line == lexer.peek().line && Token::End != lexer.peek().token {
                    let _ = lexer.next();
                }
            }
            Token::End => {
                tokens.push(token);
                break;
            }
            _ => tokens.push(token),
        }
    }
    Ok(tokens)
}
/// Strips comments, runs the (feature-gated) preprocessor, and re-emits the
/// source as a `String`, preserving each token's recorded line and column.
pub fn parse(input: &str) -> Result<String, Error> {
    let lexer = lex::Lexer::new(input);
    let tokens = parse_comments(lexer)?;
    // Built-in macro definitions made visible to the preprocessor.
    let mut macros = FastHashMap::default();
    for name in &["GL_SPIRV", "VULKAN"] {
        macros.insert(
            String::from(*name),
            vec![TokenMetadata {
                token: Token::Integral(100),
                line: 0,
                chars: 0..1,
            }],
        );
    }
    log::trace!("------GLSL COMMENT STRIPPED------");
    log::trace!("\n{:#?}", tokens);
    log::trace!("---------------------------------");
    #[cfg(feature = "glsl_preprocessor")]
    let tokens = preprocessor::preprocess(&mut tokens.into_iter().peekable(), &mut macros)?;
    // Re-print the tokens, restoring line breaks and column padding from the
    // positions recorded by the lexer.
    let mut output = String::new();
    let mut line = 0;
    let mut start = 0;
    for token in tokens {
        if token.line != line {
            output.push_str(&"\n".repeat(token.line - line));
            start = 0;
            line = token.line;
        }
        output.push_str(&" ".repeat(token.chars.start - start));
        output.push_str(&token.token.to_string());
        start = token.chars.end;
    }
    Ok(output)
}

View File

@ -1,254 +1,254 @@
use super::{Token, TokenMetadata};
use std::{iter::Enumerate, str::Lines};
/// Strips `what` from the front of `input`, returning the remainder, or
/// `None` if `input` does not start with `what`.
fn _consume_str<'a>(input: &'a str, what: &str) -> Option<&'a str> {
    if !input.starts_with(what) {
        return None;
    }
    Some(&input[what.len()..])
}
/// Splits `input` at the longest prefix whose chars all satisfy `what`.
/// Returns `(prefix, rest, prefix_len)`.
fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str, usize) {
    // Byte index of the first non-matching char, or the whole input.
    let pos = match input.find(|c| !what(c)) {
        Some(p) => p,
        None => input.len(),
    };
    let (consumed, rest) = input.split_at(pos);
    (consumed, rest, pos)
}
/// Consumes one token from the front of `input`.
///
/// Returns `(token, rest, start, end)`: the token, the unconsumed tail of the
/// line, and the token's half-open column range (leading whitespace skipped).
// NOTE(review): `&String` kept for API stability; `&str` would be more
// idiomatic and call sites would still coerce.
pub fn consume_token(input: &String) -> (Token, &str, usize, usize) {
    let mut input = input.as_str();
    // Skip leading whitespace; if nothing remains, the line is exhausted.
    // NOTE(review): `find` yields a byte index while the fallback counts
    // chars — these only agree for ASCII input; confirm inputs are ASCII.
    let start = input
        .find(|c: char| !c.is_whitespace())
        .unwrap_or(input.chars().count());
    input = &input[start..];
    let mut chars = input.chars();
    let cur = match chars.next() {
        Some(c) => c,
        None => return (Token::End, input, start, start + 1),
    };
    match cur {
        ':' => {
            input = chars.as_str();
            if chars.next() == Some(':') {
                (Token::DoubleColon, chars.as_str(), start, start + 2)
            } else {
                (Token::Separator(cur), input, start, start + 1)
            }
        }
        ';' | ',' | '.' => (Token::Separator(cur), chars.as_str(), start, start + 1),
        '(' | ')' | '{' | '}' | '[' | ']' => (Token::Paren(cur), chars.as_str(), start, start + 1),
        '<' | '>' => {
            input = chars.as_str();
            let next = chars.next();
            if next == Some('=') {
                // `<=` / `>=` span two characters (the end position was
                // previously reported as `start + 1`, off by one).
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else if next == Some(cur) {
                (Token::ShiftOperation(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '0'..='9' => {
            // Greedily take digits and dots, then classify the literal.
            let (number, rest, pos) = consume_any(input, |c| (c >= '0' && c <= '9' || c == '.'));
            if number.contains('.') {
                // A `lf`/`LF` suffix right after the digits marks a double
                // literal. (Previously the suffix was read from `chars`, which
                // still pointed inside the number, so doubles were never
                // recognized and the float branch returned a tail that
                // re-lexed part of the number itself.)
                let mut tail = rest.chars();
                let suffix = (
                    tail.next().map(|c| c.to_ascii_lowercase()),
                    tail.next().map(|c| c.to_ascii_lowercase()),
                );
                if suffix == (Some('l'), Some('f')) {
                    (
                        Token::Double(number.parse().unwrap()),
                        tail.as_str(),
                        start,
                        start + pos + 2,
                    )
                } else {
                    (
                        Token::Float(number.parse().unwrap()),
                        rest,
                        start,
                        start + pos,
                    )
                }
            } else {
                (
                    Token::Integral(number.parse().unwrap()),
                    rest,
                    start,
                    start + pos,
                )
            }
        }
        'a'..='z' | 'A'..='Z' | '_' => {
            let (word, rest, pos) = consume_any(input, |c| c.is_alphanumeric() || c == '_');
            (Token::Word(String::from(word)), rest, start, start + pos)
        }
        '+' | '-' => {
            // `+=`/`-=` is a compound assignment, `++`/`--` a suffix operator.
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some(next) if cur == next => (Token::Sufix(cur), chars.as_str(), start, start + 2),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '%' | '^' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (Token::OpAssign(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '!' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '*' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (
                    Token::MultiLineCommentClose,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '/' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (Token::LineComment, chars.as_str(), start, start + 2),
                Some('*') => (
                    Token::MultiLineCommentOpen,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '=' | '&' | '|' => {
            // Doubled (`==`, `&&`, `||`) is a logical operation; otherwise a
            // plain single-character operation.
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '#' => {
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (Token::TokenPasting, chars.as_str(), start, start + 2)
            } else {
                (Token::Preprocessor, input, start, start + 1)
            }
        }
        '~' => (Token::Operation(cur), chars.as_str(), start, start + 1),
        '?' => (Token::Selection, chars.as_str(), start, start + 1),
        _ => (Token::Unknown(cur), chars.as_str(), start, start + 1),
    }
}
/// A line-oriented GLSL lexer.
///
/// Holds only the current logical line; backslash-continued physical lines
/// are spliced together as they are read.
#[derive(Clone, Debug)]
pub struct Lexer<'a> {
    /// Remaining physical lines, paired with their zero-based line numbers.
    lines: Enumerate<Lines<'a>>,
    /// The not-yet-consumed tail of the current logical line.
    input: String,
    /// Zero-based number of the current line.
    line: usize,
    /// Columns already consumed from the current line.
    offset: usize,
}
impl<'a> Lexer<'a> {
pub fn new(input: &'a str) -> Self {
let mut lines = input.lines().enumerate();
let (line, input) = lines.next().unwrap_or((0, ""));
let mut input = String::from(input);
while input.chars().last() == Some('\\') {
if let Some((_, next)) = lines.next() {
input.pop();
input.push_str(next);
} else {
break;
}
}
Lexer {
lines,
input,
line,
offset: 0,
}
}
#[must_use]
pub fn next(&mut self) -> TokenMetadata {
let (token, rest, start, end) = consume_token(&self.input);
if token == Token::End {
match self.lines.next() {
Some((line, input)) => {
let mut input = String::from(input);
while input.chars().last() == Some('\\') {
if let Some((_, next)) = self.lines.next() {
input.pop();
input.push_str(next);
} else {
break;
}
}
self.input = input;
self.line = line;
self.offset = 0;
self.next()
}
None => TokenMetadata {
token: Token::End,
line: self.line,
chars: self.offset + start..end + self.offset,
},
}
} else {
self.input = String::from(rest);
let metadata = TokenMetadata {
token,
line: self.line,
chars: self.offset + start..end + self.offset,
};
self.offset += end;
metadata
}
}
#[must_use]
pub fn peek(&mut self) -> TokenMetadata {
self.clone().next()
}
}
use super::{Token, TokenMetadata};
use std::{iter::Enumerate, str::Lines};
/// Strips `what` from the front of `input`, returning the remainder, or
/// `None` if `input` does not start with `what`.
fn _consume_str<'a>(input: &'a str, what: &str) -> Option<&'a str> {
    if !input.starts_with(what) {
        return None;
    }
    Some(&input[what.len()..])
}
/// Splits `input` at the longest prefix whose chars all satisfy `what`.
/// Returns `(prefix, rest, prefix_len)`.
fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str, usize) {
    // Byte index of the first non-matching char, or the whole input.
    let pos = match input.find(|c| !what(c)) {
        Some(p) => p,
        None => input.len(),
    };
    let (consumed, rest) = input.split_at(pos);
    (consumed, rest, pos)
}
/// Consumes one token from the front of `input`.
///
/// Returns `(token, rest, start, end)`: the token, the unconsumed tail of the
/// line, and the token's half-open column range (leading whitespace skipped).
// NOTE(review): `&String` kept for API stability; `&str` would be more
// idiomatic and call sites would still coerce.
pub fn consume_token(input: &String) -> (Token, &str, usize, usize) {
    let mut input = input.as_str();
    // Skip leading whitespace; if nothing remains, the line is exhausted.
    // NOTE(review): `find` yields a byte index while the fallback counts
    // chars — these only agree for ASCII input; confirm inputs are ASCII.
    let start = input
        .find(|c: char| !c.is_whitespace())
        .unwrap_or(input.chars().count());
    input = &input[start..];
    let mut chars = input.chars();
    let cur = match chars.next() {
        Some(c) => c,
        None => return (Token::End, input, start, start + 1),
    };
    match cur {
        ':' => {
            input = chars.as_str();
            if chars.next() == Some(':') {
                (Token::DoubleColon, chars.as_str(), start, start + 2)
            } else {
                (Token::Separator(cur), input, start, start + 1)
            }
        }
        ';' | ',' | '.' => (Token::Separator(cur), chars.as_str(), start, start + 1),
        '(' | ')' | '{' | '}' | '[' | ']' => (Token::Paren(cur), chars.as_str(), start, start + 1),
        '<' | '>' => {
            input = chars.as_str();
            let next = chars.next();
            if next == Some('=') {
                // `<=` / `>=` span two characters (the end position was
                // previously reported as `start + 1`, off by one).
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else if next == Some(cur) {
                (Token::ShiftOperation(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '0'..='9' => {
            // Greedily take digits and dots, then classify the literal.
            let (number, rest, pos) = consume_any(input, |c| (c >= '0' && c <= '9' || c == '.'));
            if number.contains('.') {
                // A `lf`/`LF` suffix right after the digits marks a double
                // literal. (Previously the suffix was read from `chars`, which
                // still pointed inside the number, so doubles were never
                // recognized and the float branch returned a tail that
                // re-lexed part of the number itself.)
                let mut tail = rest.chars();
                let suffix = (
                    tail.next().map(|c| c.to_ascii_lowercase()),
                    tail.next().map(|c| c.to_ascii_lowercase()),
                );
                if suffix == (Some('l'), Some('f')) {
                    (
                        Token::Double(number.parse().unwrap()),
                        tail.as_str(),
                        start,
                        start + pos + 2,
                    )
                } else {
                    (
                        Token::Float(number.parse().unwrap()),
                        rest,
                        start,
                        start + pos,
                    )
                }
            } else {
                (
                    Token::Integral(number.parse().unwrap()),
                    rest,
                    start,
                    start + pos,
                )
            }
        }
        'a'..='z' | 'A'..='Z' | '_' => {
            let (word, rest, pos) = consume_any(input, |c| c.is_alphanumeric() || c == '_');
            (Token::Word(String::from(word)), rest, start, start + pos)
        }
        '+' | '-' => {
            // `+=`/`-=` is a compound assignment, `++`/`--` a suffix operator.
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some(next) if cur == next => (Token::Sufix(cur), chars.as_str(), start, start + 2),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '%' | '^' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (Token::OpAssign(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '!' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '*' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (
                    Token::MultiLineCommentClose,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '/' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (Token::LineComment, chars.as_str(), start, start + 2),
                Some('*') => (
                    Token::MultiLineCommentOpen,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        '=' | '&' | '|' => {
            // Doubled (`==`, `&&`, `||`) is a logical operation; otherwise a
            // plain single-character operation.
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        '#' => {
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (Token::TokenPasting, chars.as_str(), start, start + 2)
            } else {
                (Token::Preprocessor, input, start, start + 1)
            }
        }
        '~' => (Token::Operation(cur), chars.as_str(), start, start + 1),
        '?' => (Token::Selection, chars.as_str(), start, start + 1),
        _ => (Token::Unknown(cur), chars.as_str(), start, start + 1),
    }
}
/// A line-oriented GLSL lexer.
///
/// Holds only the current logical line; backslash-continued physical lines
/// are spliced together as they are read.
#[derive(Clone, Debug)]
pub struct Lexer<'a> {
    /// Remaining physical lines, paired with their zero-based line numbers.
    lines: Enumerate<Lines<'a>>,
    /// The not-yet-consumed tail of the current logical line.
    input: String,
    /// Zero-based number of the current line.
    line: usize,
    /// Columns already consumed from the current line.
    offset: usize,
}
impl<'a> Lexer<'a> {
pub fn new(input: &'a str) -> Self {
let mut lines = input.lines().enumerate();
let (line, input) = lines.next().unwrap_or((0, ""));
let mut input = String::from(input);
while input.chars().last() == Some('\\') {
if let Some((_, next)) = lines.next() {
input.pop();
input.push_str(next);
} else {
break;
}
}
Lexer {
lines,
input,
line,
offset: 0,
}
}
#[must_use]
pub fn next(&mut self) -> TokenMetadata {
let (token, rest, start, end) = consume_token(&self.input);
if token == Token::End {
match self.lines.next() {
Some((line, input)) => {
let mut input = String::from(input);
while input.chars().last() == Some('\\') {
if let Some((_, next)) = self.lines.next() {
input.pop();
input.push_str(next);
} else {
break;
}
}
self.input = input;
self.line = line;
self.offset = 0;
self.next()
}
None => TokenMetadata {
token: Token::End,
line: self.line,
chars: self.offset + start..end + self.offset,
},
}
} else {
self.input = String::from(rest);
let metadata = TokenMetadata {
token,
line: self.line,
chars: self.offset + start..end + self.offset,
};
self.offset += end;
metadata
}
}
#[must_use]
pub fn peek(&mut self) -> TokenMetadata {
self.clone().next()
}
}

File diff suppressed because it is too large. (Load Diff)

View File

@ -1,14 +1,14 @@
// Preprocessor conditional fixture. `TEST_EXPR - 2 == 0` expands to
// `2 && 2 - 2 == 0`, i.e. `2 && ((2 - 2) == 0)`, which is true — so the
// `#error 0` branch is the one expected to fire.
#version 460 core
#define TEST 3
#define TEST_EXPR 2 && 2
#if TEST_EXPR - 2 == 0
#error 0
#elif TEST_EXPR - 2 == 1
#error 1
#elif TEST_EXPR - 2 == 2
#error 2
#else
#error You shouldn't do that
#endif
// Preprocessor conditional fixture. `TEST_EXPR - 2 == 0` expands to
// `2 && 2 - 2 == 0`, i.e. `2 && ((2 - 2) == 0)`, which is true — so the
// `#error 0` branch is the one expected to fire.
#version 460 core
#define TEST 3
#define TEST_EXPR 2 && 2
#if TEST_EXPR - 2 == 0
#error 0
#elif TEST_EXPR - 2 == 1
#error 1
#elif TEST_EXPR - 2 == 2
#error 2
#else
#error You shouldn't do that
#endif

View File

@ -1,11 +1,11 @@
#version 450 core
#define MAIN void main() {
#define V_POSITION layout(location=0) in vec4 a_position;
#define ASSIGN_POSITION gl_Position = a_position;
V_POSITION
MAIN
ASSIGN_POSITION
// Macro-expansion fixture: after preprocessing this should become
//   layout(location=0) in vec4 a_position;
//   void main() { gl_Position = a_position; }
// (the trailing `}` below closes the brace opened inside MAIN).
#version 450 core
#define MAIN void main() {
#define V_POSITION layout(location=0) in vec4 a_position;
#define ASSIGN_POSITION gl_Position = a_position;
V_POSITION
MAIN
ASSIGN_POSITION
}