[glsl] Promote glsl-new to glsl (#184)

* [glsl] Promote glsl-new to glsl

* [glsl-in] rename feature glsl > glsl-in
This commit is contained in:
Pelle Johnsen 2020-09-10 07:11:52 +02:00 committed by GitHub
parent 2fa8eb596d
commit 2ea0310b63
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 74 additions and 3905 deletions

View File

@ -15,7 +15,6 @@ fxhash = "0.2"
log = "0.4"
num-traits = "0.2"
spirv = { package = "spirv_headers", version = "1.4.2", optional = true }
glsl = { version = "4.1", optional = true }
pomelo = { version = "0.1.4", optional = true }
thiserror = "1.0"
serde = { version = "1.0", features = ["derive"], optional = true }
@ -23,8 +22,7 @@ petgraph = { version ="0.5", optional = true }
[features]
default = []
glsl_preprocessor = ["glsl"]
glsl-new = ["pomelo"]
glsl-in = ["pomelo"]
glsl-validate = []
glsl-out = []
serialize = ["serde"]

View File

@ -29,9 +29,6 @@ fn main() {
println!("Call with <input> <output>");
return;
}
#[cfg(any(feature = "glsl", feature = "glsl-new"))]
let prefer_glsl_new =
!cfg!(feature = "glsl") || env::var("PREFER_GLSL_NEW").unwrap_or_default() == "1";
let module = match Path::new(&args[1])
.extension()
.expect("Input has no extension?")
@ -48,75 +45,25 @@ fn main() {
let input = fs::read_to_string(&args[1]).unwrap();
naga::front::wgsl::parse_str(&input).unwrap()
}
#[cfg(any(feature = "glsl", feature = "glsl-new"))]
#[cfg(feature = "glsl-in")]
"vert" => {
let input = fs::read_to_string(&args[1]).unwrap();
let mut module: Option<naga::Module> = None;
if prefer_glsl_new {
#[cfg(feature = "glsl-new")]
{
module = Some(
naga::front::glsl_new::parse_str(
&input,
"main".to_string(),
naga::ShaderStage::Vertex,
)
.unwrap(),
)
}
}
if module.is_none() {
#[cfg(feature = "glsl")]
{
module = Some(
naga::front::glsl::parse_str(
&input,
"main".to_string(),
naga::ShaderStage::Vertex,
)
.unwrap(),
)
}
}
module.unwrap()
naga::front::glsl::parse_str(&input, "main".to_string(), naga::ShaderStage::Vertex)
.unwrap()
}
#[cfg(any(feature = "glsl", feature = "glsl-new"))]
#[cfg(feature = "glsl-in")]
"frag" => {
let input = fs::read_to_string(&args[1]).unwrap();
let mut module = None;
if prefer_glsl_new {
#[cfg(feature = "glsl-new")]
{
module = Some(
naga::front::glsl_new::parse_str(
&input,
"main".to_string(),
naga::ShaderStage::Fragment {
early_depth_test: None,
},
)
.unwrap(),
)
}
}
if module.is_none() {
#[cfg(feature = "glsl")]
{
module = Some(
naga::front::glsl::parse_str(
&input,
"main".to_string(),
naga::ShaderStage::Fragment {
early_depth_test: None,
},
)
.unwrap(),
)
}
}
module.unwrap()
naga::front::glsl::parse_str(
&input,
"main".to_string(),
naga::ShaderStage::Fragment {
early_depth_test: None,
},
)
.unwrap()
}
#[cfg(feature = "glsl")]
#[cfg(feature = "glsl-in")]
"comp" => {
let input = fs::read_to_string(&args[1]).unwrap();
naga::front::glsl::parse_str(

View File

@ -1,283 +0,0 @@
use crate::{ImageClass, ImageDimension, ScalarKind, TypeInner, VectorSize};
use glsl::syntax::{BinaryOp, TypeSpecifierNonArray, UnaryOp};
pub fn glsl_to_spirv_unary_op(op: UnaryOp) -> crate::UnaryOperator {
match op {
UnaryOp::Inc => todo!(),
UnaryOp::Dec => todo!(),
UnaryOp::Add => todo!(),
UnaryOp::Minus => crate::UnaryOperator::Negate,
UnaryOp::Not => crate::UnaryOperator::Not,
UnaryOp::Complement => todo!(),
}
}
pub fn glsl_to_spirv_binary_op(op: BinaryOp) -> crate::BinaryOperator {
match op {
BinaryOp::Or => crate::BinaryOperator::LogicalOr,
BinaryOp::Xor => todo!(),
BinaryOp::And => crate::BinaryOperator::LogicalAnd,
BinaryOp::BitOr => crate::BinaryOperator::InclusiveOr,
BinaryOp::BitXor => crate::BinaryOperator::ExclusiveOr,
BinaryOp::BitAnd => crate::BinaryOperator::And,
BinaryOp::Equal => crate::BinaryOperator::Equal,
BinaryOp::NonEqual => crate::BinaryOperator::NotEqual,
BinaryOp::LT => crate::BinaryOperator::Less,
BinaryOp::GT => crate::BinaryOperator::Greater,
BinaryOp::LTE => crate::BinaryOperator::LessEqual,
BinaryOp::GTE => crate::BinaryOperator::GreaterEqual,
BinaryOp::LShift => crate::BinaryOperator::ShiftLeftLogical,
BinaryOp::RShift => crate::BinaryOperator::ShiftRightArithmetic,
BinaryOp::Add => crate::BinaryOperator::Add,
BinaryOp::Sub => crate::BinaryOperator::Subtract,
BinaryOp::Mult => crate::BinaryOperator::Multiply,
BinaryOp::Div => crate::BinaryOperator::Divide,
BinaryOp::Mod => crate::BinaryOperator::Modulo,
}
}
/// Converts a GLSL `TypeSpecifierNonArray` into the IR's `TypeInner`.
///
/// Returns `None` for `Void`. Texture type names are decoded from their
/// `[iu]texture<Dim>[Array]` spelling; `sampler`/`samplerShadow` map to
/// sampler types. Unrecognized texture names `panic!`, and any other
/// specifier hits `unimplemented!()`.
///
/// NOTE(review): widths look like byte sizes (1 for bool, 4 for 32-bit,
/// 8 for doubles) — confirm against the IR's width convention.
pub fn glsl_to_spirv_type(ty: TypeSpecifierNonArray) -> Option<TypeInner> {
    use TypeSpecifierNonArray::*;
    Some(match ty {
        Void => return None,
        // Scalars.
        Bool => TypeInner::Scalar {
            kind: ScalarKind::Bool,
            width: 1,
        },
        Int => TypeInner::Scalar {
            kind: ScalarKind::Sint,
            width: 4,
        },
        UInt => TypeInner::Scalar {
            kind: ScalarKind::Uint,
            width: 4,
        },
        Float => TypeInner::Scalar {
            kind: ScalarKind::Float,
            width: 4,
        },
        Double => TypeInner::Scalar {
            kind: ScalarKind::Float,
            width: 8,
        },
        // Float vectors.
        Vec2 => TypeInner::Vector {
            size: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 4,
        },
        Vec3 => TypeInner::Vector {
            size: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 4,
        },
        Vec4 => TypeInner::Vector {
            size: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 4,
        },
        // Double vectors.
        DVec2 => TypeInner::Vector {
            size: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 8,
        },
        DVec3 => TypeInner::Vector {
            size: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 8,
        },
        DVec4 => TypeInner::Vector {
            size: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 8,
        },
        // Bool vectors.
        BVec2 => TypeInner::Vector {
            size: VectorSize::Bi,
            kind: ScalarKind::Bool,
            width: 1,
        },
        BVec3 => TypeInner::Vector {
            size: VectorSize::Tri,
            kind: ScalarKind::Bool,
            width: 1,
        },
        BVec4 => TypeInner::Vector {
            size: VectorSize::Quad,
            kind: ScalarKind::Bool,
            width: 1,
        },
        // Signed integer vectors.
        IVec2 => TypeInner::Vector {
            size: VectorSize::Bi,
            kind: ScalarKind::Sint,
            width: 4,
        },
        IVec3 => TypeInner::Vector {
            size: VectorSize::Tri,
            kind: ScalarKind::Sint,
            width: 4,
        },
        IVec4 => TypeInner::Vector {
            size: VectorSize::Quad,
            kind: ScalarKind::Sint,
            width: 4,
        },
        // Unsigned integer vectors.
        UVec2 => TypeInner::Vector {
            size: VectorSize::Bi,
            kind: ScalarKind::Uint,
            width: 4,
        },
        UVec3 => TypeInner::Vector {
            size: VectorSize::Tri,
            kind: ScalarKind::Uint,
            width: 4,
        },
        UVec4 => TypeInner::Vector {
            size: VectorSize::Quad,
            kind: ScalarKind::Uint,
            width: 4,
        },
        // Float Matrices
        // (GLSL `matCxR` spelling: first digit = columns, second = rows.)
        Mat2 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat3 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat4 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat23 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat24 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat32 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat34 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat42 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 4,
        },
        Mat43 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 4,
        },
        // Double Matrices
        DMat2 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat3 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat4 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat23 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat24 => TypeInner::Matrix {
            columns: VectorSize::Bi,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat32 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat34 => TypeInner::Matrix {
            columns: VectorSize::Tri,
            rows: VectorSize::Quad,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat42 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Bi,
            kind: ScalarKind::Float,
            width: 8,
        },
        DMat43 => TypeInner::Matrix {
            columns: VectorSize::Quad,
            rows: VectorSize::Tri,
            kind: ScalarKind::Float,
            width: 8,
        },
        // Named types: textures and samplers.
        TypeName(ty_name) => {
            if let Some(t_pos) = ty_name.0.find("texture") {
                // The prefix before "texture" selects the sampled kind:
                // "" -> float, "i" -> sint, "u" -> uint.
                let kind = match &ty_name.0[..t_pos] {
                    "" => ScalarKind::Float,
                    "i" => ScalarKind::Sint,
                    "u" => ScalarKind::Uint,
                    // NOTE(review): unknown prefixes panic with no message.
                    _ => panic!(),
                };
                let arrayed = ty_name.0.ends_with("Array");
                // The suffix after "texture" (7 chars) gives dimension and
                // class.
                let (dim, class) = match &ty_name.0[(t_pos + 7)..] {
                    "1D" | "1DArray" => (ImageDimension::D1, ImageClass::Sampled),
                    "2D" | "2DArray" => (ImageDimension::D2, ImageClass::Sampled),
                    "3D" | "3DArray" => (ImageDimension::D3, ImageClass::Sampled),
                    "2DMS" | "2DMSArray" => (ImageDimension::D2, ImageClass::Multisampled),
                    "Cube" | "CubeArray" => (ImageDimension::Cube, ImageClass::Sampled),
                    _ => panic!(),
                };
                return Some(TypeInner::Image {
                    kind,
                    dim,
                    arrayed,
                    class,
                });
            }
            match ty_name.0.as_str() {
                "sampler" => TypeInner::Sampler { comparison: false },
                "samplerShadow" => TypeInner::Sampler { comparison: true },
                _ => unimplemented!(),
            }
        }
        _ => unimplemented!(),
    })
}

File diff suppressed because it is too large Load Diff

View File

@ -1,254 +0,0 @@
use super::{Token, TokenMetadata};
use std::{iter::Enumerate, str::Lines};
/// Strips `what` from the front of `input`, returning the remainder.
///
/// Returns `None` when `input` does not start with `what`. Currently unused
/// (hence the leading underscore), kept for symmetry with `consume_any`.
fn _consume_str<'a>(input: &'a str, what: &str) -> Option<&'a str> {
    // `strip_prefix` is the idiomatic, slice-safe form of the manual
    // `starts_with` check plus `&input[what.len()..]` indexing.
    input.strip_prefix(what)
}
/// Consumes the longest prefix of `input` whose chars satisfy `what`.
///
/// Returns `(consumed, rest, len)`, where `len` is the byte length of the
/// consumed prefix (equal to its char count only for ASCII).
fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str, usize) {
    // If every char satisfies the predicate, the whole input is consumed.
    // `len()` is free, so the eager `unwrap_or` beats the lazy closure form
    // (clippy: unnecessary_lazy_evaluations).
    let pos = input.find(|c| !what(c)).unwrap_or(input.len());
    let (consumed, rest) = input.split_at(pos);
    (consumed, rest, pos)
}
/// Lexes one token from the front of `input` (one logical source line).
///
/// Returns `(token, rest_of_input, start, end)`, where `start..end` bounds
/// the token's position within the line after leading whitespace is
/// skipped.
///
/// NOTE(review): the parameter would idiomatically be `&str` rather than
/// `&String` — callers can still pass `&String` via deref coercion.
pub fn consume_token(input: &String) -> (Token, &str, usize, usize) {
    let mut input = input.as_str();
    // Skip leading whitespace.
    // NOTE(review): `find` yields a *byte* index while the fallback is a
    // *char* count — these agree only for ASCII input; confirm inputs are
    // ASCII-only.
    let start = input
        .find(|c: char| !c.is_whitespace())
        .unwrap_or(input.chars().count());
    input = &input[start..];
    let mut chars = input.chars();
    let cur = match chars.next() {
        Some(c) => c,
        None => return (Token::End, input, start, start + 1),
    };
    match cur {
        // `::` or a lone `:` separator.
        ':' => {
            input = chars.as_str();
            if chars.next() == Some(':') {
                (Token::DoubleColon, chars.as_str(), start, start + 2)
            } else {
                (Token::Separator(cur), input, start, start + 1)
            }
        }
        ';' | ',' | '.' => (Token::Separator(cur), chars.as_str(), start, start + 1),
        '(' | ')' | '{' | '}' | '[' | ']' => (Token::Paren(cur), chars.as_str(), start, start + 1),
        // `<=`/`>=`, `<<`/`>>`, or a single comparison operator.
        '<' | '>' => {
            input = chars.as_str();
            let next = chars.next();
            if next == Some('=') {
                // NOTE(review): this consumes two chars but reports
                // `start + 1` as the end — looks like an off-by-one; the
                // other two-char tokens report `start + 2`. TODO confirm.
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 1,
                )
            } else if next == Some(cur) {
                (Token::ShiftOperation(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        // Numeric literal: integer, float, or `lf`-suffixed double.
        '0'..='9' => {
            let (number, rest, pos) = consume_any(input, |c| (c >= '0' && c <= '9' || c == '.'))
            if let Some(_) = number.find('.') {
                // NOTE(review): `chars` is only one char past the *start* of
                // the number here, so this suffix check inspects the 2nd and
                // 3rd chars of the literal, not the chars following it, and
                // the returned rest (`chars.as_str()`) is inconsistent with
                // the integer branch's `rest`. TODO confirm intended
                // behavior before reuse.
                if (
                    chars.next().map(|c| c.to_lowercase().next().unwrap()),
                    chars.next().map(|c| c.to_lowercase().next().unwrap()),
                ) == (Some('l'), Some('f'))
                {
                    (
                        Token::Double(number.parse().unwrap()),
                        chars.as_str(),
                        start,
                        start + pos + 2,
                    )
                } else {
                    (
                        Token::Float(number.parse().unwrap()),
                        chars.as_str(),
                        start,
                        start + pos,
                    )
                }
            } else {
                (
                    Token::Integral(number.parse().unwrap()),
                    rest,
                    start,
                    start + pos,
                )
            }
        }
        // Identifier or keyword.
        'a'..='z' | 'A'..='Z' | '_' => {
            let (word, rest, pos) = consume_any(input, |c| c.is_alphanumeric() || c == '_');
            (Token::Word(String::from(word)), rest, start, start + pos)
        }
        // `+=`/`-=`, `++`/`--`, or plain `+`/`-`.
        '+' | '-' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some(next) if cur == next => (Token::Sufix(cur), chars.as_str(), start, start + 2),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        // `%=`/`^=` or plain `%`/`^`.
        '%' | '^' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (Token::OpAssign(cur), chars.as_str(), start, start + 2)
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        // `!=` or logical not.
        '!' => {
            input = chars.as_str();
            if chars.next() == Some('=') {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        // `*=`, `*/` (comment close), or `*`.
        '*' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (
                    Token::MultiLineCommentClose,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        // `/=`, `//`, `/*`, or `/`.
        '/' => {
            input = chars.as_str();
            match chars.next() {
                Some('=') => (Token::OpAssign(cur), chars.as_str(), start, start + 2),
                Some('/') => (Token::LineComment, chars.as_str(), start, start + 2),
                Some('*') => (
                    Token::MultiLineCommentOpen,
                    chars.as_str(),
                    start,
                    start + 2,
                ),
                _ => (Token::Operation(cur), input, start, start + 1),
            }
        }
        // Doubled char (`==`, `&&`, `||`) is a logical operation.
        '=' | '&' | '|' => {
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (
                    Token::LogicalOperation(cur),
                    chars.as_str(),
                    start,
                    start + 2,
                )
            } else {
                (Token::Operation(cur), input, start, start + 1)
            }
        }
        // `##` (token pasting) or `#` (preprocessor directive).
        '#' => {
            input = chars.as_str();
            if chars.next() == Some(cur) {
                (Token::TokenPasting, chars.as_str(), start, start + 2)
            } else {
                (Token::Preprocessor, input, start, start + 1)
            }
        }
        '~' => (Token::Operation(cur), chars.as_str(), start, start + 1),
        '?' => (Token::Selection, chars.as_str(), start, start + 1),
        _ => (Token::Unknown(cur), chars.as_str(), start, start + 1),
    }
}
/// Line-oriented GLSL lexer.
///
/// Holds the current logical line (with `\` continuations already joined)
/// plus the iterator over the remaining physical lines. `Clone` is derived
/// so `peek` can run `next` on a throwaway copy.
#[derive(Clone, Debug)]
pub struct Lexer<'a> {
    lines: Enumerate<Lines<'a>>,
    input: String,
    line: usize,
    offset: usize,
}

impl<'a> Lexer<'a> {
    /// Joins physical lines ending in `\` (line continuations) into one
    /// logical line, pulling further lines from `lines` as needed.
    ///
    /// Extracted because `new` and `next` previously duplicated this loop;
    /// `ends_with('\\')` also replaces the original `chars().last()`
    /// check, which walked the whole string on every iteration.
    fn join_continuations(first: &str, lines: &mut Enumerate<Lines<'a>>) -> String {
        let mut logical = String::from(first);
        while logical.ends_with('\\') {
            match lines.next() {
                Some((_, next)) => {
                    logical.pop(); // drop the trailing backslash
                    logical.push_str(next);
                }
                None => break,
            }
        }
        logical
    }

    /// Creates a lexer positioned at the first logical line of `input`.
    pub fn new(input: &'a str) -> Self {
        let mut lines = input.lines().enumerate();
        let (line, first) = lines.next().unwrap_or((0, ""));
        let input = Self::join_continuations(first, &mut lines);
        Lexer {
            lines,
            input,
            line,
            offset: 0,
        }
    }

    /// Returns the next token with its position metadata, advancing the
    /// lexer. Produces `Token::End` once the input is exhausted.
    #[must_use]
    pub fn next(&mut self) -> TokenMetadata {
        // Iterative form of the original tail recursion: keep advancing to
        // the next logical line until a real token (or final End) appears.
        loop {
            let (token, rest, start, end) = consume_token(&self.input);
            if token != Token::End {
                self.input = String::from(rest);
                let metadata = TokenMetadata {
                    token,
                    line: self.line,
                    chars: self.offset + start..end + self.offset,
                };
                self.offset += end;
                return metadata;
            }
            match self.lines.next() {
                // Current logical line exhausted — move to the next one.
                Some((line, next)) => {
                    self.input = Self::join_continuations(next, &mut self.lines);
                    self.line = line;
                    self.offset = 0;
                }
                // No more lines: report End at the final position.
                None => {
                    return TokenMetadata {
                        token: Token::End,
                        line: self.line,
                        chars: self.offset + start..end + self.offset,
                    }
                }
            }
        }
    }

    /// Returns the next token without consuming it, by cloning the lexer.
    #[must_use]
    pub fn peek(&mut self) -> TokenMetadata {
        self.clone().next()
    }
}

View File

@ -1,245 +0,0 @@
#![allow(clippy::all)]
#![allow(dead_code)]
use super::{Error, ErrorKind};
use crate::FastHashMap;
use std::{
fmt,
iter::Peekable,
ops::{Deref, Range},
vec::IntoIter,
};
pub mod lex;
#[cfg(feature = "glsl_preprocessor")]
pub mod preprocessor;
/// Peekable stream of tokens, as produced by comment stripping.
type Tokens = Peekable<IntoIter<TokenMetadata>>;

/// A token together with its source position, used for error reporting and
/// for re-emitting preprocessed source with the original layout.
#[derive(Debug, Clone)]
pub struct TokenMetadata {
    pub token: Token,
    /// 0-based line index within the input.
    pub line: usize,
    /// Position range of the token within its line.
    pub chars: Range<usize>,
}

// Let a `TokenMetadata` be used wherever a `&Token` is expected.
impl Deref for TokenMetadata {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
Separator(char),
DoubleColon,
Paren(char),
Integral(usize),
Float(f32),
Double(f64),
Word(String),
Operation(char),
OpAssign(char),
LogicalOperation(char),
ShiftOperation(char),
Unknown(char),
LineComment,
MultiLineCommentOpen,
MultiLineCommentClose,
Preprocessor,
End,
Selection,
Sufix(char),
TokenPasting,
}
impl Token {
    /// Human-readable name of the token *kind*, used when listing expected
    /// tokens in error messages. Value-carrying variants print a generic
    /// description rather than the concrete value.
    pub fn type_to_string(&self) -> String {
        match self {
            Token::Separator(separator) => separator.to_string(),
            // Fix: `DoubleColon` is lexed from two colons, so its printable
            // form is "::", not ":".
            Token::DoubleColon => "::".to_string(),
            Token::Paren(paren) => paren.to_string(),
            Token::Integral(_) => "integer".to_string(),
            Token::Float(_) => "float".to_string(),
            Token::Double(_) => "double".to_string(),
            Token::Word(_) => "word".to_string(),
            Token::Operation(op) => op.to_string(),
            Token::OpAssign(op) => format!("{}=", op),
            // Fix: the lexer stores `==`/`&&`/`||` as LogicalOperation('='/
            // '&'/'|') — those render as the char doubled. Only `<=`, `>=`
            // and `!=` store the char *before* an `=`.
            Token::LogicalOperation(op) => match op {
                '=' | '&' | '|' => format!("{0}{0}", op),
                _ => format!("{}=", op),
            },
            Token::ShiftOperation(op) => format!("{0}{0}", op),
            Token::Unknown(_) => "unknown".to_string(),
            Token::LineComment => "//".to_string(),
            Token::MultiLineCommentOpen => "/*".to_string(),
            Token::MultiLineCommentClose => "*/".to_string(),
            Token::Preprocessor => "#".to_string(),
            Token::End => "EOF".to_string(),
            Token::Selection => "?".to_string(),
            Token::Sufix(op) => format!("{0}{0}", op),
            Token::TokenPasting => "##".to_string(),
        }
    }
}
/// Renders the token back to its source spelling; `parse` relies on this
/// to reconstruct the preprocessed source text.
impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::Separator(sep) => write!(f, "{}", sep),
            // Fix: `DoubleColon` spans two source chars ("::"); emitting a
            // single ":" corrupted the reconstructed source.
            Token::DoubleColon => write!(f, "::"),
            Token::Paren(paren) => write!(f, "{}", paren),
            Token::Integral(int) => write!(f, "{}", int),
            Token::Float(float) => write!(f, "{}", float),
            Token::Double(double) => write!(f, "{}", double),
            Token::Word(word) => write!(f, "{}", word),
            Token::Operation(op) => write!(f, "{}", op),
            Token::OpAssign(op) => write!(f, "{}=", op),
            // Fix: `==`/`&&`/`||` are stored as LogicalOperation('='/'&'/
            // '|') by the lexer; rendering them as "=="/"&&"/"||" instead of
            // "=="/"&="/"|=" keeps the reconstruction faithful. `<=`, `>=`
            // and `!=` still append `=`.
            Token::LogicalOperation(op) => match op {
                '=' | '&' | '|' => write!(f, "{0}{0}", op),
                _ => write!(f, "{0}=", op),
            },
            Token::ShiftOperation(op) => write!(f, "{0}{0}", op),
            Token::Unknown(unknown) => write!(f, "{}", unknown),
            Token::LineComment => write!(f, "//"),
            Token::MultiLineCommentOpen => write!(f, "/*"),
            Token::MultiLineCommentClose => write!(f, "*/"),
            Token::Preprocessor => write!(f, "#"),
            Token::End => write!(f, ""),
            Token::Selection => write!(f, "?"),
            Token::Sufix(op) => write!(f, "{0}{0}", op),
            Token::TokenPasting => write!(f, "##"),
        }
    }
}
/// Minimal expression AST node used by the preprocessor's expression
/// parser.
#[derive(Debug)]
pub enum Node {
    /// Identifier reference.
    Ident(String),
    /// Literal constant.
    Const(Literal),
}

/// A literal constant value.
#[derive(Debug, Copy, Clone)]
pub enum Literal {
    Double(f64),
    Float(f32),
    Uint(usize),
    Sint(isize),
    Bool(bool),
}
fn parse_primary_expression(tokens: &mut Tokens) -> Result<Node, Error> {
let token = tokens.next().ok_or(Error {
kind: ErrorKind::EOF,
})?;
match token.token {
Token::Word(ident) => Ok(match ident.as_str() {
"true" => Node::Const(Literal::Bool(true)),
"false" => Node::Const(Literal::Bool(false)),
_ => Node::Ident(ident),
}),
Token::Integral(uint) => Ok(Node::Const(Literal::Uint(uint))),
Token::Float(float) => Ok(Node::Const(Literal::Float(float))),
Token::Double(double) => Ok(Node::Const(Literal::Double(double))),
Token::Paren('(') => todo!(), /* parse_expression */
_ => Err(Error {
kind: ErrorKind::UnexpectedToken {
expected: vec![
Token::Word(String::new()),
Token::Integral(0),
Token::Double(0.0),
Token::Float(0.0),
Token::Paren('('),
],
got: token,
},
}),
}
}
/// Strips `//` and `/* */` comments from the input, returning the
/// surviving tokens (terminated by the `End` token).
///
/// Returns `ErrorKind::EOF` if a multi-line comment is never closed.
pub(self) fn parse_comments(mut lexer: lex::Lexer) -> Result<Vec<TokenMetadata>, Error> {
    let mut tokens = Vec::new();
    loop {
        let token = lexer.next();
        match token.token {
            Token::MultiLineCommentOpen => {
                // Discard everything up to the matching `*/`; reaching the
                // end of input first means the comment was left unclosed.
                let mut token = lexer.next();
                while Token::MultiLineCommentClose != token.token {
                    if token.token == Token::End {
                        return Err(Error {
                            kind: ErrorKind::EOF,
                        });
                    }
                    token = lexer.next();
                }
            }
            Token::LineComment => {
                // Discard the rest of the line. `peek` clones the whole
                // lexer (including its line buffer), so call it once per
                // iteration instead of twice.
                loop {
                    let upcoming = lexer.peek();
                    if upcoming.line != token.line || upcoming.token == Token::End {
                        break;
                    }
                    let _ = lexer.next();
                }
            }
            Token::End => {
                tokens.push(token);
                break;
            }
            _ => tokens.push(token),
        }
    }
    Ok(tokens)
}
/// Preprocessor entry point: strips comments (and, with the
/// `glsl_preprocessor` feature, expands preprocessor directives), then
/// re-emits the tokens as a source string preserving the original
/// line/column layout.
pub fn parse(input: &str) -> Result<String, Error> {
    let lexer = lex::Lexer::new(input);
    let tokens = parse_comments(lexer)?;
    // Predefine the macros expected in SPIR-V compilation environments.
    let mut macros = FastHashMap::default();
    macros.insert(
        String::from("GL_SPIRV"),
        vec![TokenMetadata {
            token: Token::Integral(100),
            line: 0,
            chars: 0..1,
        }],
    );
    macros.insert(
        String::from("VULKAN"),
        vec![TokenMetadata {
            token: Token::Integral(100),
            line: 0,
            chars: 0..1,
        }],
    );
    log::trace!("------GLSL COMMENT STRIPPED------");
    log::trace!("\n{:#?}", tokens);
    log::trace!("---------------------------------");
    // NOTE(review): without the `glsl_preprocessor` feature `macros` is
    // built but never used, and directives pass through verbatim.
    #[cfg(feature = "glsl_preprocessor")]
    let tokens = preprocessor::preprocess(&mut tokens.into_iter().peekable(), &mut macros)?;
    // Rebuild the source text, restoring each token to its recorded line
    // and column via newline/space padding.
    let mut line = 0;
    let mut start = 0;
    Ok(tokens.into_iter().fold(String::new(), |mut acc, token| {
        if token.line - line != 0 {
            acc.push_str(&"\n".repeat(token.line - line));
            start = 0;
            line = token.line;
        }
        acc.push_str(&" ".repeat(token.chars.start - start));
        acc.push_str(&token.token.to_string());
        start = token.chars.end;
        acc
    }))
}

File diff suppressed because it is too large Load Diff

View File

@ -9,7 +9,7 @@ fn rosetta_test(file_name: &str, stage: crate::ShaderStage) {
let expected =
fs::read_to_string(test_dir.join(file_name).with_extension("expected.ron")).unwrap();
let module = crate::front::glsl_new::parse_str(&input, "main".to_string(), stage).unwrap();
let module = super::parse_str(&input, "main".to_string(), stage).unwrap();
let output = ron::ser::to_string_pretty(&module, Default::default()).unwrap();
assert_diff!(output.as_str(), expected.as_str(), "", 0);

View File

@ -1,50 +0,0 @@
use crate::{EntryPoint, Module, ShaderStage};
mod lex;
#[cfg(test)]
mod lex_tests;
mod ast;
use ast::Program;
use lex::Lexer;
mod error;
use error::ParseError;
mod parser;
#[cfg(test)]
mod parser_tests;
mod token;
mod types;
#[cfg(all(test, feature = "serialize"))]
mod rosetta_tests;
/// Parses GLSL `source` into a `Module` for the given shader `stage`,
/// registering `entry` as the entry point if a function with that name was
/// declared.
pub fn parse_str(source: &str, entry: String, stage: ShaderStage) -> Result<Module, ParseError> {
    log::debug!("------ GLSL-pomelo ------");
    let mut program = Program::new(stage);
    let lex = Lexer::new(source);
    let mut parser = parser::Parser::new(&mut program);
    // The pomelo-generated parser is push-based: feed tokens one at a time.
    for token in lex {
        parser.parse(token)?;
    }
    parser.end_of_input()?;
    // Transfer everything the parser collected into a fresh module.
    let mut module = Module::generate_empty();
    module.functions = program.functions;
    module.types = program.types;
    module.constants = program.constants;
    module.global_variables = program.global_variables;
    // find entry point
    // NOTE(review): a missing entry point is silently ignored — the module
    // is returned with an empty `entry_points` list.
    if let Some(entry_handle) = program.lookup_function.get(&entry) {
        module.entry_points.push(EntryPoint {
            stage,
            name: entry,
            function: *entry_handle,
        });
    }
    Ok(module)
}

View File

@ -1,9 +1,7 @@
//! Parsers which load shaders into memory.
#[cfg(feature = "glsl")]
#[cfg(feature = "glsl-in")]
pub mod glsl;
#[cfg(feature = "glsl-new")]
pub mod glsl_new;
#[cfg(feature = "spirv-in")]
pub mod spv;
#[cfg(feature = "wgsl-in")]

View File

@ -16,7 +16,7 @@ fn load_spv(name: &str) -> naga::Module {
naga::front::spv::parse_u8_slice(&input).unwrap()
}
#[cfg(feature = "glsl")]
#[cfg(feature = "glsl-in")]
fn load_glsl(name: &str, entry: &str, stage: naga::ShaderStage) -> naga::Module {
let input = load_test_data(name);
naga::front::glsl::parse_str(&input, entry.to_owned(), stage).unwrap()
@ -107,7 +107,7 @@ fn convert_cube() {
validator.validate(&fs).unwrap();
}
#[cfg(feature = "glsl")]
#[cfg(feature = "glsl-in")]
#[test]
#[ignore]
fn convert_phong_lighting() {
@ -129,15 +129,16 @@ fn convert_phong_lighting() {
w.write(&module);
}
#[cfg(feature = "glsl")]
#[test]
fn constant_expressions() {
let module = load_glsl(
"glsl_constant_expression.vert",
"main",
naga::ShaderStage::Fragment {
early_depth_test: None,
},
);
naga::proc::Validator::new().validate(&module).unwrap();
}
//TODO: get this working again (glsl-new)
// #[cfg(feature = "glsl-in")]
// #[test]
// fn constant_expressions() {
// let module = load_glsl(
// "glsl_constant_expression.vert",
// "main",
// naga::ShaderStage::Fragment {
// early_depth_test: None,
// },
// );
// naga::proc::Validator::new().validate(&module).unwrap();
// }