From 42338fff3a277735ae81e57fc01d1051da3dc02c Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Thu, 21 Aug 2025 18:15:00 +0200 Subject: [PATCH 01/33] add `optionally_typed_ident` fn --- naga/src/front/wgsl/parse/mod.rs | 67 +++++++++++--------------------- 1 file changed, 23 insertions(+), 44 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index c01ba4de30f..dd9ca40618a 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1298,6 +1298,23 @@ impl Parser { Ok((handle, self.pop_rule_span(lexer))) } + fn optionally_typed_ident<'a>( + &mut self, + lexer: &mut Lexer<'a>, + ctx: &mut ExpressionContext<'a, '_, '_>, + ) -> Result<'a, (ast::Ident<'a>, Option>>)> { + let name = lexer.next_ident()?; + + let ty = if lexer.skip(Token::Separator(':')) { + Some(self.type_decl(lexer, ctx)?) + } else { + None + }; + + Ok((name, ty)) + } + + /// 'var' _disambiguate_template template_list? optionally_typed_ident fn variable_decl<'a>( &mut self, lexer: &mut Lexer<'a>, @@ -1322,13 +1339,7 @@ impl Parser { }; lexer.expect(Token::Paren('>'))?; } - let name = lexer.next_ident()?; - - let ty = if lexer.skip(Token::Separator(':')) { - Some(self.type_decl(lexer, ctx)?) 
- } else { - None - }; + let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; let init = if lexer.skip(Token::Operation('=')) { let handle = self.general_expression(lexer, ctx)?; @@ -2141,14 +2152,8 @@ impl Parser { } "let" => { let _ = lexer.next(); - let name = lexer.next_ident()?; + let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - let given_ty = if lexer.skip(Token::Separator(':')) { - let ty = this.type_decl(lexer, ctx)?; - Some(ty) - } else { - None - }; lexer.expect(Token::Operation('='))?; let expr_id = this.general_expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; @@ -2163,14 +2168,8 @@ impl Parser { } "const" => { let _ = lexer.next(); - let name = lexer.next_ident()?; + let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - let given_ty = if lexer.skip(Token::Separator(':')) { - let ty = this.type_decl(lexer, ctx)?; - Some(ty) - } else { - None - }; lexer.expect(Token::Operation('='))?; let expr_id = this.general_expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; @@ -2185,14 +2184,7 @@ impl Parser { } "var" => { let _ = lexer.next(); - - let name = lexer.next_ident()?; - let ty = if lexer.skip(Token::Separator(':')) { - let ty = this.type_decl(lexer, ctx)?; - Some(ty) - } else { - None - }; + let (name, ty) = this.optionally_typed_ident(lexer, ctx)?; let init = if lexer.skip(Token::Operation('=')) { let init = this.general_expression(lexer, ctx)?; @@ -2950,14 +2942,7 @@ impl Parser { (Token::Word("const"), _) => { ensure_no_diag_attrs("`const`s".into(), diagnostic_filters)?; - let name = lexer.next_ident()?; - - let ty = if lexer.skip(Token::Separator(':')) { - let ty = self.type_decl(lexer, &mut ctx)?; - Some(ty) - } else { - None - }; + let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?; lexer.expect(Token::Operation('='))?; let init = self.general_expression(lexer, &mut ctx)?; @@ -2973,13 +2958,7 @@ impl Parser { (Token::Word("override"), _) => { 
ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?; - let name = lexer.next_ident()?; - - let ty = if lexer.skip(Token::Separator(':')) { - Some(self.type_decl(lexer, &mut ctx)?) - } else { - None - }; + let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?; let init = if lexer.skip(Token::Operation('=')) { Some(self.general_expression(lexer, &mut ctx)?) From 4adede5773484471dbbcf1f302713b05b123ae51 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Thu, 21 Aug 2025 18:52:46 +0200 Subject: [PATCH 02/33] [wgsl] rename `type_decl` to `type_specifier` --- naga/src/front/wgsl/parse/mod.rs | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index dd9ca40618a..153c778a0a0 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -703,7 +703,7 @@ impl Parser { } (Token::Paren('<'), ast::ConstructorType::PartialArray) => { lexer.expect_generic_paren('<')?; - let base = self.type_decl(lexer, ctx)?; + let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let expr = self.const_generic_expression(lexer, ctx)?; lexer.skip(Token::Separator(',')); @@ -1306,7 +1306,7 @@ impl Parser { let name = lexer.next_ident()?; let ty = if lexer.skip(Token::Separator(':')) { - Some(self.type_decl(lexer, ctx)?) + Some(self.type_specifier(lexer, ctx)?) 
} else { None }; @@ -1406,7 +1406,7 @@ impl Parser { let name = lexer.next_ident()?; lexer.expect(Token::Separator(':'))?; - let ty = self.type_decl(lexer, ctx)?; + let ty = self.type_specifier(lexer, ctx)?; ready = lexer.skip(Token::Separator(',')); members.push(ast::StructMember { @@ -1441,7 +1441,7 @@ impl Parser { ) -> Result<'a, (Handle>, Span)> { lexer.expect_generic_paren('<')?; let start = lexer.start_byte_offset(); - let ty = self.type_decl(lexer, ctx)?; + let ty = self.type_specifier(lexer, ctx)?; let span = lexer.span_from(start); lexer.skip(Token::Separator(',')); lexer.expect_generic_paren('>')?; @@ -1464,7 +1464,7 @@ impl Parser { }) } - fn type_decl_impl<'a>( + fn type_specifier_impl<'a>( &mut self, lexer: &mut Lexer<'a>, word: &'a str, @@ -1704,7 +1704,7 @@ impl Parser { let (ident, span) = lexer.next_ident_with_span()?; let mut space = conv::map_address_space(ident, span)?; lexer.expect(Token::Separator(','))?; - let base = self.type_decl(lexer, ctx)?; + let base = self.type_specifier(lexer, ctx)?; if let crate::AddressSpace::Storage { ref mut access } = space { *access = if lexer.end_of_generic_arguments() { let result = lexer.next_storage_access()?; @@ -1719,7 +1719,7 @@ impl Parser { } "array" => { lexer.expect_generic_paren('<')?; - let base = self.type_decl(lexer, ctx)?; + let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.const_generic_expression(lexer, ctx)?; lexer.skip(Token::Separator(',')); @@ -1733,7 +1733,7 @@ impl Parser { } "binding_array" => { lexer.expect_generic_paren('<')?; - let base = self.type_decl(lexer, ctx)?; + let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.unary_expression(lexer, ctx)?; lexer.skip(Token::Separator(',')); @@ -1955,8 +1955,7 @@ impl Parser { } } - /// Parse type declaration of a given name. 
- fn type_decl<'a>( + fn type_specifier<'a>( &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, @@ -1966,7 +1965,7 @@ impl Parser { let (name, span) = lexer.next_ident_with_span()?; - let ty = match this.type_decl_impl(lexer, name, span, ctx)? { + let ty = match this.type_specifier_impl(lexer, name, span, ctx)? { Some(ty) => ty, None => { ctx.unresolved.insert(ast::Dependency { @@ -2687,7 +2686,7 @@ impl Parser { let param_name = lexer.next_ident()?; lexer.expect(Token::Separator(':'))?; - let param_type = self.type_decl(lexer, &mut ctx)?; + let param_type = self.type_specifier(lexer, &mut ctx)?; let handle = ctx.declare_local(param_name)?; arguments.push(ast::FunctionArgument { @@ -2701,7 +2700,7 @@ impl Parser { // read return type let result = if lexer.skip(Token::Arrow) { let binding = self.varying_binding(lexer, &mut ctx)?; - let ty = self.type_decl(lexer, &mut ctx)?; + let ty = self.type_specifier(lexer, &mut ctx)?; let must_use = must_use.is_some(); Some(ast::FunctionResult { ty, @@ -2935,7 +2934,7 @@ impl Parser { let name = lexer.next_ident()?; lexer.expect(Token::Operation('='))?; - let ty = self.type_decl(lexer, &mut ctx)?; + let ty = self.type_specifier(lexer, &mut ctx)?; lexer.expect(Token::Separator(';'))?; Some(ast::GlobalDeclKind::Type(ast::TypeAlias { name, ty })) } From 54fe3ca2e57b422dad8c0843befdea03a446f14f Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Thu, 21 Aug 2025 19:08:01 +0200 Subject: [PATCH 03/33] rename `assignment_statement` to `variable_updating_statement` --- naga/src/front/wgsl/parse/mod.rs | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 153c778a0a0..15304198169 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1983,14 +1983,16 @@ impl Parser { }) } - fn assignment_op_and_rhs<'a>( + /// Parses assignment, 
increment and decrement statements + fn variable_updating_statement<'a>( &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, block: &mut ast::Block<'a>, - target: Handle>, - span_start: usize, ) -> Result<'a, ()> { + let span_start = lexer.start_byte_offset(); + let target = self.lhs_expression(lexer, ctx)?; + use crate::BinaryOperator as Bo; let op = lexer.next(); @@ -2043,18 +2045,6 @@ impl Parser { Ok(()) } - /// Parse an assignment statement (will also parse increment and decrement statements) - fn assignment_statement<'a>( - &mut self, - lexer: &mut Lexer<'a>, - ctx: &mut ExpressionContext<'a, '_, '_>, - block: &mut ast::Block<'a>, - ) -> Result<'a, ()> { - let span_start = lexer.start_byte_offset(); - let target = self.lhs_expression(lexer, ctx)?; - self.assignment_op_and_rhs(lexer, ctx, block, target, span_start) - } - /// Parse a function call statement. /// Expects `ident` to be consumed (not in the lexer). fn function_statement<'a>( @@ -2109,11 +2099,11 @@ impl Parser { } _ => { *lexer = cloned; - self.assignment_statement(lexer, context, block) + self.variable_updating_statement(lexer, context, block) } } } - _ => self.assignment_statement(lexer, context, block), + _ => self.variable_updating_statement(lexer, context, block), } } @@ -2487,7 +2477,7 @@ impl Parser { block.stmts.push(ast::Statement { kind, span }); } _ => { - this.assignment_statement(lexer, ctx, block)?; + this.variable_updating_statement(lexer, ctx, block)?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); } From e99eb0149b1630c3e9960173e27c922ae2f91197 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Thu, 21 Aug 2025 19:13:48 +0200 Subject: [PATCH 04/33] rename `general_expression` to `expression` --- naga/src/front/wgsl/parse/mod.rs | 78 ++++++++++++++------------------ 1 file changed, 35 insertions(+), 43 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 
15304198169..e5140895cfb 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -193,7 +193,7 @@ impl<'a> BindingParser<'a> { "location" => { lexer.expect(Token::Paren('('))?; self.location - .set(parser.general_expression(lexer, ctx)?, name_span)?; + .set(parser.expression(lexer, ctx)?, name_span)?; lexer.expect(Token::Paren(')'))?; } "builtin" => { @@ -234,7 +234,7 @@ impl<'a> BindingParser<'a> { lexer.expect(Token::Paren('('))?; self.blend_src - .set(parser.general_expression(lexer, ctx)?, name_span)?; + .set(parser.expression(lexer, ctx)?, name_span)?; lexer.skip(Token::Separator(',')); lexer.expect(Token::Paren(')'))?; } @@ -358,7 +358,7 @@ impl Parser { return Ok(ast::SwitchValue::Default); } - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; Ok(ast::SwitchValue::Expr(expr)) } @@ -736,7 +736,7 @@ impl Parser { } else if lexer.skip(Token::Paren(')')) { break; } - let arg = self.general_expression(lexer, ctx)?; + let arg = self.expression(lexer, ctx)?; arguments.push(arg); } @@ -750,7 +750,7 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, Handle>> { self.push_rule_span(Rule::EnclosedExpr, lexer); - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; self.pop_rule_span(lexer); Ok(expr) } @@ -772,7 +772,7 @@ impl Parser { let (to, span) = self.singular_generic(lexer, ctx)?; lexer.open_arguments()?; - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; lexer.close_arguments()?; ast::Expression::Bitcast { @@ -1003,7 +1003,7 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, Handle>> { self.push_rule_span(Rule::GenericExpr, lexer); - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; self.pop_rule_span(lexer); Ok(expr) } @@ -1225,20 +1225,11 @@ impl Parser { ) } - fn general_expression<'a>( - &mut self, - lexer: &mut 
Lexer<'a>, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Handle>> { - self.general_expression_with_span(lexer, ctx) - .map(|(expr, _)| expr) - } - - fn general_expression_with_span<'a>( + fn expression<'a>( &mut self, lexer: &mut Lexer<'a>, context: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, (Handle>, Span)> { + ) -> Result<'a, Handle>> { self.push_rule_span(Rule::GeneralExpr, lexer); // logical_or_expression let handle = context.parse_binary_op( @@ -1295,7 +1286,8 @@ impl Parser { ) }, )?; - Ok((handle, self.pop_rule_span(lexer))) + self.pop_rule_span(lexer); + Ok(handle) } fn optionally_typed_ident<'a>( @@ -1342,7 +1334,7 @@ impl Parser { let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; let init = if lexer.skip(Token::Operation('=')) { - let handle = self.general_expression(lexer, ctx)?; + let handle = self.expression(lexer, ctx)?; Some(handle) } else { None @@ -1387,13 +1379,13 @@ impl Parser { match lexer.next_ident_with_span()? { ("size", name_span) => { lexer.expect(Token::Paren('('))?; - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; size.set(expr, name_span)?; } ("align", name_span) => { lexer.expect(Token::Paren('('))?; - let expr = self.general_expression(lexer, ctx)?; + let expr = self.expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; align.set(expr, name_span)?; } @@ -1998,7 +1990,7 @@ impl Parser { let op = lexer.next(); let (op, value) = match op { (Token::Operation('='), _) => { - let value = self.general_expression(lexer, ctx)?; + let value = self.expression(lexer, ctx)?; (None, value) } (Token::AssignmentOperation(c), _) => { @@ -2017,7 +2009,7 @@ impl Parser { _ => unreachable!(), }; - let value = self.general_expression(lexer, ctx)?; + let value = self.expression(lexer, ctx)?; (Some(op), value) } token @ (Token::IncrementOperation | Token::DecrementOperation, _) => { @@ -2134,7 +2126,7 @@ impl Parser { "_" => { let _ = 
lexer.next(); lexer.expect(Token::Operation('='))?; - let expr = this.general_expression(lexer, ctx)?; + let expr = this.expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; ast::StatementKind::Phony(expr) @@ -2144,7 +2136,7 @@ impl Parser { let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; lexer.expect(Token::Operation('='))?; - let expr_id = this.general_expression(lexer, ctx)?; + let expr_id = this.expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; let handle = ctx.declare_local(name)?; @@ -2160,7 +2152,7 @@ impl Parser { let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; lexer.expect(Token::Operation('='))?; - let expr_id = this.general_expression(lexer, ctx)?; + let expr_id = this.expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; let handle = ctx.declare_local(name)?; @@ -2176,7 +2168,7 @@ impl Parser { let (name, ty) = this.optionally_typed_ident(lexer, ctx)?; let init = if lexer.skip(Token::Operation('=')) { - let init = this.general_expression(lexer, ctx)?; + let init = this.expression(lexer, ctx)?; Some(init) } else { None @@ -2195,7 +2187,7 @@ impl Parser { "return" => { let _ = lexer.next(); let value = if lexer.peek().0 != Token::Separator(';') { - let handle = this.general_expression(lexer, ctx)?; + let handle = this.expression(lexer, ctx)?; Some(handle) } else { None @@ -2205,7 +2197,7 @@ impl Parser { } "if" => { let _ = lexer.next(); - let condition = this.general_expression(lexer, ctx)?; + let condition = this.expression(lexer, ctx)?; let accept = this.block(lexer, ctx, brace_nesting_level)?.0; @@ -2222,7 +2214,7 @@ impl Parser { } // ... else if (...) { ... 
} - let other_condition = this.general_expression(lexer, ctx)?; + let other_condition = this.expression(lexer, ctx)?; let other_block = this.block(lexer, ctx, brace_nesting_level)?; elsif_stack.push((elseif_span_start, other_condition, other_block)); elseif_span_start = lexer.start_byte_offset(); @@ -2254,7 +2246,7 @@ impl Parser { } "switch" => { let _ = lexer.next(); - let selector = this.general_expression(lexer, ctx)?; + let selector = this.expression(lexer, ctx)?; let brace_span = lexer.expect_span(Token::Paren('{'))?; let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?; @@ -2322,7 +2314,7 @@ impl Parser { let mut body = ast::Block::default(); let (condition, span) = - lexer.capture_span(|lexer| this.general_expression(lexer, ctx))?; + lexer.capture_span(|lexer| this.expression(lexer, ctx))?; let mut reject = ast::Block::default(); reject.stmts.push(ast::Statement { kind: ast::StatementKind::Break, @@ -2384,7 +2376,7 @@ impl Parser { if !lexer.skip(Token::Separator(';')) { let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> { - let condition = this.general_expression(lexer, ctx)?; + let condition = this.expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; Ok(condition) })?; @@ -2456,7 +2448,7 @@ impl Parser { // parentheses are optional let paren = lexer.skip(Token::Paren('(')); - let condition = this.general_expression(lexer, ctx)?; + let condition = this.expression(lexer, ctx)?; if paren { lexer.expect(Token::Paren(')'))?; @@ -2521,7 +2513,7 @@ impl Parser { // the break if lexer.expect(Token::Word("if"))?; - let condition = self.general_expression(lexer, ctx)?; + let condition = self.expression(lexer, ctx)?; // Set the condition of the break if to the newly parsed // expression break_if = Some(condition); @@ -2814,17 +2806,17 @@ impl Parser { match name { "binding" => { lexer.expect(Token::Paren('('))?; - bind_index.set(self.general_expression(lexer, &mut ctx)?, name_span)?; + 
bind_index.set(self.expression(lexer, &mut ctx)?, name_span)?; lexer.expect(Token::Paren(')'))?; } "group" => { lexer.expect(Token::Paren('('))?; - bind_group.set(self.general_expression(lexer, &mut ctx)?, name_span)?; + bind_group.set(self.expression(lexer, &mut ctx)?, name_span)?; lexer.expect(Token::Paren(')'))?; } "id" => { lexer.expect(Token::Paren('('))?; - id.set(self.general_expression(lexer, &mut ctx)?, name_span)?; + id.set(self.expression(lexer, &mut ctx)?, name_span)?; lexer.expect(Token::Paren(')'))?; } "vertex" => { @@ -2841,7 +2833,7 @@ impl Parser { lexer.expect(Token::Paren('('))?; let mut new_workgroup_size = [None; 3]; for (i, size) in new_workgroup_size.iter_mut().enumerate() { - *size = Some(self.general_expression(lexer, &mut ctx)?); + *size = Some(self.expression(lexer, &mut ctx)?); match lexer.next() { (Token::Paren(')'), _) => break, (Token::Separator(','), _) if i != 2 => (), @@ -2934,7 +2926,7 @@ impl Parser { let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?; lexer.expect(Token::Operation('='))?; - let init = self.general_expression(lexer, &mut ctx)?; + let init = self.expression(lexer, &mut ctx)?; lexer.expect(Token::Separator(';'))?; Some(ast::GlobalDeclKind::Const(ast::Const { @@ -2950,7 +2942,7 @@ impl Parser { let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?; let init = if lexer.skip(Token::Operation('=')) { - Some(self.general_expression(lexer, &mut ctx)?) + Some(self.expression(lexer, &mut ctx)?) 
} else { None }; @@ -3009,7 +3001,7 @@ impl Parser { // parentheses are optional let paren = lexer.skip(Token::Paren('(')); - let condition = self.general_expression(lexer, &mut ctx)?; + let condition = self.expression(lexer, &mut ctx)?; if paren { lexer.expect(Token::Paren(')'))?; From ed6a032055655430110de259de02cf70fcac8eb3 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 15:00:51 +0200 Subject: [PATCH 05/33] remove `next_scalar_generic` --- naga/src/front/wgsl/parse/lexer.rs | 16 ---------------- naga/src/front/wgsl/parse/mod.rs | 2 +- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index d0a8033987b..549a02bab5b 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -500,22 +500,6 @@ impl<'a> Lexer<'a> { } } - /// Parses a generic scalar type, for example ``. - pub(in crate::front::wgsl) fn next_scalar_generic(&mut self) -> Result<'a, Scalar> { - self.expect_generic_paren('<')?; - let (scalar, _span) = match self.next() { - (Token::Word(word), span) => { - conv::get_scalar_type(&self.enable_extensions, span, word)? - .map(|scalar| (scalar, span)) - .ok_or(Error::UnknownScalarType(span))? - } - (_, span) => return Err(Box::new(Error::UnknownScalarType(span))), - }; - - self.expect_generic_paren('>')?; - Ok(scalar) - } - /// Parses a generic scalar type, for example ``. /// /// Returns the span covering the inner type, excluding the brackets. 
diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index e5140895cfb..da4c660b7bd 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1688,7 +1688,7 @@ impl Parser { ty_span: Span::UNDEFINED, }, "atomic" => { - let scalar = lexer.next_scalar_generic()?; + let (scalar, _) = lexer.next_scalar_generic_with_span()?; ast::Type::Atomic(scalar) } "ptr" => { From 75fbca1ed1bc9e4a2eca0a8bcbeb73c50f549da8 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 15:20:03 +0200 Subject: [PATCH 06/33] remove `peek_ident_with_span` --- naga/src/front/wgsl/parse/lexer.rs | 32 +++++++++--------------------- naga/src/front/wgsl/parse/mod.rs | 4 ++-- 2 files changed, 11 insertions(+), 25 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 549a02bab5b..fcb69f5f3c9 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -460,29 +460,15 @@ impl<'a> Lexer<'a> { pub(in crate::front::wgsl) fn next_ident_with_span(&mut self) -> Result<'a, (&'a str, Span)> { match self.next() { - (Token::Word(word), span) => Self::word_as_ident_with_span(word, span), - other => Err(Box::new(Error::Unexpected( - other.1, - ExpectedToken::Identifier, - ))), - } - } - - pub(in crate::front::wgsl) fn peek_ident_with_span(&mut self) -> Result<'a, (&'a str, Span)> { - match self.peek() { - (Token::Word(word), span) => Self::word_as_ident_with_span(word, span), - other => Err(Box::new(Error::Unexpected( - other.1, - ExpectedToken::Identifier, - ))), - } - } - - fn word_as_ident_with_span(word: &'a str, span: Span) -> Result<'a, (&'a str, Span)> { - match word { - "_" => Err(Box::new(Error::InvalidIdentifierUnderscore(span))), - word if word.starts_with("__") => Err(Box::new(Error::ReservedIdentifierPrefix(span))), - word => Ok((word, span)), + (Token::Word("_"), span) => 
Err(Box::new(Error::InvalidIdentifierUnderscore(span))), + (Token::Word(word), span) => { + if word.starts_with("__") { + Err(Box::new(Error::ReservedIdentifierPrefix(span))) + } else { + Ok((word, span)) + } + } + (_, span) => Err(Box::new(Error::Unexpected(span, ExpectedToken::Identifier))), } } diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index da4c660b7bd..7729013f4ba 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -3055,8 +3055,8 @@ impl Parser { tu.doc_comments = lexer.accumulate_module_doc_comments(); // Parse directives. - while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() { - if let Some(kind) = DirectiveKind::from_ident(ident) { + while let (Token::Word(word), _) = lexer.peek() { + if let Some(kind) = DirectiveKind::from_ident(word) { self.push_rule_span(Rule::Directive, &mut lexer); let _ = lexer.next_ident_with_span().unwrap(); match kind { From 47108f674ecf6ff670b52a3d1cdd6193d3efbaa9 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:53:50 +0200 Subject: [PATCH 07/33] update `lhs_expression` to be in line with the spec --- naga/src/front/wgsl/error.rs | 2 ++ naga/src/front/wgsl/parse/mod.rs | 53 +++++++++++++++++--------------- naga/tests/naga/wgsl_errors.rs | 12 ++++---- 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index 17dab5cb0ea..05d58ee9110 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -135,6 +135,7 @@ pub enum ExpectedToken<'a> { Identifier, AfterIdentListComma, AfterIdentListArg, + LhsExpression, /// Expected: constant, parenthesized expression, identifier PrimaryExpression, /// Expected: assignment, increment/decrement expression @@ -487,6 +488,7 @@ impl<'a> Error<'a> { Token::End => "end".to_string(), }, ExpectedToken::Identifier => "identifier".to_string(), + 
ExpectedToken::LhsExpression => "lhs_expression".to_string(), ExpectedToken::PrimaryExpression => "expression".to_string(), ExpectedToken::Assignment => "assignment or increment/decrement".to_string(), ExpectedToken::SwitchItem => concat!( diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 7729013f4ba..5c45bfeb27b 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1082,34 +1082,35 @@ impl Parser { self.track_recursion(|this| { this.push_rule_span(Rule::LhsExpr, lexer); let start = lexer.start_byte_offset(); - let expr = match lexer.peek() { + let expr = match lexer.next() { (Token::Operation('*'), _) => { - let _ = lexer.next(); let expr = this.lhs_expression(lexer, ctx)?; let expr = ast::Expression::Deref(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } (Token::Operation('&'), _) => { - let _ = lexer.next(); let expr = this.lhs_expression(lexer, ctx)?; let expr = ast::Expression::AddrOf(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } - (Token::Operation('('), _) => { - let _ = lexer.next(); - let primary_expr = this.lhs_expression(lexer, ctx)?; + (Token::Paren('('), _) => { + let expr = this.lhs_expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; - this.postfix(start, lexer, ctx, primary_expr)? + this.postfix(start, lexer, ctx, expr)? } (Token::Word(word), span) => { - let _ = lexer.next(); let ident = this.ident_expr(word, span, ctx); - let primary_expr = ctx.expressions.append(ast::Expression::Ident(ident), span); - this.postfix(start, lexer, ctx, primary_expr)? + let ident = ctx.expressions.append(ast::Expression::Ident(ident), span); + this.postfix(start, lexer, ctx, ident)? 
+ } + (_, span) => { + return Err(Box::new(Error::Unexpected( + span, + ExpectedToken::LhsExpression, + ))); } - _ => this.singular_expression(lexer, ctx)?, }; this.pop_rule_span(lexer); @@ -1983,17 +1984,29 @@ impl Parser { block: &mut ast::Block<'a>, ) -> Result<'a, ()> { let span_start = lexer.start_byte_offset(); + match lexer.peek().0 { + Token::Word("_") => { + let _ = lexer.next(); + lexer.expect(Token::Operation('='))?; + let expr = self.expression(lexer, ctx)?; + let span = lexer.span_from(span_start); + block.stmts.push(ast::Statement { + kind: ast::StatementKind::Phony(expr), + span, + }); + return Ok(()); + } + _ => {} + } let target = self.lhs_expression(lexer, ctx)?; - use crate::BinaryOperator as Bo; - - let op = lexer.next(); - let (op, value) = match op { + let (op, value) = match lexer.next() { (Token::Operation('='), _) => { let value = self.expression(lexer, ctx)?; (None, value) } (Token::AssignmentOperation(c), _) => { + use crate::BinaryOperator as Bo; let op = match c { '<' => Bo::ShiftLeft, '>' => Bo::ShiftRight, @@ -2026,7 +2039,7 @@ impl Parser { }); return Ok(()); } - _ => return Err(Box::new(Error::Unexpected(op.1, ExpectedToken::Assignment))), + (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))), }; let span = lexer.span_from(span_start); @@ -2123,14 +2136,6 @@ impl Parser { } (Token::Word(word), _) => { let kind = match word { - "_" => { - let _ = lexer.next(); - lexer.expect(Token::Operation('='))?; - let expr = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; - - ast::StatementKind::Phony(expr) - } "let" => { let _ = lexer.next(); let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index 675f52b94e1..09048a673a3 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -2569,11 +2569,11 @@ fn binary_statement() { 3 + 5; } ", - r###"error: expected assignment or 
increment/decrement, found "+" - ┌─ wgsl:3:15 + r###"error: expected lhs_expression, found "3" + ┌─ wgsl:3:13 │ 3 │ 3 + 5; - │ ^ expected assignment or increment/decrement + │ ^ expected lhs_expression "###, ); @@ -2587,11 +2587,11 @@ fn assign_to_expr() { 3 + 5 = 10; } ", - r###"error: expected assignment or increment/decrement, found "+" - ┌─ wgsl:3:15 + r###"error: expected lhs_expression, found "3" + ┌─ wgsl:3:13 │ 3 │ 3 + 5 = 10; - │ ^ expected assignment or increment/decrement + │ ^ expected lhs_expression "###, ); From fe63d0753e4e715c89b31dfb860d794b8087f70c Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 18:26:20 +0200 Subject: [PATCH 08/33] rename `postfix` to `component_or_swizzle_specifier` --- naga/src/front/wgsl/parse/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 5c45bfeb27b..e78617f11ba 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -963,7 +963,7 @@ impl Parser { Ok(expr) } - fn postfix<'a>( + fn component_or_swizzle_specifier<'a>( &mut self, span_start: usize, lexer: &mut Lexer<'a>, @@ -1098,12 +1098,12 @@ impl Parser { (Token::Paren('('), _) => { let expr = this.lhs_expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; - this.postfix(start, lexer, ctx, expr)? + this.component_or_swizzle_specifier(start, lexer, ctx, expr)? } (Token::Word(word), span) => { let ident = this.ident_expr(word, span, ctx); let ident = ctx.expressions.append(ast::Expression::Ident(ident), span); - this.postfix(start, lexer, ctx, ident)? + this.component_or_swizzle_specifier(start, lexer, ctx, ident)? 
} (_, span) => { return Err(Box::new(Error::Unexpected( @@ -1127,7 +1127,7 @@ impl Parser { let start = lexer.start_byte_offset(); self.push_rule_span(Rule::SingularExpr, lexer); let primary_expr = self.primary_expression(lexer, ctx)?; - let singular_expr = self.postfix(start, lexer, ctx, primary_expr)?; + let singular_expr = self.component_or_swizzle_specifier(start, lexer, ctx, primary_expr)?; self.pop_rule_span(lexer); Ok(singular_expr) From 086c0e4c37af907d9754bbab4f2e30ef5989917b Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 19:16:13 +0200 Subject: [PATCH 09/33] remove `lexer.clone()` --- naga/src/front/wgsl/parse/lexer.rs | 2 +- naga/src/front/wgsl/parse/mod.rs | 47 +++++++++++++----------------- 2 files changed, 22 insertions(+), 27 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index fcb69f5f3c9..a7671596839 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -7,7 +7,7 @@ use crate::Span; use alloc::{boxed::Box, vec::Vec}; -type TokenSpan<'a> = (Token<'a>, Span); +pub type TokenSpan<'a> = (Token<'a>, Span); #[derive(Copy, Clone, Debug, PartialEq)] pub enum Token<'a> { diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index e78617f11ba..bc4f583e7a9 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -9,7 +9,7 @@ use crate::front::wgsl::error::{DiagnosticAttributeNotSupportedPosition, Error, use crate::front::wgsl::parse::directive::enable_extension::{EnableExtension, EnableExtensions}; use crate::front::wgsl::parse::directive::language_extension::LanguageExtension; use crate::front::wgsl::parse::directive::DirectiveKind; -use crate::front::wgsl::parse::lexer::{Lexer, Token}; +use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan}; use crate::front::wgsl::parse::number::Number; use crate::front::wgsl::{Result, Scalar}; use 
crate::front::SymbolTable; @@ -1078,25 +1078,27 @@ impl Parser { &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, + token: Option>, ) -> Result<'a, Handle>> { self.track_recursion(|this| { this.push_rule_span(Rule::LhsExpr, lexer); - let start = lexer.start_byte_offset(); - let expr = match lexer.next() { + let token = token.unwrap_or_else(|| lexer.next()); + let start = token.1.to_range().unwrap().start; + let expr = match token { (Token::Operation('*'), _) => { - let expr = this.lhs_expression(lexer, ctx)?; + let expr = this.lhs_expression(lexer, ctx, None)?; let expr = ast::Expression::Deref(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } (Token::Operation('&'), _) => { - let expr = this.lhs_expression(lexer, ctx)?; + let expr = this.lhs_expression(lexer, ctx, None)?; let expr = ast::Expression::AddrOf(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } (Token::Paren('('), _) => { - let expr = this.lhs_expression(lexer, ctx)?; + let expr = this.lhs_expression(lexer, ctx, None)?; lexer.expect(Token::Paren(')'))?; this.component_or_swizzle_specifier(start, lexer, ctx, expr)? 
} @@ -1982,11 +1984,11 @@ impl Parser { lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, block: &mut ast::Block<'a>, + token: TokenSpan<'a>, ) -> Result<'a, ()> { - let span_start = lexer.start_byte_offset(); - match lexer.peek().0 { + let span_start = token.1.to_range().unwrap().start; + match token.0 { Token::Word("_") => { - let _ = lexer.next(); lexer.expect(Token::Operation('='))?; let expr = self.expression(lexer, ctx)?; let span = lexer.span_from(span_start); @@ -1998,7 +2000,7 @@ impl Parser { } _ => {} } - let target = self.lhs_expression(lexer, ctx)?; + let target = self.lhs_expression(lexer, ctx, Some(token))?; let (op, value) = match lexer.next() { (Token::Operation('='), _) => { @@ -2093,22 +2095,14 @@ impl Parser { block: &mut ast::Block<'a>, ) -> Result<'a, ()> { let span_start = lexer.start_byte_offset(); - match lexer.peek() { - (Token::Word(name), span) => { - // A little hack for 2 token lookahead. - let cloned = lexer.clone(); - let _ = lexer.next(); - match lexer.peek() { - (Token::Paren('('), _) => { - self.function_statement(lexer, name, span, span_start, context, block) - } - _ => { - *lexer = cloned; - self.variable_updating_statement(lexer, context, block) - } + match lexer.next() { + token @ (Token::Word(name), span) => match lexer.peek() { + (Token::Paren('('), _) => { + self.function_statement(lexer, name, span, span_start, context, block) } - } - _ => self.variable_updating_statement(lexer, context, block), + _ => self.variable_updating_statement(lexer, context, block, token), + }, + token => self.variable_updating_statement(lexer, context, block, token), } } @@ -2474,7 +2468,8 @@ impl Parser { block.stmts.push(ast::Statement { kind, span }); } _ => { - this.variable_updating_statement(lexer, ctx, block)?; + let token = lexer.next(); + this.variable_updating_statement(lexer, ctx, block, token)?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); } From 16c2777baa290f605d523c8d38f0a82c7bb32731 Mon Sep 17 
00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 19:18:13 +0200 Subject: [PATCH 10/33] rename `function_statement` to `func_call_statement` --- naga/src/front/wgsl/parse/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index bc4f583e7a9..6af649c499f 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -2054,7 +2054,7 @@ impl Parser { /// Parse a function call statement. /// Expects `ident` to be consumed (not in the lexer). - fn function_statement<'a>( + fn func_call_statement<'a>( &mut self, lexer: &mut Lexer<'a>, ident: &'a str, @@ -2098,7 +2098,7 @@ impl Parser { match lexer.next() { token @ (Token::Word(name), span) => match lexer.peek() { (Token::Paren('('), _) => { - self.function_statement(lexer, name, span, span_start, context, block) + self.func_call_statement(lexer, name, span, span_start, context, block) } _ => self.variable_updating_statement(lexer, context, block, token), }, From 527cf31a12ed694e2a919f8c323746e6b7fa5fcb Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 19:20:10 +0200 Subject: [PATCH 11/33] rename `function_call_or_assignment_statement` to `func_call_or_variable_updating_statement` --- naga/src/front/wgsl/parse/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 6af649c499f..f09a509eff3 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -2088,7 +2088,7 @@ impl Parser { Ok(()) } - fn function_call_or_assignment_statement<'a>( + fn func_call_or_variable_updating_statement<'a>( &mut self, lexer: &mut Lexer<'a>, context: &mut ExpressionContext<'a, '_, '_>, @@ -2396,7 +2396,7 @@ impl Parser { let mut continuing = ast::Block::default(); if !lexer.skip(Token::Paren(')')) { - 
this.function_call_or_assignment_statement( + this.func_call_or_variable_updating_statement( lexer, ctx, &mut continuing, @@ -2457,7 +2457,7 @@ impl Parser { } // assignment or a function call _ => { - this.function_call_or_assignment_statement(lexer, ctx, block)?; + this.func_call_or_variable_updating_statement(lexer, ctx, block)?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); return Ok(()); From 68ca96d886361802f4b7428cfd9181843e14ee77 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 19:40:21 +0200 Subject: [PATCH 12/33] use `lexer.next()` instead of `peek()` for most statements --- naga/src/front/wgsl/parse/mod.rs | 51 ++++++++++++++------------------ 1 file changed, 22 insertions(+), 29 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index f09a509eff3..a9a448016ff 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -2093,15 +2093,13 @@ impl Parser { lexer: &mut Lexer<'a>, context: &mut ExpressionContext<'a, '_, '_>, block: &mut ast::Block<'a>, + token: TokenSpan<'a>, ) -> Result<'a, ()> { - let span_start = lexer.start_byte_offset(); - match lexer.next() { - token @ (Token::Word(name), span) => match lexer.peek() { - (Token::Paren('('), _) => { - self.func_call_statement(lexer, name, span, span_start, context, block) - } - _ => self.variable_updating_statement(lexer, context, block, token), - }, + let span_start = token.1.to_range().unwrap().start; + match token { + (Token::Word(name), span) if matches!(lexer.peek(), (Token::Paren('('), _)) => { + self.func_call_statement(lexer, name, span, span_start, context, block) + } token => self.variable_updating_statement(lexer, context, block, token), } } @@ -2115,11 +2113,8 @@ impl Parser { ) -> Result<'a, ()> { self.track_recursion(|this| { this.push_rule_span(Rule::Statement, lexer); + match lexer.peek() { - (Token::Separator(';'), _) => { - let _ = lexer.next(); 
- this.pop_rule_span(lexer); - } (token, _) if is_start_of_compound_statement(token) => { let (inner, span) = this.block(lexer, ctx, brace_nesting_level)?; block.stmts.push(ast::Statement { @@ -2127,11 +2122,18 @@ impl Parser { span, }); this.pop_rule_span(lexer); + return Ok(()); + } + _ => {} + } + + match lexer.next() { + (Token::Separator(';'), _) => { + this.pop_rule_span(lexer); } - (Token::Word(word), _) => { + token @ (Token::Word(word), span) => { let kind = match word { "let" => { - let _ = lexer.next(); let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; lexer.expect(Token::Operation('='))?; @@ -2147,7 +2149,6 @@ impl Parser { })) } "const" => { - let _ = lexer.next(); let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; lexer.expect(Token::Operation('='))?; @@ -2163,7 +2164,6 @@ impl Parser { })) } "var" => { - let _ = lexer.next(); let (name, ty) = this.optionally_typed_ident(lexer, ctx)?; let init = if lexer.skip(Token::Operation('=')) { @@ -2184,7 +2184,6 @@ impl Parser { })) } "return" => { - let _ = lexer.next(); let value = if lexer.peek().0 != Token::Separator(';') { let handle = this.expression(lexer, ctx)?; Some(handle) @@ -2195,7 +2194,6 @@ impl Parser { ast::StatementKind::Return { value } } "if" => { - let _ = lexer.next(); let condition = this.expression(lexer, ctx)?; let accept = this.block(lexer, ctx, brace_nesting_level)?.0; @@ -2244,7 +2242,6 @@ impl Parser { } } "switch" => { - let _ = lexer.next(); let selector = this.expression(lexer, ctx)?; let brace_span = lexer.expect_span(Token::Paren('{'))?; let brace_nesting_level = @@ -2309,7 +2306,6 @@ impl Parser { } "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?, "while" => { - let _ = lexer.next(); let mut body = ast::Block::default(); let (condition, span) = @@ -2342,7 +2338,6 @@ impl Parser { } } "for" => { - let _ = lexer.next(); lexer.expect(Token::Paren('('))?; ctx.local_table.push_scope(); @@ -2396,10 +2391,12 @@ impl Parser { let mut continuing = 
ast::Block::default(); if !lexer.skip(Token::Paren(')')) { + let token = lexer.next(); this.func_call_or_variable_updating_statement( lexer, ctx, &mut continuing, + token, )?; lexer.expect(Token::Paren(')'))?; } @@ -2419,7 +2416,6 @@ impl Parser { } } "break" => { - let (_, span) = lexer.next(); // Check if the next token is an `if`, this indicates // that the user tried to type out a `break if` which // is illegal in this position. @@ -2432,18 +2428,15 @@ impl Parser { ast::StatementKind::Break } "continue" => { - let _ = lexer.next(); lexer.expect(Token::Separator(';'))?; ast::StatementKind::Continue } "discard" => { - let _ = lexer.next(); lexer.expect(Token::Separator(';'))?; ast::StatementKind::Kill } // https://www.w3.org/TR/WGSL/#const-assert-statement "const_assert" => { - let _ = lexer.next(); // parentheses are optional let paren = lexer.skip(Token::Paren('(')); @@ -2457,7 +2450,9 @@ impl Parser { } // assignment or a function call _ => { - this.func_call_or_variable_updating_statement(lexer, ctx, block)?; + this.func_call_or_variable_updating_statement( + lexer, ctx, block, token, + )?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); return Ok(()); @@ -2467,8 +2462,7 @@ impl Parser { let span = this.pop_rule_span(lexer); block.stmts.push(ast::Statement { kind, span }); } - _ => { - let token = lexer.next(); + token => { this.variable_updating_statement(lexer, ctx, block, token)?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); @@ -2484,7 +2478,6 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, brace_nesting_level: u8, ) -> Result<'a, ast::StatementKind<'a>> { - let _ = lexer.next(); let mut body = ast::Block::default(); let mut continuing = ast::Block::default(); let mut break_if = None; From 97c58d098394804adc90bfd39d73c2143dcf6542 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 19:46:56 +0200 Subject: [PATCH 13/33] flatten match --- 
naga/src/front/wgsl/parse/mod.rs | 590 +++++++++++++++---------------- 1 file changed, 287 insertions(+), 303 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index a9a448016ff..76e5bf54545 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -2127,347 +2127,331 @@ impl Parser { _ => {} } - match lexer.next() { + let kind = match lexer.next() { (Token::Separator(';'), _) => { this.pop_rule_span(lexer); + return Ok(()); } - token @ (Token::Word(word), span) => { - let kind = match word { - "let" => { - let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; + (Token::Word("let"), _) => { + let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - lexer.expect(Token::Operation('='))?; - let expr_id = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; + lexer.expect(Token::Operation('='))?; + let expr_id = this.expression(lexer, ctx)?; + lexer.expect(Token::Separator(';'))?; - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let { - name, - ty: given_ty, - init: expr_id, - handle, - })) - } - "const" => { - let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; + let handle = ctx.declare_local(name)?; + ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let { + name, + ty: given_ty, + init: expr_id, + handle, + })) + } + (Token::Word("const"), _) => { + let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - lexer.expect(Token::Operation('='))?; - let expr_id = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; + lexer.expect(Token::Operation('='))?; + let expr_id = this.expression(lexer, ctx)?; + lexer.expect(Token::Separator(';'))?; - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst { - name, - ty: given_ty, - init: expr_id, - handle, - })) - } - "var" => { - let (name, ty) = 
this.optionally_typed_ident(lexer, ctx)?; - - let init = if lexer.skip(Token::Operation('=')) { - let init = this.expression(lexer, ctx)?; - Some(init) - } else { - None - }; + let handle = ctx.declare_local(name)?; + ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst { + name, + ty: given_ty, + init: expr_id, + handle, + })) + } + (Token::Word("var"), _) => { + let (name, ty) = this.optionally_typed_ident(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; + let init = if lexer.skip(Token::Operation('=')) { + let init = this.expression(lexer, ctx)?; + Some(init) + } else { + None + }; + + lexer.expect(Token::Separator(';'))?; - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable { - name, - ty, - init, - handle, - })) + let handle = ctx.declare_local(name)?; + ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable { + name, + ty, + init, + handle, + })) + } + (Token::Word("return"), _) => { + let value = if lexer.peek().0 != Token::Separator(';') { + let handle = this.expression(lexer, ctx)?; + Some(handle) + } else { + None + }; + lexer.expect(Token::Separator(';'))?; + ast::StatementKind::Return { value } + } + (Token::Word("if"), _) => { + let condition = this.expression(lexer, ctx)?; + + let accept = this.block(lexer, ctx, brace_nesting_level)?.0; + + let mut elsif_stack = Vec::new(); + let mut elseif_span_start = lexer.start_byte_offset(); + let mut reject = loop { + if !lexer.skip(Token::Word("else")) { + break ast::Block::default(); } - "return" => { - let value = if lexer.peek().0 != Token::Separator(';') { - let handle = this.expression(lexer, ctx)?; - Some(handle) - } else { - None - }; - lexer.expect(Token::Separator(';'))?; - ast::StatementKind::Return { value } + + if !lexer.skip(Token::Word("if")) { + // ... else { ... 
} + break this.block(lexer, ctx, brace_nesting_level)?.0; } - "if" => { - let condition = this.expression(lexer, ctx)?; - let accept = this.block(lexer, ctx, brace_nesting_level)?.0; + // ... else if (...) { ... } + let other_condition = this.expression(lexer, ctx)?; + let other_block = this.block(lexer, ctx, brace_nesting_level)?; + elsif_stack.push((elseif_span_start, other_condition, other_block)); + elseif_span_start = lexer.start_byte_offset(); + }; - let mut elsif_stack = Vec::new(); - let mut elseif_span_start = lexer.start_byte_offset(); - let mut reject = loop { - if !lexer.skip(Token::Word("else")) { - break ast::Block::default(); - } + // reverse-fold the else-if blocks + //Note: we may consider uplifting this to the IR + for (other_span_start, other_cond, other_block) in elsif_stack.into_iter().rev() + { + let sub_stmt = ast::StatementKind::If { + condition: other_cond, + accept: other_block.0, + reject, + }; + reject = ast::Block::default(); + let span = lexer.span_from(other_span_start); + reject.stmts.push(ast::Statement { + kind: sub_stmt, + span, + }) + } - if !lexer.skip(Token::Word("if")) { - // ... else { ... 
} - break this.block(lexer, ctx, brace_nesting_level)?.0; - } + ast::StatementKind::If { + condition, + accept, + reject, + } + } + (Token::Word("switch"), _) => { + let selector = this.expression(lexer, ctx)?; + let brace_span = lexer.expect_span(Token::Paren('{'))?; + let brace_nesting_level = + Self::increase_brace_nesting(brace_nesting_level, brace_span)?; + let mut cases = Vec::new(); + + loop { + // cases + default + match lexer.next() { + (Token::Word("case"), _) => { + // parse a list of values + let value = loop { + let value = this.switch_value(lexer, ctx)?; + if lexer.skip(Token::Separator(',')) { + // list of values ends with ':' or a compound statement + let next_token = lexer.peek().0; + if next_token == Token::Separator(':') + || is_start_of_compound_statement(next_token) + { + break value; + } + } else { + break value; + } + cases.push(ast::SwitchCase { + value, + body: ast::Block::default(), + fall_through: true, + }); + }; - // ... else if (...) { ... } - let other_condition = this.expression(lexer, ctx)?; - let other_block = this.block(lexer, ctx, brace_nesting_level)?; - elsif_stack.push((elseif_span_start, other_condition, other_block)); - elseif_span_start = lexer.start_byte_offset(); - }; + lexer.skip(Token::Separator(':')); - // reverse-fold the else-if blocks - //Note: we may consider uplifting this to the IR - for (other_span_start, other_cond, other_block) in - elsif_stack.into_iter().rev() - { - let sub_stmt = ast::StatementKind::If { - condition: other_cond, - accept: other_block.0, - reject, - }; - reject = ast::Block::default(); - let span = lexer.span_from(other_span_start); - reject.stmts.push(ast::Statement { - kind: sub_stmt, - span, - }) - } + let body = this.block(lexer, ctx, brace_nesting_level)?.0; - ast::StatementKind::If { - condition, - accept, - reject, + cases.push(ast::SwitchCase { + value, + body, + fall_through: false, + }); } - } - "switch" => { - let selector = this.expression(lexer, ctx)?; - let brace_span = 
lexer.expect_span(Token::Paren('{'))?; - let brace_nesting_level = - Self::increase_brace_nesting(brace_nesting_level, brace_span)?; - let mut cases = Vec::new(); - - loop { - // cases + default - match lexer.next() { - (Token::Word("case"), _) => { - // parse a list of values - let value = loop { - let value = this.switch_value(lexer, ctx)?; - if lexer.skip(Token::Separator(',')) { - // list of values ends with ':' or a compound statement - let next_token = lexer.peek().0; - if next_token == Token::Separator(':') - || is_start_of_compound_statement(next_token) - { - break value; - } - } else { - break value; - } - cases.push(ast::SwitchCase { - value, - body: ast::Block::default(), - fall_through: true, - }); - }; - - lexer.skip(Token::Separator(':')); - - let body = this.block(lexer, ctx, brace_nesting_level)?.0; - - cases.push(ast::SwitchCase { - value, - body, - fall_through: false, - }); - } - (Token::Word("default"), _) => { - lexer.skip(Token::Separator(':')); - let body = this.block(lexer, ctx, brace_nesting_level)?.0; - cases.push(ast::SwitchCase { - value: ast::SwitchValue::Default, - body, - fall_through: false, - }); - } - (Token::Paren('}'), _) => break, - (_, span) => { - return Err(Box::new(Error::Unexpected( - span, - ExpectedToken::SwitchItem, - ))) - } - } + (Token::Word("default"), _) => { + lexer.skip(Token::Separator(':')); + let body = this.block(lexer, ctx, brace_nesting_level)?.0; + cases.push(ast::SwitchCase { + value: ast::SwitchValue::Default, + body, + fall_through: false, + }); } - - ast::StatementKind::Switch { selector, cases } - } - "loop" => this.r#loop(lexer, ctx, brace_nesting_level)?, - "while" => { - let mut body = ast::Block::default(); - - let (condition, span) = - lexer.capture_span(|lexer| this.expression(lexer, ctx))?; - let mut reject = ast::Block::default(); - reject.stmts.push(ast::Statement { - kind: ast::StatementKind::Break, - span, - }); - - body.stmts.push(ast::Statement { - kind: ast::StatementKind::If { - 
condition, - accept: ast::Block::default(), - reject, - }, - span, - }); - - let (block, span) = this.block(lexer, ctx, brace_nesting_level)?; - body.stmts.push(ast::Statement { - kind: ast::StatementKind::Block(block), - span, - }); - - ast::StatementKind::Loop { - body, - continuing: ast::Block::default(), - break_if: None, + (Token::Paren('}'), _) => break, + (_, span) => { + return Err(Box::new(Error::Unexpected( + span, + ExpectedToken::SwitchItem, + ))) } } - "for" => { - lexer.expect(Token::Paren('('))?; - - ctx.local_table.push_scope(); - - if !lexer.skip(Token::Separator(';')) { - let num_statements = block.stmts.len(); - let (_, span) = { - let ctx = &mut *ctx; - let block = &mut *block; - lexer.capture_span(|lexer| { - this.statement(lexer, ctx, block, brace_nesting_level) - })? - }; - - if block.stmts.len() != num_statements { - match block.stmts.last().unwrap().kind { - ast::StatementKind::Call { .. } - | ast::StatementKind::Assign { .. } - | ast::StatementKind::LocalDecl(_) => {} - _ => { - return Err(Box::new(Error::InvalidForInitializer( - span, - ))) - } - } - } - }; + } - let mut body = ast::Block::default(); - if !lexer.skip(Token::Separator(';')) { - let (condition, span) = - lexer.capture_span(|lexer| -> Result<'_, _> { - let condition = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; - Ok(condition) - })?; - let mut reject = ast::Block::default(); - reject.stmts.push(ast::Statement { - kind: ast::StatementKind::Break, - span, - }); - body.stmts.push(ast::Statement { - kind: ast::StatementKind::If { - condition, - accept: ast::Block::default(), - reject, - }, - span, - }); - }; + ast::StatementKind::Switch { selector, cases } + } + (Token::Word("loop"), _) => this.r#loop(lexer, ctx, brace_nesting_level)?, + (Token::Word("while"), _) => { + let mut body = ast::Block::default(); + + let (condition, span) = + lexer.capture_span(|lexer| this.expression(lexer, ctx))?; + let mut reject = ast::Block::default(); + 
reject.stmts.push(ast::Statement { + kind: ast::StatementKind::Break, + span, + }); - let mut continuing = ast::Block::default(); - if !lexer.skip(Token::Paren(')')) { - let token = lexer.next(); - this.func_call_or_variable_updating_statement( - lexer, - ctx, - &mut continuing, - token, - )?; - lexer.expect(Token::Paren(')'))?; - } + body.stmts.push(ast::Statement { + kind: ast::StatementKind::If { + condition, + accept: ast::Block::default(), + reject, + }, + span, + }); - let (block, span) = this.block(lexer, ctx, brace_nesting_level)?; - body.stmts.push(ast::Statement { - kind: ast::StatementKind::Block(block), - span, - }); + let (block, span) = this.block(lexer, ctx, brace_nesting_level)?; + body.stmts.push(ast::Statement { + kind: ast::StatementKind::Block(block), + span, + }); - ctx.local_table.pop_scope(); + ast::StatementKind::Loop { + body, + continuing: ast::Block::default(), + break_if: None, + } + } + (Token::Word("for"), _) => { + lexer.expect(Token::Paren('('))?; - ast::StatementKind::Loop { - body, - continuing, - break_if: None, + ctx.local_table.push_scope(); + + if !lexer.skip(Token::Separator(';')) { + let num_statements = block.stmts.len(); + let (_, span) = { + let ctx = &mut *ctx; + let block = &mut *block; + lexer.capture_span(|lexer| { + this.statement(lexer, ctx, block, brace_nesting_level) + })? + }; + + if block.stmts.len() != num_statements { + match block.stmts.last().unwrap().kind { + ast::StatementKind::Call { .. } + | ast::StatementKind::Assign { .. } + | ast::StatementKind::LocalDecl(_) => {} + _ => return Err(Box::new(Error::InvalidForInitializer(span))), } } - "break" => { - // Check if the next token is an `if`, this indicates - // that the user tried to type out a `break if` which - // is illegal in this position. 
- let (peeked_token, peeked_span) = lexer.peek(); - if let Token::Word("if") = peeked_token { - let span = span.until(&peeked_span); - return Err(Box::new(Error::InvalidBreakIf(span))); - } - lexer.expect(Token::Separator(';'))?; - ast::StatementKind::Break - } - "continue" => { - lexer.expect(Token::Separator(';'))?; - ast::StatementKind::Continue - } - "discard" => { - lexer.expect(Token::Separator(';'))?; - ast::StatementKind::Kill - } - // https://www.w3.org/TR/WGSL/#const-assert-statement - "const_assert" => { - // parentheses are optional - let paren = lexer.skip(Token::Paren('(')); + }; + let mut body = ast::Block::default(); + if !lexer.skip(Token::Separator(';')) { + let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> { let condition = this.expression(lexer, ctx)?; - - if paren { - lexer.expect(Token::Paren(')'))?; - } - lexer.expect(Token::Separator(';'))?; - ast::StatementKind::ConstAssert(condition) - } - // assignment or a function call - _ => { - this.func_call_or_variable_updating_statement( - lexer, ctx, block, token, - )?; lexer.expect(Token::Separator(';'))?; - this.pop_rule_span(lexer); - return Ok(()); - } + Ok(condition) + })?; + let mut reject = ast::Block::default(); + reject.stmts.push(ast::Statement { + kind: ast::StatementKind::Break, + span, + }); + body.stmts.push(ast::Statement { + kind: ast::StatementKind::If { + condition, + accept: ast::Block::default(), + reject, + }, + span, + }); }; - let span = this.pop_rule_span(lexer); - block.stmts.push(ast::Statement { kind, span }); + let mut continuing = ast::Block::default(); + if !lexer.skip(Token::Paren(')')) { + let token = lexer.next(); + this.func_call_or_variable_updating_statement( + lexer, + ctx, + &mut continuing, + token, + )?; + lexer.expect(Token::Paren(')'))?; + } + + let (block, span) = this.block(lexer, ctx, brace_nesting_level)?; + body.stmts.push(ast::Statement { + kind: ast::StatementKind::Block(block), + span, + }); + + ctx.local_table.pop_scope(); + + 
ast::StatementKind::Loop { + body, + continuing, + break_if: None, + } + } + (Token::Word("break"), span) => { + // Check if the next token is an `if`, this indicates + // that the user tried to type out a `break if` which + // is illegal in this position. + let (peeked_token, peeked_span) = lexer.peek(); + if let Token::Word("if") = peeked_token { + let span = span.until(&peeked_span); + return Err(Box::new(Error::InvalidBreakIf(span))); + } + lexer.expect(Token::Separator(';'))?; + ast::StatementKind::Break + } + (Token::Word("continue"), _) => { + lexer.expect(Token::Separator(';'))?; + ast::StatementKind::Continue + } + (Token::Word("discard"), _) => { + lexer.expect(Token::Separator(';'))?; + ast::StatementKind::Kill + } + // https://www.w3.org/TR/WGSL/#const-assert-statement + (Token::Word("const_assert"), _) => { + // parentheses are optional + let paren = lexer.skip(Token::Paren('(')); + + let condition = this.expression(lexer, ctx)?; + + if paren { + lexer.expect(Token::Paren(')'))?; + } + lexer.expect(Token::Separator(';'))?; + ast::StatementKind::ConstAssert(condition) } token => { - this.variable_updating_statement(lexer, ctx, block, token)?; + this.func_call_or_variable_updating_statement(lexer, ctx, block, token)?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); + return Ok(()); } - } + }; + + let span = this.pop_rule_span(lexer); + block.stmts.push(ast::Statement { kind, span }); + Ok(()) }) } From 35799e66baba7c5fff30b694bbad3bcaf458604a Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 20:11:55 +0200 Subject: [PATCH 14/33] add function that parses variable, value, func call and variable updating statements --- naga/src/front/wgsl/error.rs | 9 -- naga/src/front/wgsl/parse/mod.rs | 148 +++++++++++++++++-------------- naga/tests/naga/wgsl_errors.rs | 4 +- 3 files changed, 82 insertions(+), 79 deletions(-) diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs 
index 05d58ee9110..7aa12e000ca 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -192,7 +192,6 @@ pub(crate) enum Error<'a> { }, BadIncrDecrReferenceType(Span), InvalidResolve(ResolveError), - InvalidForInitializer(Span), /// A break if appeared outside of a continuing block InvalidBreakIf(Span), InvalidGatherComponent(Span), @@ -612,14 +611,6 @@ impl<'a> Error<'a> { labels: vec![], notes: vec![], }, - Error::InvalidForInitializer(bad_span) => ParseError { - message: format!( - "for(;;) initializer is not an assignment or a function call: `{}`", - &source[bad_span] - ), - labels: vec![(bad_span, "not an assignment or function call".into())], - notes: vec![], - }, Error::InvalidBreakIf(bad_span) => ParseError { message: "A break if is only allowed in a continuing block".to_string(), labels: vec![(bad_span, "not in a continuing block".into())], diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 76e5bf54545..48b5aed71d3 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -2088,6 +2088,7 @@ impl Parser { Ok(()) } + /// Parses func_call_statement and variable_updating_statement fn func_call_or_variable_updating_statement<'a>( &mut self, lexer: &mut Lexer<'a>, @@ -2104,6 +2105,77 @@ impl Parser { } } + /// Parses variable_or_value_statement, func_call_statement and variable_updating_statement + fn variable_or_value_or_func_call_or_variable_updating_statement<'a>( + &mut self, + lexer: &mut Lexer<'a>, + ctx: &mut ExpressionContext<'a, '_, '_>, + block: &mut ast::Block<'a>, + token: TokenSpan<'a>, + ) -> Result<'a, ()> { + let span_start = token.1.to_range().unwrap().start; + + let local_decl = match token { + (Token::Word("let"), _) => { + let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?; + + lexer.expect(Token::Operation('='))?; + let expr_id = self.expression(lexer, ctx)?; + + let handle = ctx.declare_local(name)?; + ast::LocalDecl::Let(ast::Let { + name, 
+ ty: given_ty, + init: expr_id, + handle, + }) + } + (Token::Word("const"), _) => { + let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?; + + lexer.expect(Token::Operation('='))?; + let expr_id = self.expression(lexer, ctx)?; + + let handle = ctx.declare_local(name)?; + ast::LocalDecl::Const(ast::LocalConst { + name, + ty: given_ty, + init: expr_id, + handle, + }) + } + (Token::Word("var"), _) => { + let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; + + let init = if lexer.skip(Token::Operation('=')) { + let init = self.expression(lexer, ctx)?; + Some(init) + } else { + None + }; + + let handle = ctx.declare_local(name)?; + ast::LocalDecl::Var(ast::LocalVariable { + name, + ty, + init, + handle, + }) + } + token => { + return self.func_call_or_variable_updating_statement(lexer, ctx, block, token); + } + }; + + let span = lexer.span_from(span_start); + block.stmts.push(ast::Statement { + kind: ast::StatementKind::LocalDecl(local_decl), + span, + }); + + Ok(()) + } + fn statement<'a>( &mut self, lexer: &mut Lexer<'a>, @@ -2132,56 +2204,6 @@ impl Parser { this.pop_rule_span(lexer); return Ok(()); } - (Token::Word("let"), _) => { - let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - - lexer.expect(Token::Operation('='))?; - let expr_id = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; - - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Let(ast::Let { - name, - ty: given_ty, - init: expr_id, - handle, - })) - } - (Token::Word("const"), _) => { - let (name, given_ty) = this.optionally_typed_ident(lexer, ctx)?; - - lexer.expect(Token::Operation('='))?; - let expr_id = this.expression(lexer, ctx)?; - lexer.expect(Token::Separator(';'))?; - - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Const(ast::LocalConst { - name, - ty: given_ty, - init: expr_id, - handle, - })) - } - (Token::Word("var"), _) => { - let (name, ty) = 
this.optionally_typed_ident(lexer, ctx)?; - - let init = if lexer.skip(Token::Operation('=')) { - let init = this.expression(lexer, ctx)?; - Some(init) - } else { - None - }; - - lexer.expect(Token::Separator(';'))?; - - let handle = ctx.declare_local(name)?; - ast::StatementKind::LocalDecl(ast::LocalDecl::Var(ast::LocalVariable { - name, - ty, - init, - handle, - })) - } (Token::Word("return"), _) => { let value = if lexer.peek().0 != Token::Separator(';') { let handle = this.expression(lexer, ctx)?; @@ -2341,23 +2363,11 @@ impl Parser { ctx.local_table.push_scope(); if !lexer.skip(Token::Separator(';')) { - let num_statements = block.stmts.len(); - let (_, span) = { - let ctx = &mut *ctx; - let block = &mut *block; - lexer.capture_span(|lexer| { - this.statement(lexer, ctx, block, brace_nesting_level) - })? - }; - - if block.stmts.len() != num_statements { - match block.stmts.last().unwrap().kind { - ast::StatementKind::Call { .. } - | ast::StatementKind::Assign { .. } - | ast::StatementKind::LocalDecl(_) => {} - _ => return Err(Box::new(Error::InvalidForInitializer(span))), - } - } + let token = lexer.next(); + this.variable_or_value_or_func_call_or_variable_updating_statement( + lexer, ctx, block, token, + )?; + lexer.expect(Token::Separator(';'))?; }; let mut body = ast::Block::default(); @@ -2442,7 +2452,9 @@ impl Parser { ast::StatementKind::ConstAssert(condition) } token => { - this.func_call_or_variable_updating_statement(lexer, ctx, block, token)?; + this.variable_or_value_or_func_call_or_variable_updating_statement( + lexer, ctx, block, token, + )?; lexer.expect(Token::Separator(';'))?; this.pop_rule_span(lexer); return Ok(()); diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index 09048a673a3..b95685d26cf 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -396,11 +396,11 @@ fn bad_for_initializer() { for ({};;) {} } "#, - r#"error: for(;;) initializer is not an assignment or a function call: 
`{}` + r#"error: expected lhs_expression, found "{" ┌─ wgsl:3:22 │ 3 │ for ({};;) {} - │ ^^ not an assignment or function call + │ ^ expected lhs_expression "#, ); From 1149d020a782b94b98b31c16efa7f3c9de91218a Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 20:23:23 +0200 Subject: [PATCH 15/33] use `lexer.skip()` in one more place --- naga/src/front/wgsl/parse/mod.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 48b5aed71d3..a966fe3987f 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -353,8 +353,7 @@ impl Parser { lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, ast::SwitchValue<'a>> { - if let Token::Word("default") = lexer.peek().0 { - let _ = lexer.next(); + if lexer.skip(Token::Word("default")) { return Ok(ast::SwitchValue::Default); } From f33897dda54881d9a9fc0cb640e75eafcaa75b23 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 20:24:22 +0200 Subject: [PATCH 16/33] rename `Lexer` `skip` to `next_if` --- naga/src/front/wgsl/parse/lexer.rs | 18 +++---- naga/src/front/wgsl/parse/mod.rs | 84 +++++++++++++++--------------- 2 files changed, 51 insertions(+), 51 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index a7671596839..f6399444ea5 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -444,11 +444,11 @@ impl<'a> Lexer<'a> { } pub(in crate::front::wgsl) fn end_of_generic_arguments(&mut self) -> bool { - self.skip(Token::Separator(',')) && self.peek().0 != Token::Paren('>') + self.next_if(Token::Separator(',')) && self.peek().0 != Token::Paren('>') } - /// If the next token matches it is skipped and true is returned - pub(in crate::front::wgsl) fn skip(&mut self, what: Token<'_>) -> bool { + /// If the 
next token matches it's consumed and true is returned + pub(in crate::front::wgsl) fn next_if(&mut self, what: Token<'_>) -> bool { let (peeked_token, rest) = self.peek_token_and_rest(); if peeked_token.0 == what { self.input = rest; @@ -535,15 +535,15 @@ impl<'a> Lexer<'a> { } pub(in crate::front::wgsl) fn next_acceleration_structure_flags(&mut self) -> Result<'a, bool> { - Ok(if self.skip(Token::Paren('<')) { - if !self.skip(Token::Paren('>')) { + Ok(if self.next_if(Token::Paren('<')) { + if !self.next_if(Token::Paren('>')) { let (name, span) = self.next_ident_with_span()?; let ret = if name == "vertex_return" { true } else { return Err(Box::new(Error::UnknownAttribute(span))); }; - self.skip(Token::Separator(',')); + self.next_if(Token::Separator(',')); self.expect(Token::Paren('>'))?; ret } else { @@ -559,14 +559,14 @@ impl<'a> Lexer<'a> { } pub(in crate::front::wgsl) fn close_arguments(&mut self) -> Result<'a, ()> { - let _ = self.skip(Token::Separator(',')); + let _ = self.next_if(Token::Separator(',')); self.expect(Token::Paren(')')) } pub(in crate::front::wgsl) fn next_argument(&mut self) -> Result<'a, bool> { let paren = Token::Paren(')'); - if self.skip(Token::Separator(',')) { - Ok(!self.skip(paren)) + if self.next_if(Token::Separator(',')) { + Ok(!self.next_if(paren)) } else { self.expect(paren).map(|()| false) } diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index a966fe3987f..c171e192d26 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -210,7 +210,7 @@ impl<'a> BindingParser<'a> { let (raw, span) = lexer.next_ident_with_span()?; self.interpolation .set(conv::map_interpolation(raw, span)?, name_span)?; - if lexer.skip(Token::Separator(',')) { + if lexer.next_if(Token::Separator(',')) { let (raw, span) = lexer.next_ident_with_span()?; self.sampling .set(conv::map_sampling(raw, span)?, name_span)?; @@ -235,7 +235,7 @@ impl<'a> BindingParser<'a> { lexer.expect(Token::Paren('('))?; 
self.blend_src .set(parser.expression(lexer, ctx)?, name_span)?; - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); lexer.expect(Token::Paren(')'))?; } _ => return Err(Box::new(Error::UnknownAttribute(name_span))), @@ -353,7 +353,7 @@ impl Parser { lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, ast::SwitchValue<'a>> { - if lexer.skip(Token::Word("default")) { + if lexer.next_if(Token::Word("default")) { return Ok(ast::SwitchValue::Default); } @@ -705,7 +705,7 @@ impl Parser { let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let expr = self.const_generic_expression(lexer, ctx)?; - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); ast::ArraySize::Constant(expr) } else { ast::ArraySize::Dynamic @@ -732,7 +732,7 @@ impl Parser { if !lexer.next_argument()? { break; } - } else if lexer.skip(Token::Paren(')')) { + } else if lexer.next_if(Token::Paren(')')) { break; } let arg = self.expression(lexer, ctx)?; @@ -1299,7 +1299,7 @@ impl Parser { ) -> Result<'a, (ast::Ident<'a>, Option>>)> { let name = lexer.next_ident()?; - let ty = if lexer.skip(Token::Separator(':')) { + let ty = if lexer.next_if(Token::Separator(':')) { Some(self.type_specifier(lexer, ctx)?) } else { None @@ -1317,11 +1317,11 @@ impl Parser { self.push_rule_span(Rule::VariableDecl, lexer); let mut space = crate::AddressSpace::Handle; - if lexer.skip(Token::Paren('<')) { + if lexer.next_if(Token::Paren('<')) { let (class_str, span) = lexer.next_ident_with_span()?; space = match class_str { "storage" => { - let access = if lexer.skip(Token::Separator(',')) { + let access = if lexer.next_if(Token::Separator(',')) { lexer.next_storage_access()? 
} else { // defaulting to `read` @@ -1335,7 +1335,7 @@ impl Parser { } let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; - let init = if lexer.skip(Token::Operation('=')) { + let init = if lexer.next_if(Token::Operation('=')) { let handle = self.expression(lexer, ctx)?; Some(handle) } else { @@ -1364,7 +1364,7 @@ impl Parser { lexer.expect(Token::Paren('{'))?; let mut ready = true; - while !lexer.skip(Token::Paren('}')) { + while !lexer.next_if(Token::Paren('}')) { if !ready { return Err(Box::new(Error::Unexpected( lexer.next().1, @@ -1377,7 +1377,7 @@ impl Parser { let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default()); self.push_rule_span(Rule::Attribute, lexer); let mut bind_parser = BindingParser::default(); - while lexer.skip(Token::Attribute) { + while lexer.next_if(Token::Attribute) { match lexer.next_ident_with_span()? { ("size", name_span) => { lexer.expect(Token::Paren('('))?; @@ -1401,7 +1401,7 @@ impl Parser { let name = lexer.next_ident()?; lexer.expect(Token::Separator(':'))?; let ty = self.type_specifier(lexer, ctx)?; - ready = lexer.skip(Token::Separator(',')); + ready = lexer.next_if(Token::Separator(',')); members.push(ast::StructMember { name, @@ -1437,7 +1437,7 @@ impl Parser { let start = lexer.start_byte_offset(); let ty = self.type_specifier(lexer, ctx)?; let span = lexer.span_from(start); - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); lexer.expect_generic_paren('>')?; Ok((ty, span)) } @@ -1702,7 +1702,7 @@ impl Parser { if let crate::AddressSpace::Storage { ref mut access } = space { *access = if lexer.end_of_generic_arguments() { let result = lexer.next_storage_access()?; - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); result } else { crate::StorageAccess::LOAD @@ -1716,7 +1716,7 @@ impl Parser { let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.const_generic_expression(lexer, ctx)?; - 
lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); ast::ArraySize::Constant(size) } else { ast::ArraySize::Dynamic @@ -1730,7 +1730,7 @@ impl Parser { let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.unary_expression(lexer, ctx)?; - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); ast::ArraySize::Constant(size) } else { ast::ArraySize::Dynamic @@ -2146,7 +2146,7 @@ impl Parser { (Token::Word("var"), _) => { let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; - let init = if lexer.skip(Token::Operation('=')) { + let init = if lexer.next_if(Token::Operation('=')) { let init = self.expression(lexer, ctx)?; Some(init) } else { @@ -2221,11 +2221,11 @@ impl Parser { let mut elsif_stack = Vec::new(); let mut elseif_span_start = lexer.start_byte_offset(); let mut reject = loop { - if !lexer.skip(Token::Word("else")) { + if !lexer.next_if(Token::Word("else")) { break ast::Block::default(); } - if !lexer.skip(Token::Word("if")) { + if !lexer.next_if(Token::Word("if")) { // ... else { ... 
} break this.block(lexer, ctx, brace_nesting_level)?.0; } @@ -2274,7 +2274,7 @@ impl Parser { // parse a list of values let value = loop { let value = this.switch_value(lexer, ctx)?; - if lexer.skip(Token::Separator(',')) { + if lexer.next_if(Token::Separator(',')) { // list of values ends with ':' or a compound statement let next_token = lexer.peek().0; if next_token == Token::Separator(':') @@ -2292,7 +2292,7 @@ impl Parser { }); }; - lexer.skip(Token::Separator(':')); + lexer.next_if(Token::Separator(':')); let body = this.block(lexer, ctx, brace_nesting_level)?.0; @@ -2303,7 +2303,7 @@ impl Parser { }); } (Token::Word("default"), _) => { - lexer.skip(Token::Separator(':')); + lexer.next_if(Token::Separator(':')); let body = this.block(lexer, ctx, brace_nesting_level)?.0; cases.push(ast::SwitchCase { value: ast::SwitchValue::Default, @@ -2361,7 +2361,7 @@ impl Parser { ctx.local_table.push_scope(); - if !lexer.skip(Token::Separator(';')) { + if !lexer.next_if(Token::Separator(';')) { let token = lexer.next(); this.variable_or_value_or_func_call_or_variable_updating_statement( lexer, ctx, block, token, @@ -2370,7 +2370,7 @@ impl Parser { }; let mut body = ast::Block::default(); - if !lexer.skip(Token::Separator(';')) { + if !lexer.next_if(Token::Separator(';')) { let (condition, span) = lexer.capture_span(|lexer| -> Result<'_, _> { let condition = this.expression(lexer, ctx)?; lexer.expect(Token::Separator(';'))?; @@ -2392,7 +2392,7 @@ impl Parser { }; let mut continuing = ast::Block::default(); - if !lexer.skip(Token::Paren(')')) { + if !lexer.next_if(Token::Paren(')')) { let token = lexer.next(); this.func_call_or_variable_updating_statement( lexer, @@ -2440,7 +2440,7 @@ impl Parser { // https://www.w3.org/TR/WGSL/#const-assert-statement (Token::Word("const_assert"), _) => { // parentheses are optional - let paren = lexer.skip(Token::Paren('(')); + let paren = lexer.next_if(Token::Paren('(')); let condition = this.expression(lexer, ctx)?; @@ -2483,7 +2483,7 @@ 
impl Parser { ctx.local_table.push_scope(); loop { - if lexer.skip(Token::Word("continuing")) { + if lexer.next_if(Token::Word("continuing")) { // Branch for the `continuing` block, this must be // the last thing in the loop body @@ -2492,7 +2492,7 @@ impl Parser { let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?; loop { - if lexer.skip(Token::Word("break")) { + if lexer.next_if(Token::Word("break")) { // Branch for the `break if` statement, this statement // has the form `break if ;` and must be the last // statement in a continuing block @@ -2513,7 +2513,7 @@ impl Parser { lexer.expect(Token::Paren('}'))?; // Stop parsing the continuing block break; - } else if lexer.skip(Token::Paren('}')) { + } else if lexer.next_if(Token::Paren('}')) { // If we encounter a closing brace it means we have reached // the end of the continuing block and should stop processing break; @@ -2527,7 +2527,7 @@ impl Parser { lexer.expect(Token::Paren('}'))?; break; } - if lexer.skip(Token::Paren('}')) { + if lexer.next_if(Token::Paren('}')) { // If we encounter a closing brace it means we have reached // the end of the loop body and should stop processing break; @@ -2559,7 +2559,7 @@ impl Parser { let mut diagnostic_filters = DiagnosticFilterMap::new(); self.push_rule_span(Rule::Attribute, lexer); - while lexer.skip(Token::Attribute) { + while lexer.next_if(Token::Attribute) { let (name, name_span) = lexer.next_ident_with_span()?; if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) { let filter = self.diagnostic_filter(lexer)?; @@ -2588,7 +2588,7 @@ impl Parser { let brace_span = lexer.expect_span(Token::Paren('{'))?; let brace_nesting_level = Self::increase_brace_nesting(brace_nesting_level, brace_span)?; let mut block = ast::Block::default(); - while !lexer.skip(Token::Paren('}')) { + while !lexer.next_if(Token::Paren('}')) { self.statement(lexer, ctx, &mut block, brace_nesting_level)?; } @@ -2606,7 +2606,7 @@ impl Parser { 
let mut bind_parser = BindingParser::default(); self.push_rule_span(Rule::Attribute, lexer); - while lexer.skip(Token::Attribute) { + while lexer.next_if(Token::Attribute) { let (word, span) = lexer.next_ident_with_span()?; bind_parser.parse(self, lexer, word, span, ctx)?; } @@ -2644,7 +2644,7 @@ impl Parser { let mut arguments = Vec::new(); lexer.expect(Token::Paren('('))?; let mut ready = true; - while !lexer.skip(Token::Paren(')')) { + while !lexer.next_if(Token::Paren(')')) { if !ready { return Err(Box::new(Error::Unexpected( lexer.next().1, @@ -2665,10 +2665,10 @@ impl Parser { binding, handle, }); - ready = lexer.skip(Token::Separator(',')); + ready = lexer.next_if(Token::Separator(',')); } // read return type - let result = if lexer.skip(Token::Arrow) { + let result = if lexer.next_if(Token::Arrow) { let binding = self.varying_binding(lexer, &mut ctx)?; let ty = self.type_specifier(lexer, &mut ctx)?; let must_use = must_use.is_some(); @@ -2690,7 +2690,7 @@ impl Parser { lexer.expect(Token::Paren('{'))?; let brace_nesting_level = 1; let mut body = ast::Block::default(); - while !lexer.skip(Token::Paren('}')) { + while !lexer.next_if(Token::Paren('}')) { self.statement(lexer, &mut ctx, &mut body, brace_nesting_level)?; } @@ -2781,7 +2781,7 @@ impl Parser { }; self.push_rule_span(Rule::Attribute, lexer); - while lexer.skip(Token::Attribute) { + while lexer.next_if(Token::Attribute) { let (name, name_span) = lexer.next_ident_with_span()?; if let Some(DirectiveKind::Diagnostic) = DirectiveKind::from_ident(name) { let filter = self.diagnostic_filter(lexer)?; @@ -2929,7 +2929,7 @@ impl Parser { let (name, ty) = self.optionally_typed_ident(lexer, &mut ctx)?; - let init = if lexer.skip(Token::Operation('=')) { + let init = if lexer.next_if(Token::Operation('=')) { Some(self.expression(lexer, &mut ctx)?) 
} else { None @@ -2987,7 +2987,7 @@ impl Parser { ensure_no_diag_attrs("`const_assert`s".into(), diagnostic_filters)?; // parentheses are optional - let paren = lexer.skip(Token::Paren('(')); + let paren = lexer.next_if(Token::Paren('(')); let condition = self.expression(lexer, &mut ctx)?; @@ -3148,7 +3148,7 @@ impl Parser { lexer.expect(Token::Separator(','))?; let (diagnostic_name_token, diagnostic_name_token_span) = lexer.next_ident_with_span()?; - let triggering_rule = if lexer.skip(Token::Separator('.')) { + let triggering_rule = if lexer.next_if(Token::Separator('.')) { let (ident, _span) = lexer.next_ident_with_span()?; FilterableTriggeringRule::User(Box::new([diagnostic_name_token.into(), ident.into()])) } else { @@ -3170,7 +3170,7 @@ impl Parser { triggering_rule, new_severity, }; - lexer.skip(Token::Separator(',')); + lexer.next_if(Token::Separator(',')); lexer.expect(Token::Paren(')'))?; Ok(filter) From bf6be2e193701d28d10a53322663e09817b398cc Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 20:28:34 +0200 Subject: [PATCH 17/33] use `lexer.next()` in `primary_expression` --- naga/src/front/wgsl/parse/mod.rs | 42 ++++++-------------------------- 1 file changed, 7 insertions(+), 35 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index c171e192d26..4173322c022 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -835,24 +835,18 @@ impl Parser { ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32))) } - let expr = match lexer.peek() { + let start = lexer.start_byte_offset(); + + let expr = match lexer.next() { (Token::Paren('('), _) => { - let _ = lexer.next(); let expr = self.enclosed_expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; self.pop_rule_span(lexer); return Ok(expr); } - (Token::Word("true"), _) => { - let _ = lexer.next(); - ast::Expression::Literal(ast::Literal::Bool(true)) - } 
- (Token::Word("false"), _) => { - let _ = lexer.next(); - ast::Expression::Literal(ast::Literal::Bool(false)) - } + (Token::Word("true"), _) => ast::Expression::Literal(ast::Literal::Bool(true)), + (Token::Word("false"), _) => ast::Expression::Literal(ast::Literal::Bool(false)), (Token::Number(res), span) => { - let _ = lexer.next(); let num = res.map_err(|err| Error::BadNumber(span, err))?; if let Some(enable_extension) = num.requires_enable_extension() { @@ -866,70 +860,48 @@ impl Parser { ast::Expression::Literal(ast::Literal::Number(num)) } - (Token::Word("RAY_FLAG_NONE"), _) => { - let _ = lexer.next(); - literal_ray_flag(crate::RayFlag::empty()) - } + (Token::Word("RAY_FLAG_NONE"), _) => literal_ray_flag(crate::RayFlag::empty()), (Token::Word("RAY_FLAG_FORCE_OPAQUE"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::FORCE_OPAQUE) } (Token::Word("RAY_FLAG_FORCE_NO_OPAQUE"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::FORCE_NO_OPAQUE) } (Token::Word("RAY_FLAG_TERMINATE_ON_FIRST_HIT"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::TERMINATE_ON_FIRST_HIT) } (Token::Word("RAY_FLAG_SKIP_CLOSEST_HIT_SHADER"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::SKIP_CLOSEST_HIT_SHADER) } (Token::Word("RAY_FLAG_CULL_BACK_FACING"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::CULL_BACK_FACING) } (Token::Word("RAY_FLAG_CULL_FRONT_FACING"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::CULL_FRONT_FACING) } (Token::Word("RAY_FLAG_CULL_OPAQUE"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::CULL_OPAQUE) } (Token::Word("RAY_FLAG_CULL_NO_OPAQUE"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::CULL_NO_OPAQUE) } (Token::Word("RAY_FLAG_SKIP_TRIANGLES"), _) => { - let _ = lexer.next(); literal_ray_flag(crate::RayFlag::SKIP_TRIANGLES) } - (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => { - let _ = lexer.next(); - 
literal_ray_flag(crate::RayFlag::SKIP_AABBS) - } + (Token::Word("RAY_FLAG_SKIP_AABBS"), _) => literal_ray_flag(crate::RayFlag::SKIP_AABBS), (Token::Word("RAY_QUERY_INTERSECTION_NONE"), _) => { - let _ = lexer.next(); literal_ray_intersection(crate::RayQueryIntersection::None) } (Token::Word("RAY_QUERY_INTERSECTION_TRIANGLE"), _) => { - let _ = lexer.next(); literal_ray_intersection(crate::RayQueryIntersection::Triangle) } (Token::Word("RAY_QUERY_INTERSECTION_GENERATED"), _) => { - let _ = lexer.next(); literal_ray_intersection(crate::RayQueryIntersection::Generated) } (Token::Word("RAY_QUERY_INTERSECTION_AABB"), _) => { - let _ = lexer.next(); literal_ray_intersection(crate::RayQueryIntersection::Aabb) } (Token::Word(word), span) => { - let start = lexer.start_byte_offset(); - let _ = lexer.next(); - if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? { let ty_span = lexer.span_from(start); let components = self.arguments(lexer, ctx)?; From afcdc3c4a3b2c37497f8341b95541c7510a41193 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 20:31:01 +0200 Subject: [PATCH 18/33] rename `function_call` to `call_expression` --- naga/src/front/wgsl/parse/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 4173322c022..36bb6f14e79 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -756,7 +756,7 @@ impl Parser { /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it. /// Expects `name` to be consumed (not in lexer). 
- fn function_call<'a>( + fn call_expression<'a>( &mut self, lexer: &mut Lexer<'a>, name: &'a str, @@ -912,10 +912,10 @@ impl Parser { } } else if let Token::Paren('(') = lexer.peek().0 { self.pop_rule_span(lexer); - return self.function_call(lexer, word, span, ctx); + return self.call_expression(lexer, word, span, ctx); } else if word == "bitcast" { self.pop_rule_span(lexer); - return self.function_call(lexer, word, span, ctx); + return self.call_expression(lexer, word, span, ctx); } else { let ident = self.ident_expr(word, span, ctx); ast::Expression::Ident(ident) From f827139b5bfd7c0b21548f10b8be333583955764 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 21:45:56 +0200 Subject: [PATCH 19/33] use `lexer.next()` in `unary_expression` --- naga/src/front/wgsl/parse/mod.rs | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 36bb6f14e79..7cd547d6db4 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -754,7 +754,6 @@ impl Parser { Ok(expr) } - /// Expects [`Rule::PrimaryExpr`] or [`Rule::SingularExpr`] on top; does not pop it. /// Expects `name` to be consumed (not in lexer). fn call_expression<'a>( &mut self, @@ -763,8 +762,6 @@ impl Parser { name_span: Span, ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, Handle>> { - assert!(self.rules.last().is_some()); - let expr = match name { // bitcast looks like a function call, but it's an operator and must be handled differently. 
"bitcast" => { @@ -797,7 +794,7 @@ impl Parser { } }; - let span = self.peek_rule_span(lexer); + let span = lexer.span_from(name_span.to_range().unwrap().start); let expr = ctx.expressions.append(expr, span); Ok(expr) } @@ -824,8 +821,10 @@ impl Parser { &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, + token: TokenSpan<'a>, ) -> Result<'a, Handle>> { self.push_rule_span(Rule::PrimaryExpr, lexer); + const fn literal_ray_flag<'b>(flag: crate::RayFlag) -> ast::Expression<'b> { ast::Expression::Literal(ast::Literal::Number(Number::U32(flag.bits()))) } @@ -835,9 +834,9 @@ impl Parser { ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32))) } - let start = lexer.start_byte_offset(); + let start = token.1.to_range().unwrap().start; - let expr = match lexer.next() { + let expr = match token { (Token::Paren('('), _) => { let expr = self.enclosed_expression(lexer, ctx)?; lexer.expect(Token::Paren(')'))?; @@ -929,7 +928,8 @@ impl Parser { } }; - let span = self.pop_rule_span(lexer); + self.pop_rule_span(lexer); + let span = lexer.span_from(start); let expr = ctx.expressions.append(expr, span); Ok(expr) } @@ -988,9 +988,9 @@ impl Parser { self.track_recursion(|this| { this.push_rule_span(Rule::UnaryExpr, lexer); //TODO: refactor this to avoid backing up - let expr = match lexer.peek().0 { + let token = lexer.next(); + let expr = match token.0 { Token::Operation('-') => { - let _ = lexer.next(); let expr = this.unary_expression(lexer, ctx)?; let expr = ast::Expression::Unary { op: crate::UnaryOperator::Negate, @@ -1000,7 +1000,6 @@ impl Parser { ctx.expressions.append(expr, span) } Token::Operation('!') => { - let _ = lexer.next(); let expr = this.unary_expression(lexer, ctx)?; let expr = ast::Expression::Unary { op: crate::UnaryOperator::LogicalNot, @@ -1010,7 +1009,6 @@ impl Parser { ctx.expressions.append(expr, span) } Token::Operation('~') => { - let _ = lexer.next(); let expr = this.unary_expression(lexer, ctx)?; let 
expr = ast::Expression::Unary { op: crate::UnaryOperator::BitwiseNot, @@ -1020,20 +1018,18 @@ impl Parser { ctx.expressions.append(expr, span) } Token::Operation('*') => { - let _ = lexer.next(); let expr = this.unary_expression(lexer, ctx)?; let expr = ast::Expression::Deref(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } Token::Operation('&') => { - let _ = lexer.next(); let expr = this.unary_expression(lexer, ctx)?; let expr = ast::Expression::AddrOf(expr); let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } - _ => this.singular_expression(lexer, ctx)?, + _ => this.singular_expression(lexer, ctx, token)?, }; this.pop_rule_span(lexer); @@ -1096,10 +1092,11 @@ impl Parser { &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, + token: TokenSpan<'a>, ) -> Result<'a, Handle>> { - let start = lexer.start_byte_offset(); + let start = token.1.to_range().unwrap().start; self.push_rule_span(Rule::SingularExpr, lexer); - let primary_expr = self.primary_expression(lexer, ctx)?; + let primary_expr = self.primary_expression(lexer, ctx, token)?; let singular_expr = self.component_or_swizzle_specifier(start, lexer, ctx, primary_expr)?; self.pop_rule_span(lexer); From 0e65fa309b52b36f76a2ffb8f24bdb742559710a Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 27 Aug 2025 22:07:54 +0200 Subject: [PATCH 20/33] avoid recursing into `unary_expression` --- naga/src/front/wgsl/parse/mod.rs | 96 ++++++++++++++++++-------------- 1 file changed, 53 insertions(+), 43 deletions(-) diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 7cd547d6db4..4368cba5d44 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -985,56 +985,66 @@ impl Parser { lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, Handle>> { - self.track_recursion(|this| { - this.push_rule_span(Rule::UnaryExpr, 
lexer); - //TODO: refactor this to avoid backing up - let token = lexer.next(); - let expr = match token.0 { - Token::Operation('-') => { - let expr = this.unary_expression(lexer, ctx)?; - let expr = ast::Expression::Unary { - op: crate::UnaryOperator::Negate, - expr, - }; - let span = this.peek_rule_span(lexer); - ctx.expressions.append(expr, span) + self.push_rule_span(Rule::UnaryExpr, lexer); + + enum UnaryOp { + Negate, + LogicalNot, + BitwiseNot, + Deref, + AddrOf, + } + + let mut ops = Vec::new(); + let mut expr; + + loop { + match lexer.next() { + (Token::Operation('-'), span) => { + ops.push((UnaryOp::Negate, span)); } - Token::Operation('!') => { - let expr = this.unary_expression(lexer, ctx)?; - let expr = ast::Expression::Unary { - op: crate::UnaryOperator::LogicalNot, - expr, - }; - let span = this.peek_rule_span(lexer); - ctx.expressions.append(expr, span) + (Token::Operation('!'), span) => { + ops.push((UnaryOp::LogicalNot, span)); } - Token::Operation('~') => { - let expr = this.unary_expression(lexer, ctx)?; - let expr = ast::Expression::Unary { - op: crate::UnaryOperator::BitwiseNot, - expr, - }; - let span = this.peek_rule_span(lexer); - ctx.expressions.append(expr, span) + (Token::Operation('~'), span) => { + ops.push((UnaryOp::BitwiseNot, span)); } - Token::Operation('*') => { - let expr = this.unary_expression(lexer, ctx)?; - let expr = ast::Expression::Deref(expr); - let span = this.peek_rule_span(lexer); - ctx.expressions.append(expr, span) + (Token::Operation('*'), span) => { + ops.push((UnaryOp::Deref, span)); } - Token::Operation('&') => { - let expr = this.unary_expression(lexer, ctx)?; - let expr = ast::Expression::AddrOf(expr); - let span = this.peek_rule_span(lexer); - ctx.expressions.append(expr, span) + (Token::Operation('&'), span) => { + ops.push((UnaryOp::AddrOf, span)); + } + token => { + expr = self.singular_expression(lexer, ctx, token)?; + break; } - _ => this.singular_expression(lexer, ctx, token)?, }; + } - 
this.pop_rule_span(lexer); - Ok(expr) - }) + for (op, span) in ops.into_iter().rev() { + let e = match op { + UnaryOp::Negate => ast::Expression::Unary { + op: crate::UnaryOperator::Negate, + expr, + }, + UnaryOp::LogicalNot => ast::Expression::Unary { + op: crate::UnaryOperator::LogicalNot, + expr, + }, + UnaryOp::BitwiseNot => ast::Expression::Unary { + op: crate::UnaryOperator::BitwiseNot, + expr, + }, + UnaryOp::Deref => ast::Expression::Deref(expr), + UnaryOp::AddrOf => ast::Expression::AddrOf(expr), + }; + let span = lexer.span_from(span.to_range().unwrap().start); + expr = ctx.expressions.append(e, span); + } + + self.pop_rule_span(lexer); + Ok(expr) } /// Parse a `lhs_expression`. From 6ae723053ee9abbb7170b8503782ef9853177773 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Fri, 3 Oct 2025 15:17:46 +0200 Subject: [PATCH 21/33] remove `Clone` derive from the `Lexer` also fixes spans in some cases --- naga/src/front/wgsl/parse/lexer.rs | 39 +++++++++++++++--------------- naga/tests/naga/validation.rs | 11 ++++++--- naga/tests/naga/wgsl_errors.rs | 7 +++--- 3 files changed, 30 insertions(+), 27 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index f6399444ea5..2f9a9be684c 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -243,7 +243,6 @@ fn is_word_part(c: char) -> bool { unicode_ident::is_xid_continue(c) } -#[derive(Clone)] pub(in crate::front::wgsl) struct Lexer<'a> { /// The remaining unconsumed input. input: &'a str, @@ -311,13 +310,6 @@ impl<'a> Lexer<'a> { } } - fn peek_token_and_rest(&mut self) -> (TokenSpan<'a>, &'a str) { - let mut cloned = self.clone(); - let token = cloned.next(); - let rest = cloned.input; - (token, rest) - } - /// Collect all module doc comments until a non doc token is found. 
pub(in crate::front::wgsl) fn accumulate_module_doc_comments(&mut self) -> Vec<&'a str> { let mut doc_comments = Vec::new(); @@ -407,10 +399,28 @@ impl<'a> Lexer<'a> { #[must_use] pub(in crate::front::wgsl) fn peek(&mut self) -> TokenSpan<'a> { - let (token, _) = self.peek_token_and_rest(); + let input = self.input; + let last_end_offset = self.last_end_offset; + let token = self.next(); + self.input = input; + self.last_end_offset = last_end_offset; token } + /// If the next token matches it's consumed and true is returned + pub(in crate::front::wgsl) fn next_if(&mut self, what: Token<'_>) -> bool { + let input = self.input; + let last_end_offset = self.last_end_offset; + let token = self.next(); + if token.0 == what { + true + } else { + self.input = input; + self.last_end_offset = last_end_offset; + false + } + } + pub(in crate::front::wgsl) fn expect_span(&mut self, expected: Token<'a>) -> Result<'a, Span> { let next = self.next(); if next.0 == expected { @@ -447,17 +457,6 @@ impl<'a> Lexer<'a> { self.next_if(Token::Separator(',')) && self.peek().0 != Token::Paren('>') } - /// If the next token matches it's consumed and true is returned - pub(in crate::front::wgsl) fn next_if(&mut self, what: Token<'_>) -> bool { - let (peeked_token, rest) = self.peek_token_and_rest(); - if peeked_token.0 == what { - self.input = rest; - true - } else { - false - } - } - pub(in crate::front::wgsl) fn next_ident_with_span(&mut self) -> Result<'a, (&'a str, Span)> { match self.next() { (Token::Word("_"), span) => Err(Box::new(Error::InvalidIdentifierUnderscore(span))), diff --git a/naga/tests/naga/validation.rs b/naga/tests/naga/validation.rs index 7c12e79620f..d3af21f1c01 100644 --- a/naga/tests/naga/validation.rs +++ b/naga/tests/naga/validation.rs @@ -668,11 +668,14 @@ fn validation_error_messages() { "\ error: Function [1] 'main' is invalid ┌─ wgsl:7:17 - │ \n7 │ ╭ fn main() { + │\x20\x20 +7 │ ╭ fn main() { 8 │ │ foo(); - │ │ ^^^^ invalid function call - │ 
╰──────────────────────────^ naga::ir::Function [1] - │ \n = Call to [0] is invalid + │ │ ^^^^^ invalid function call +9 │ │ } + │ ╰─────────────────^ naga::ir::Function [1] + │\x20\x20 + = Call to [0] is invalid = Requires 1 arguments, but 0 are provided ", diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index b95685d26cf..06b87f0186a 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -2887,7 +2887,7 @@ fn use_me(a: i32) { 2 │ @must_use │ ^^^^^^^^ 3 │ fn use_me(a: i32) { - │ ^^^^^^^^^^^^^ + │ ^^^^^^^^^^^^^^ │ = note: declare a return type or remove the attribute @@ -3694,7 +3694,7 @@ fn inconsistent_type() { ┌─ wgsl:2:20 │ 2 │ return dot(vec4(), vec3()); - │ ^^^ ^^^^^^^^^^ ^^^^^^^^^^ argument #2 has type vec3 + │ ^^^ ^^^^^^^^^^^ ^^^^^^^^^^^ argument #2 has type vec3 │ │\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20 │ this argument has type vec4, which constrains subsequent arguments │ @@ -4242,7 +4242,8 @@ fn max_type_size_two_arrays_in_struct() { 2 │ ╭ struct TwoArrays { 3 │ │ arr1: array, 4 │ │ arr2: array, - │ ╰───────────────────────────────────────────────^ this type exceeds the maximum size +5 │ │ } + │ ╰─────────────^ this type exceeds the maximum size │\x20\x20 = note: the maximum size is 1073741824 bytes From 0c6ff529dd03cef7f4813ee2f0b7ce64761ccdf0 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Tue, 7 Oct 2025 12:31:49 +0200 Subject: [PATCH 22/33] add `Lexer::span_with_start` --- naga/src/front/wgsl/parse/lexer.rs | 3 ++ naga/src/front/wgsl/parse/mod.rs | 50 +++++++++++++----------------- 2 files changed, 24 insertions(+), 29 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 2f9a9be684c..a47566feb1d 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -351,6 +351,9 @@ impl<'a> Lexer<'a> { pub(in crate::front::wgsl) fn span_from(&self, offset: usize) -> Span 
{ Span::from(offset..self.last_end_offset) } + pub(in crate::front::wgsl) fn span_with_start(&self, span: Span) -> Span { + span.until(&Span::from(0..self.last_end_offset)) + } /// Return the next non-whitespace token from `self`. /// diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 4368cba5d44..49480acf86b 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -794,7 +794,7 @@ impl Parser { } }; - let span = lexer.span_from(name_span.to_range().unwrap().start); + let span = lexer.span_with_start(name_span); let expr = ctx.expressions.append(expr, span); Ok(expr) } @@ -834,8 +834,6 @@ impl Parser { ast::Expression::Literal(ast::Literal::Number(Number::U32(intersection as u32))) } - let start = token.1.to_range().unwrap().start; - let expr = match token { (Token::Paren('('), _) => { let expr = self.enclosed_expression(lexer, ctx)?; @@ -902,7 +900,7 @@ impl Parser { } (Token::Word(word), span) => { if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? 
{ - let ty_span = lexer.span_from(start); + let ty_span = lexer.span_with_start(span); let components = self.arguments(lexer, ctx)?; ast::Expression::Construct { ty, @@ -929,14 +927,14 @@ impl Parser { }; self.pop_rule_span(lexer); - let span = lexer.span_from(start); + let span = lexer.span_with_start(token.1); let expr = ctx.expressions.append(expr, span); Ok(expr) } fn component_or_swizzle_specifier<'a>( &mut self, - span_start: usize, + span_start: Span, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, expr: Handle>, @@ -961,7 +959,7 @@ impl Parser { _ => break, }; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(span_start); expr = ctx.expressions.append(expression, span); } @@ -1039,7 +1037,7 @@ impl Parser { UnaryOp::Deref => ast::Expression::Deref(expr), UnaryOp::AddrOf => ast::Expression::AddrOf(expr), }; - let span = lexer.span_from(span.to_range().unwrap().start); + let span = lexer.span_with_start(span); expr = ctx.expressions.append(e, span); } @@ -1060,7 +1058,6 @@ impl Parser { self.track_recursion(|this| { this.push_rule_span(Rule::LhsExpr, lexer); let token = token.unwrap_or_else(|| lexer.next()); - let start = token.1.to_range().unwrap().start; let expr = match token { (Token::Operation('*'), _) => { let expr = this.lhs_expression(lexer, ctx, None)?; @@ -1074,15 +1071,15 @@ impl Parser { let span = this.peek_rule_span(lexer); ctx.expressions.append(expr, span) } - (Token::Paren('('), _) => { + (Token::Paren('('), span) => { let expr = this.lhs_expression(lexer, ctx, None)?; lexer.expect(Token::Paren(')'))?; - this.component_or_swizzle_specifier(start, lexer, ctx, expr)? + this.component_or_swizzle_specifier(span, lexer, ctx, expr)? } (Token::Word(word), span) => { let ident = this.ident_expr(word, span, ctx); let ident = ctx.expressions.append(ast::Expression::Ident(ident), span); - this.component_or_swizzle_specifier(start, lexer, ctx, ident)? 
+ this.component_or_swizzle_specifier(span, lexer, ctx, ident)? } (_, span) => { return Err(Box::new(Error::Unexpected( @@ -1104,10 +1101,10 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, token: TokenSpan<'a>, ) -> Result<'a, Handle>> { - let start = token.1.to_range().unwrap().start; self.push_rule_span(Rule::SingularExpr, lexer); let primary_expr = self.primary_expression(lexer, ctx, token)?; - let singular_expr = self.component_or_swizzle_specifier(start, lexer, ctx, primary_expr)?; + let singular_expr = + self.component_or_swizzle_specifier(token.1, lexer, ctx, primary_expr)?; self.pop_rule_span(lexer); Ok(singular_expr) @@ -1964,12 +1961,11 @@ impl Parser { block: &mut ast::Block<'a>, token: TokenSpan<'a>, ) -> Result<'a, ()> { - let span_start = token.1.to_range().unwrap().start; - match token.0 { - Token::Word("_") => { + match token { + (Token::Word("_"), span) => { lexer.expect(Token::Operation('='))?; let expr = self.expression(lexer, ctx)?; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(span); block.stmts.push(ast::Statement { kind: ast::StatementKind::Phony(expr), span, @@ -2005,14 +2001,14 @@ impl Parser { let value = self.expression(lexer, ctx)?; (Some(op), value) } - token @ (Token::IncrementOperation | Token::DecrementOperation, _) => { - let op = match token.0 { + op_token @ (Token::IncrementOperation | Token::DecrementOperation, _) => { + let op = match op_token.0 { Token::IncrementOperation => ast::StatementKind::Increment, Token::DecrementOperation => ast::StatementKind::Decrement, _ => unreachable!(), }; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(token.1); block.stmts.push(ast::Statement { kind: op(target), span, @@ -2022,7 +2018,7 @@ impl Parser { (_, span) => return Err(Box::new(Error::Unexpected(span, ExpectedToken::Assignment))), }; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(token.1); block.stmts.push(ast::Statement { kind: 
ast::StatementKind::Assign { target, op, value }, span, @@ -2037,7 +2033,6 @@ impl Parser { lexer: &mut Lexer<'a>, ident: &'a str, ident_span: Span, - span_start: usize, context: &mut ExpressionContext<'a, '_, '_>, block: &mut ast::Block<'a>, ) -> Result<'a, ()> { @@ -2048,7 +2043,7 @@ impl Parser { usage: ident_span, }); let arguments = self.arguments(lexer, context)?; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(ident_span); block.stmts.push(ast::Statement { kind: ast::StatementKind::Call { @@ -2074,10 +2069,9 @@ impl Parser { block: &mut ast::Block<'a>, token: TokenSpan<'a>, ) -> Result<'a, ()> { - let span_start = token.1.to_range().unwrap().start; match token { (Token::Word(name), span) if matches!(lexer.peek(), (Token::Paren('('), _)) => { - self.func_call_statement(lexer, name, span, span_start, context, block) + self.func_call_statement(lexer, name, span, context, block) } token => self.variable_updating_statement(lexer, context, block, token), } @@ -2091,8 +2085,6 @@ impl Parser { block: &mut ast::Block<'a>, token: TokenSpan<'a>, ) -> Result<'a, ()> { - let span_start = token.1.to_range().unwrap().start; - let local_decl = match token { (Token::Word("let"), _) => { let (name, given_ty) = self.optionally_typed_ident(lexer, ctx)?; @@ -2145,7 +2137,7 @@ impl Parser { } }; - let span = lexer.span_from(span_start); + let span = lexer.span_with_start(token.1); block.stmts.push(ast::Statement { kind: ast::StatementKind::LocalDecl(local_decl), span, From 4ebbc67dfe194c2b8b37ee1f875f8e27f64c927d Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Tue, 7 Oct 2025 12:11:54 +0200 Subject: [PATCH 23/33] implement template list discovery --- naga/src/front/wgsl/parse/lexer.rs | 151 +++++++++++++++++++++++------ 1 file changed, 123 insertions(+), 28 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index a47566feb1d..f49f08cec7b 100644 --- 
a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -35,6 +35,92 @@ fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) { input.split_at(pos) } +struct UnclosedCandidate { + index: usize, + depth: usize, +} + +fn consume_tokens<'a>( + tokens: &mut Vec<(TokenSpan<'a>, &'a str)>, + source: &'a str, + mut input: &'a str, + ignore_doc_comments: bool, +) { + assert!(tokens.is_empty()); + + let mut looking_for_template_start = false; + let mut pending: Vec = Vec::new(); + let mut depth = 0; + + fn pop_until(pending: &mut Vec, depth: usize) { + while pending + .last() + .map(|candidate| candidate.depth >= depth) + .unwrap_or(false) + { + pending.pop(); + } + } + + loop { + let waiting_for_template_end = pending + .last() + .map(|candidate| candidate.depth == depth) + .unwrap_or(false); + + let (token, rest) = consume_token(input, waiting_for_template_end, ignore_doc_comments); + let span = Span::from(source.len() - input.len()..source.len() - rest.len()); + tokens.push(((token, span), rest)); + input = rest; + + match token { + Token::Word(_) => { + looking_for_template_start = true; + continue; + } + Token::Trivia | Token::DocComment(_) | Token::ModuleDocComment(_) + if looking_for_template_start => + { + continue; + } + Token::Paren('<') if looking_for_template_start => { + pending.push(UnclosedCandidate { + index: tokens.len() - 1, + depth, + }); + } + Token::Paren('>') if waiting_for_template_end => { + let candidate = pending.pop().unwrap(); + let token = tokens.get_mut(candidate.index).unwrap(); + // TODO: -- mark it as template start + } + Token::Paren('(' | '[') => { + depth += 1; + } + Token::Paren(')' | ']') => { + pop_until(&mut pending, depth); + depth = depth.saturating_sub(1); + } + Token::Operation('=') | Token::Separator(':' | ';') | Token::Paren('{') => { + pending.clear(); + depth = 0; + } + Token::LogicalOperation('&') | Token::LogicalOperation('|') => { + pop_until(&mut pending, depth); + } + _ => {} + 
} + + looking_for_template_start = false; + + if pending.is_empty() { + break; + } + } + + tokens.reverse(); +} + /// Return the token at the start of `input`. /// /// If `generic` is `false`, then the bit shift operators `>>` or `<<` @@ -52,7 +138,11 @@ fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) { /// If `ignore_doc_comments` is true, doc comments are treated as [`Token::Trivia`]. /// /// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing -fn consume_token(input: &str, generic: bool, ignore_doc_comments: bool) -> (Token<'_>, &str) { +fn consume_token( + input: &str, + waiting_for_template_end: bool, + ignore_doc_comments: bool, +) -> (Token<'_>, &str) { let mut chars = input.chars(); let cur = match chars.next() { Some(c) => c, @@ -71,9 +161,13 @@ fn consume_token(input: &str, generic: bool, ignore_doc_comments: bool) -> (Toke '(' | ')' | '{' | '}' | '[' | ']' => (Token::Paren(cur), chars.as_str()), '<' | '>' => { let og_chars = chars.as_str(); + if cur == '>' && waiting_for_template_end { + // TODO: -- mark it as template end + return (Token::Paren(cur), og_chars); + } match chars.next() { - Some('=') if !generic => (Token::LogicalOperation(cur), chars.as_str()), - Some(c) if c == cur && !generic => { + Some('=') => (Token::LogicalOperation(cur), chars.as_str()), + Some(c) if c == cur => { let og_chars = chars.as_str(); match chars.next() { Some('=') => (Token::AssignmentOperation(cur), chars.as_str()), @@ -261,6 +355,8 @@ pub(in crate::front::wgsl) struct Lexer<'a> { /// statements. last_end_offset: usize, + tokens: Vec<(TokenSpan<'a>, &'a str)>, + /// Whether or not to ignore doc comments. /// If `true`, doc comments are treated as [`Token::Trivia`]. 
ignore_doc_comments: bool, @@ -274,6 +370,7 @@ impl<'a> Lexer<'a> { input, source: input, last_end_offset: 0, + tokens: Vec::new(), enable_extensions: EnableExtensions::empty(), ignore_doc_comments, } @@ -361,41 +458,36 @@ impl<'a> Lexer<'a> { /// occur, but not angle brackets. #[must_use] pub(in crate::front::wgsl) fn next(&mut self) -> TokenSpan<'a> { - self.next_impl(false, true) - } - - /// Return the next non-whitespace token from `self`. - /// - /// Assume we are in a parse state where angle brackets may occur, - /// but not bit shift operators. - #[must_use] - pub(in crate::front::wgsl) fn next_generic(&mut self) -> TokenSpan<'a> { - self.next_impl(true, true) + self.next_impl(true) } #[cfg(test)] pub fn next_with_unignored_doc_comments(&mut self) -> TokenSpan<'a> { - self.next_impl(false, false) + self.next_impl(false) } /// Return the next non-whitespace token from `self`, with a span. /// /// See [`consume_token`] for the meaning of `generic`. - fn next_impl(&mut self, generic: bool, ignore_doc_comments: bool) -> TokenSpan<'a> { - let mut start_byte_offset = self.current_byte_offset(); + fn next_impl(&mut self, ignore_doc_comments: bool) -> TokenSpan<'a> { loop { - let (token, rest) = consume_token( - self.input, - generic, - ignore_doc_comments || self.ignore_doc_comments, - ); + if self.tokens.is_empty() { + consume_tokens( + &mut self.tokens, + self.source, + self.input, + ignore_doc_comments || self.ignore_doc_comments, + ); + } + assert!(!self.tokens.is_empty()); + let (token, rest) = self.tokens.pop().unwrap(); + self.input = rest; - match token { - Token::Trivia => start_byte_offset = self.current_byte_offset(), - _ => { - self.last_end_offset = self.current_byte_offset(); - return (token, self.span_from(start_byte_offset)); - } + self.last_end_offset = self.current_byte_offset(); + + match token.0 { + Token::Trivia => {} + _ => return token, } } } @@ -405,6 +497,7 @@ impl<'a> Lexer<'a> { let input = self.input; let last_end_offset = 
self.last_end_offset; let token = self.next(); + self.tokens.push((token, self.input)); self.input = input; self.last_end_offset = last_end_offset; token @@ -418,6 +511,7 @@ impl<'a> Lexer<'a> { if token.0 == what { true } else { + self.tokens.push((token, self.input)); self.input = input; self.last_end_offset = last_end_offset; false @@ -445,7 +539,7 @@ impl<'a> Lexer<'a> { &mut self, expected: char, ) -> Result<'a, ()> { - let next = self.next_generic(); + let next = self.next(); if next.0 == Token::Paren(expected) { Ok(()) } else { @@ -1019,6 +1113,7 @@ fn test_doc_comments_module() { Token::ModuleDocComment("/*! Different module comment again */"), Token::ModuleDocComment("//! After a break is supported."), Token::Word("const"), + Token::ModuleDocComment("//! After anything else is not."), ], ); } From 96a378ef96ff563f062306bd70a2cb58051a751e Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Thu, 21 Aug 2025 16:27:34 +0200 Subject: [PATCH 24/33] [wgsl] update reserved list to latest CR --- naga/src/keywords/wgsl.rs | 69 +++---------------- naga/tests/naga/wgsl_errors.rs | 40 +++++------ .../tests/out/wgsl/glsl-expressions.frag.wgsl | 4 +- .../wgsl/glsl-global-constant-array.frag.wgsl | 4 +- 4 files changed, 34 insertions(+), 83 deletions(-) diff --git a/naga/src/keywords/wgsl.rs b/naga/src/keywords/wgsl.rs index 8ec0dd59c5f..00c3cef2c10 100644 --- a/naga/src/keywords/wgsl.rs +++ b/naga/src/keywords/wgsl.rs @@ -7,60 +7,18 @@ Keywords for [WGSL][wgsl] (WebGPU Shading Language). 
use crate::proc::KeywordSet; use crate::racy_lock::RacyLock; -// https://gpuweb.github.io/gpuweb/wgsl/#keyword-summary -// last sync: https://github.com/gpuweb/gpuweb/blob/39f2321f547c8f0b7f473cf1d47fba30b1691303/wgsl/index.bs +// last sync: https://www.w3.org/TR/2025/CRD-WGSL-20250809/#keyword-summary pub const RESERVED: &[&str] = &[ - // Type-defining Keywords - "array", - "atomic", - "bool", - "f32", - "f16", - "i32", - "i64", - "mat2x2", - "mat2x3", - "mat2x4", - "mat3x2", - "mat3x3", - "mat3x4", - "mat4x2", - "mat4x3", - "mat4x4", - "ptr", - "sampler", - "sampler_comparison", - "texture_1d", - "texture_2d", - "texture_2d_array", - "texture_3d", - "texture_cube", - "texture_cube_array", - "texture_external", - "texture_multisampled_2d", - "texture_storage_1d", - "texture_storage_2d", - "texture_storage_2d_array", - "texture_storage_3d", - "texture_depth_2d", - "texture_depth_2d_array", - "texture_depth_cube", - "texture_depth_cube_array", - "texture_depth_multisampled_2d", - "u32", - "u64", - "vec2", - "vec3", - "vec4", - // Other Keywords + // Keywords "alias", - "bitcast", "break", "case", "const", + "const_assert", "continue", "continuing", "default", + "diagnostic", "discard", "else", "enable", @@ -71,20 +29,14 @@ pub const RESERVED: &[&str] = &[ "let", "loop", "override", + "requires", "return", - "static_assert", "struct", "switch", "true", - "type", "var", "while", - // Reserved Words - "CompileShader", - "ComputeShader", - "DomainShader", - "GeometryShader", - "Hullshader", + // Reserved "NULL", "Self", "abstract", @@ -99,7 +51,6 @@ pub const RESERVED: &[&str] = &[ "auto", "await", "become", - "binding_array", "cast", "catch", "class", @@ -140,13 +91,11 @@ pub const RESERVED: &[&str] = &[ "get", "goto", "groupshared", - "handle", "highp", "impl", "implements", "import", "inline", - "inout", "instanceof", "interface", "layout", @@ -167,6 +116,8 @@ pub const RESERVED: &[&str] = &[ "noexcept", "noinline", "nointerpolation", + "non_coherent", + 
"noncoherent", "noperspective", "null", "nullptr", @@ -190,13 +141,12 @@ pub const RESERVED: &[&str] = &[ "regardless", "register", "reinterpret_cast", - "requires", + "require", "resource", "restrict", "self", "set", "shared", - "signed", "sizeof", "smooth", "snorm", @@ -213,6 +163,7 @@ pub const RESERVED: &[&str] = &[ "throw", "trait", "try", + "type", "typedef", "typeid", "typename", diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index 06b87f0186a..203721278e6 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -715,13 +715,13 @@ fn reserved_keyword() { // global var check( r#" - var bool: bool = true; + var case: bool = true; "#, - r###"error: name `bool` is a reserved keyword + r###"error: name `case` is a reserved keyword ┌─ wgsl:2:17 │ -2 │ var bool: bool = true; - │ ^^^^ definition of `bool` +2 │ var case: bool = true; + │ ^^^^ definition of `case` "###, ); @@ -747,14 +747,14 @@ fn reserved_keyword() { check( r#" fn foo() { - let atomic: f32 = 1.0; + let enable: f32 = 1.0; } "#, - r###"error: name `atomic` is a reserved keyword + r###"error: name `enable` is a reserved keyword ┌─ wgsl:3:21 │ -3 │ let atomic: f32 = 1.0; - │ ^^^^^^ definition of `atomic` +3 │ let enable: f32 = 1.0; + │ ^^^^^^ definition of `enable` "###, ); @@ -763,14 +763,14 @@ fn reserved_keyword() { check( r#" fn foo() { - var sampler: f32 = 1.0; + var default: f32 = 1.0; } "#, - r###"error: name `sampler` is a reserved keyword + r###"error: name `default` is a reserved keyword ┌─ wgsl:3:21 │ -3 │ var sampler: f32 = 1.0; - │ ^^^^^^^ definition of `sampler` +3 │ var default: f32 = 1.0; + │ ^^^^^^^ definition of `default` "###, ); @@ -792,13 +792,13 @@ fn reserved_keyword() { // struct check( r#" - struct array {} + struct override {} "#, - r###"error: name `array` is a reserved keyword + r###"error: name `override` is a reserved keyword ┌─ wgsl:2:20 │ -2 │ struct array {} - │ ^^^^^ definition of `array` +2 │ struct override {} + 
│ ^^^^^^^^ definition of `override` "###, ); @@ -806,13 +806,13 @@ fn reserved_keyword() { // struct member check( r#" - struct Foo { sampler: f32 } + struct Foo { switch: f32 } "#, - r###"error: name `sampler` is a reserved keyword + r###"error: name `switch` is a reserved keyword ┌─ wgsl:2:26 │ -2 │ struct Foo { sampler: f32 } - │ ^^^^^^^ definition of `sampler` +2 │ struct Foo { switch: f32 } + │ ^^^^^^ definition of `switch` "###, ); diff --git a/naga/tests/out/wgsl/glsl-expressions.frag.wgsl b/naga/tests/out/wgsl/glsl-expressions.frag.wgsl index 016207c3096..a71036a978b 100644 --- a/naga/tests/out/wgsl/glsl-expressions.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-expressions.frag.wgsl @@ -7,7 +7,7 @@ struct a_buf { } struct TestStruct { - array_: array, 2>, + array: array, 2>, } struct FragmentOutput { @@ -402,7 +402,7 @@ fn indexConstantNonConstantIndex(i: i32) { i_1 = i; let _e2 = i_1; - let _e7 = local_5.array_[_e2]; + let _e7 = local_5.array[_e2]; a_26 = _e7; return; } diff --git a/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl b/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl index 6582dfc9ece..2519e9f9b4b 100644 --- a/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl @@ -1,7 +1,7 @@ -const array_: array = array(1f, 2f); +const array: array = array(1f, 2f); fn main_1() { - var local: array = array_; + var local: array = array; return; } From c12bf68099ff6d51e2a21f5fc1add5b84a36ab9e Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Tue, 7 Oct 2025 13:13:32 +0200 Subject: [PATCH 25/33] add `TemplateArgsStart` & `TemplateArgsEnd` tokens --- naga/src/front/wgsl/error.rs | 2 + naga/src/front/wgsl/parse/lexer.rs | 60 +++++++++++------------------- naga/src/front/wgsl/parse/mod.rs | 30 +++++++-------- naga/src/front/wgsl/tests.rs | 2 +- 4 files changed, 40 insertions(+), 54 deletions(-) diff --git a/naga/src/front/wgsl/error.rs 
b/naga/src/front/wgsl/error.rs index 7aa12e000ca..e6f2169209d 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -480,6 +480,8 @@ impl<'a> Error<'a> { Token::IncrementOperation => "increment operation".to_string(), Token::DecrementOperation => "decrement operation".to_string(), Token::Arrow => "->".to_string(), + Token::TemplateArgsStart => "template args start".to_string(), + Token::TemplateArgsEnd => "template args end".to_string(), Token::Unknown(c) => format!("unknown (`{c}`)"), Token::Trivia => "trivia".to_string(), Token::DocComment(s) => format!("doc comment ('{s}')"), diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index f49f08cec7b..23ba4b5ac8a 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -23,6 +23,8 @@ pub enum Token<'a> { IncrementOperation, DecrementOperation, Arrow, + TemplateArgsStart, + TemplateArgsEnd, Unknown(char), Trivia, DocComment(&'a str), @@ -89,10 +91,10 @@ fn consume_tokens<'a>( depth, }); } - Token::Paren('>') if waiting_for_template_end => { + Token::TemplateArgsEnd => { let candidate = pending.pop().unwrap(); - let token = tokens.get_mut(candidate.index).unwrap(); - // TODO: -- mark it as template start + let &mut ((ref mut token, _), _) = tokens.get_mut(candidate.index).unwrap(); + *token = Token::TemplateArgsStart; } Token::Paren('(' | '[') => { depth += 1; @@ -162,8 +164,7 @@ fn consume_token( '<' | '>' => { let og_chars = chars.as_str(); if cur == '>' && waiting_for_template_end { - // TODO: -- mark it as template end - return (Token::Paren(cur), og_chars); + return (Token::TemplateArgsEnd, og_chars); } match chars.next() { Some('=') => (Token::LogicalOperation(cur), chars.as_str()), @@ -467,8 +468,6 @@ impl<'a> Lexer<'a> { } /// Return the next non-whitespace token from `self`, with a span. - /// - /// See [`consume_token`] for the meaning of `generic`. 
fn next_impl(&mut self, ignore_doc_comments: bool) -> TokenSpan<'a> { loop { if self.tokens.is_empty() { @@ -535,23 +534,8 @@ impl<'a> Lexer<'a> { Ok(()) } - pub(in crate::front::wgsl) fn expect_generic_paren( - &mut self, - expected: char, - ) -> Result<'a, ()> { - let next = self.next(); - if next.0 == Token::Paren(expected) { - Ok(()) - } else { - Err(Box::new(Error::Unexpected( - next.1, - ExpectedToken::Token(Token::Paren(expected)), - ))) - } - } - pub(in crate::front::wgsl) fn end_of_generic_arguments(&mut self) -> bool { - self.next_if(Token::Separator(',')) && self.peek().0 != Token::Paren('>') + self.next_if(Token::Separator(',')) && self.peek().0 != Token::TemplateArgsEnd } pub(in crate::front::wgsl) fn next_ident_with_span(&mut self) -> Result<'a, (&'a str, Span)> { @@ -588,7 +572,7 @@ impl<'a> Lexer<'a> { pub(in crate::front::wgsl) fn next_scalar_generic_with_span( &mut self, ) -> Result<'a, (Scalar, Span)> { - self.expect_generic_paren('<')?; + self.expect(Token::TemplateArgsStart)?; let (scalar, span) = match self.next() { (Token::Word(word), span) => { @@ -599,7 +583,7 @@ impl<'a> Lexer<'a> { (_, span) => return Err(Box::new(Error::UnknownScalarType(span))), }; - self.expect_generic_paren('>')?; + self.expect(Token::TemplateArgsEnd)?; Ok((scalar, span)) } @@ -621,18 +605,18 @@ impl<'a> Lexer<'a> { pub(in crate::front::wgsl) fn next_format_generic( &mut self, ) -> Result<'a, (crate::StorageFormat, crate::StorageAccess)> { - self.expect(Token::Paren('<'))?; + self.expect(Token::TemplateArgsStart)?; let (ident, ident_span) = self.next_ident_with_span()?; let format = conv::map_storage_format(ident, ident_span)?; self.expect(Token::Separator(','))?; let access = self.next_storage_access()?; - self.expect(Token::Paren('>'))?; + self.expect(Token::TemplateArgsEnd)?; Ok((format, access)) } pub(in crate::front::wgsl) fn next_acceleration_structure_flags(&mut self) -> Result<'a, bool> { - Ok(if self.next_if(Token::Paren('<')) { - if 
!self.next_if(Token::Paren('>')) { + Ok(if self.next_if(Token::TemplateArgsStart) { + if !self.next_if(Token::TemplateArgsEnd) { let (name, span) = self.next_ident_with_span()?; let ret = if name == "vertex_return" { true @@ -640,7 +624,7 @@ impl<'a> Lexer<'a> { return Err(Box::new(Error::UnknownAttribute(span))); }; self.next_if(Token::Separator(',')); - self.expect(Token::Paren('>'))?; + self.expect(Token::TemplateArgsEnd)?; ret } else { false @@ -956,15 +940,15 @@ fn test_variable_decl() { Token::Number(Ok(Number::AbstractInt(0))), Token::Paren(')'), Token::Word("var"), - Token::Paren('<'), + Token::TemplateArgsStart, Token::Word("uniform"), - Token::Paren('>'), + Token::TemplateArgsEnd, Token::Word("texture"), Token::Separator(':'), Token::Word("texture_multisampled_2d"), - Token::Paren('<'), + Token::TemplateArgsStart, Token::Word("f32"), - Token::Paren('>'), + Token::TemplateArgsEnd, Token::Separator(';'), ], ); @@ -972,17 +956,17 @@ fn test_variable_decl() { "var buffer: array;", &[ Token::Word("var"), - Token::Paren('<'), + Token::TemplateArgsStart, Token::Word("storage"), Token::Separator(','), Token::Word("read_write"), - Token::Paren('>'), + Token::TemplateArgsEnd, Token::Word("buffer"), Token::Separator(':'), Token::Word("array"), - Token::Paren('<'), + Token::TemplateArgsStart, Token::Word("u32"), - Token::Paren('>'), + Token::TemplateArgsEnd, Token::Separator(';'), ], ); diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 49480acf86b..8adf438f4dc 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -687,11 +687,11 @@ impl Parser { // parse component type if present match (lexer.peek().0, partial) { - (Token::Paren('<'), ast::ConstructorType::PartialVector { size }) => { + (Token::TemplateArgsStart, ast::ConstructorType::PartialVector { size }) => { let (ty, ty_span) = self.singular_generic(lexer, ctx)?; Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span })) } - 
(Token::Paren('<'), ast::ConstructorType::PartialMatrix { columns, rows }) => { + (Token::TemplateArgsStart, ast::ConstructorType::PartialMatrix { columns, rows }) => { let (ty, ty_span) = self.singular_generic(lexer, ctx)?; Ok(Some(ast::ConstructorType::Matrix { columns, @@ -700,8 +700,8 @@ impl Parser { ty_span, })) } - (Token::Paren('<'), ast::ConstructorType::PartialArray) => { - lexer.expect_generic_paren('<')?; + (Token::TemplateArgsStart, ast::ConstructorType::PartialArray) => { + lexer.expect(Token::TemplateArgsStart)?; let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let expr = self.const_generic_expression(lexer, ctx)?; @@ -710,7 +710,7 @@ impl Parser { } else { ast::ArraySize::Dynamic }; - lexer.expect_generic_paren('>')?; + lexer.expect(Token::TemplateArgsEnd)?; Ok(Some(ast::ConstructorType::Array { base, size })) } @@ -1293,7 +1293,7 @@ impl Parser { self.push_rule_span(Rule::VariableDecl, lexer); let mut space = crate::AddressSpace::Handle; - if lexer.next_if(Token::Paren('<')) { + if lexer.next_if(Token::TemplateArgsStart) { let (class_str, span) = lexer.next_ident_with_span()?; space = match class_str { "storage" => { @@ -1307,7 +1307,7 @@ impl Parser { } _ => conv::map_address_space(class_str, span)?, }; - lexer.expect(Token::Paren('>'))?; + lexer.expect(Token::TemplateArgsEnd)?; } let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; @@ -1409,12 +1409,12 @@ impl Parser { lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, (Handle>, Span)> { - lexer.expect_generic_paren('<')?; + lexer.expect(Token::TemplateArgsStart)?; let start = lexer.start_byte_offset(); let ty = self.type_specifier(lexer, ctx)?; let span = lexer.span_from(start); lexer.next_if(Token::Separator(',')); - lexer.expect_generic_paren('>')?; + lexer.expect(Token::TemplateArgsEnd)?; Ok((ty, span)) } @@ -1670,7 +1670,7 @@ impl Parser { ast::Type::Atomic(scalar) } "ptr" => { - lexer.expect_generic_paren('<')?; 
+ lexer.expect(Token::TemplateArgsStart)?; let (ident, span) = lexer.next_ident_with_span()?; let mut space = conv::map_address_space(ident, span)?; lexer.expect(Token::Separator(','))?; @@ -1684,11 +1684,11 @@ impl Parser { crate::StorageAccess::LOAD }; } - lexer.expect_generic_paren('>')?; + lexer.expect(Token::TemplateArgsEnd)?; ast::Type::Pointer { base, space } } "array" => { - lexer.expect_generic_paren('<')?; + lexer.expect(Token::TemplateArgsStart)?; let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.const_generic_expression(lexer, ctx)?; @@ -1697,12 +1697,12 @@ impl Parser { } else { ast::ArraySize::Dynamic }; - lexer.expect_generic_paren('>')?; + lexer.expect(Token::TemplateArgsEnd)?; ast::Type::Array { base, size } } "binding_array" => { - lexer.expect_generic_paren('<')?; + lexer.expect(Token::TemplateArgsStart)?; let base = self.type_specifier(lexer, ctx)?; let size = if lexer.end_of_generic_arguments() { let size = self.unary_expression(lexer, ctx)?; @@ -1711,7 +1711,7 @@ impl Parser { } else { ast::ArraySize::Dynamic }; - lexer.expect_generic_paren('>')?; + lexer.expect(Token::TemplateArgsEnd)?; ast::Type::BindingArray { base, size } } diff --git a/naga/src/front/wgsl/tests.rs b/naga/src/front/wgsl/tests.rs index 149f5d329fd..9d228209691 100644 --- a/naga/src/front/wgsl/tests.rs +++ b/naga/src/front/wgsl/tests.rs @@ -415,7 +415,7 @@ fn parse_postfix() { fn parse_expressions() { parse_str("fn foo() { let x: f32 = select(0.0, 1.0, true); - let y: vec2 = select(vec2(1.0, 1.0), vec2(x, x), vec2(x < 0.5, x > 0.5)); + let y: vec2 = select(vec2(1.0, 1.0), vec2(x, x), vec2((x < 0.5), (x > 0.5))); let z: bool = !(0.0 == 1.0); }").unwrap(); } From 393213b09ff7e90fd869ff052eaadfc5f84919bc Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Tue, 7 Oct 2025 16:05:53 +0200 Subject: [PATCH 26/33] handle `var` decl template list in the lowerer --- 
naga/src/front/wgsl/error.rs | 21 ++++++++++ naga/src/front/wgsl/lower/mod.rs | 63 +++++++++++++++++++++++++++++- naga/src/front/wgsl/parse/ast.rs | 2 +- naga/src/front/wgsl/parse/conv.rs | 12 ++++++ naga/src/front/wgsl/parse/lexer.rs | 18 +-------- naga/src/front/wgsl/parse/mod.rs | 44 +++++++++++---------- naga/src/front/wgsl/tests.rs | 58 +++++++++++++++++++++++++++ 7 files changed, 179 insertions(+), 39 deletions(-) diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index e6f2169209d..02dfe73083b 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -412,6 +412,9 @@ pub(crate) enum Error<'a> { TypeTooLarge { span: Span, }, + UnexpectedIdentForEnumerant(Span), + UnexpectedExprForEnumerant(Span), + UnusedArgsForTemplate(Vec), } impl From for Error<'_> { @@ -1381,6 +1384,24 @@ impl<'a> Error<'a> { crate::valid::MAX_TYPE_SIZE )], }, + Error::UnexpectedIdentForEnumerant(ident_span) => ParseError { + message: format!( + "identifier `{}` resolves to a declaration", + &source[ident_span] + ), + labels: vec![(ident_span, "needs to resolve to a predeclared enumerant".into())], + notes: vec![], + }, + Error::UnexpectedExprForEnumerant(expr_span) => ParseError { + message: "unexpected expression".to_string(), + labels: vec![(expr_span, "needs to be an identifier resolving to a predeclared enumerant".into())], + notes: vec![], + }, + Error::UnusedArgsForTemplate(ref expr_spans) => ParseError { + message: "unused expressions for template".to_string(), + labels: expr_spans.iter().cloned().map(|span| -> (_, _){ (span, "unused".into()) }).collect(), + notes: vec![], + }, } } } diff --git a/naga/src/front/wgsl/lower/mod.rs b/naga/src/front/wgsl/lower/mod.rs index e90d7eab0a8..ced06a0f065 100644 --- a/naga/src/front/wgsl/lower/mod.rs +++ b/naga/src/front/wgsl/lower/mod.rs @@ -1194,10 +1194,12 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { None }; + let space = Self::var_address_space(&v.template_list, &ctx.as_const())?; + let 
handle = ctx.module.global_variables.append( ir::GlobalVariable { name: Some(v.name.name.to_string()), - space: v.space, + space, binding, ty, init: initializer, @@ -2327,6 +2329,65 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { expr.try_map(|handle| ctx.append_expression(handle, span)) } + fn enumerant( + expr: Handle>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, (&'source str, Span)> { + let span = ctx.ast_expressions.get_span(expr); + let expr = &ctx.ast_expressions[expr]; + + match *expr { + ast::Expression::Ident(ast::IdentExpr::Local(_)) => { + Err(Box::new(Error::UnexpectedIdentForEnumerant(span))) + } + ast::Expression::Ident(ast::IdentExpr::Unresolved(name)) => { + if ctx.globals.get(name).is_some() { + Err(Box::new(Error::UnexpectedIdentForEnumerant(span))) + } else { + Ok((name, span)) + } + } + _ => Err(Box::new(Error::UnexpectedExprForEnumerant(span))), + } + } + + fn var_address_space( + template_list: &Option>>>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::AddressSpace> { + let mut address_space = ir::AddressSpace::Handle; + + if let Some(ref template_list) = template_list { + let mut template_list_args = template_list.iter(); + let address_space_expr = template_list_args.next().unwrap(); + + let (enumerant, span) = Self::enumerant(*address_space_expr, ctx)?; + address_space = conv::map_address_space(enumerant, span)?; + + match address_space { + ir::AddressSpace::Storage { ref mut access } => { + if let Some(access_mode_expr) = template_list_args.next() { + let (enumerant, span) = Self::enumerant(*access_mode_expr, ctx)?; + let access_mode = conv::map_access_mode(enumerant, span)?; + *access = access_mode; + } else { + // defaulting to `read` + *access = ir::StorageAccess::LOAD + } + } + _ => {} + } + + let unused_args: Vec = template_list_args + .map(|expr| ctx.ast_expressions.get_span(*expr)) + .collect(); + if !unused_args.is_empty() { + return 
Err(Box::new(Error::UnusedArgsForTemplate(unused_args))); + } + } + Ok(address_space) + } + fn binary( &mut self, op: ir::BinaryOperator, diff --git a/naga/src/front/wgsl/parse/ast.rs b/naga/src/front/wgsl/parse/ast.rs index 345e9c4c486..0ac45c48d4e 100644 --- a/naga/src/front/wgsl/parse/ast.rs +++ b/naga/src/front/wgsl/parse/ast.rs @@ -164,7 +164,7 @@ pub struct ResourceBinding<'a> { #[derive(Debug)] pub struct GlobalVariable<'a> { pub name: Ident<'a>, - pub space: crate::AddressSpace, + pub template_list: Option>>>, pub binding: Option>, pub ty: Option>>, pub init: Option>>, diff --git a/naga/src/front/wgsl/parse/conv.rs b/naga/src/front/wgsl/parse/conv.rs index 30d0eb2d598..09d0e22c14b 100644 --- a/naga/src/front/wgsl/parse/conv.rs +++ b/naga/src/front/wgsl/parse/conv.rs @@ -20,6 +20,18 @@ pub fn map_address_space(word: &str, span: Span) -> Result<'_, crate::AddressSpa } } +pub fn map_access_mode(word: &str, span: Span) -> Result<'_, crate::StorageAccess> { + match word { + "read" => Ok(crate::StorageAccess::LOAD), + "write" => Ok(crate::StorageAccess::STORE), + "read_write" => Ok(crate::StorageAccess::LOAD | crate::StorageAccess::STORE), + "atomic" => Ok(crate::StorageAccess::ATOMIC + | crate::StorageAccess::LOAD + | crate::StorageAccess::STORE), + _ => Err(Box::new(Error::UnknownAccess(span))), + } +} + pub fn map_built_in( enable_extensions: &EnableExtensions, word: &str, diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 23ba4b5ac8a..22b6e77605b 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -587,21 +587,6 @@ impl<'a> Lexer<'a> { Ok((scalar, span)) } - pub(in crate::front::wgsl) fn next_storage_access( - &mut self, - ) -> Result<'a, crate::StorageAccess> { - let (ident, span) = self.next_ident_with_span()?; - match ident { - "read" => Ok(crate::StorageAccess::LOAD), - "write" => Ok(crate::StorageAccess::STORE), - "read_write" => Ok(crate::StorageAccess::LOAD | 
crate::StorageAccess::STORE), - "atomic" => Ok(crate::StorageAccess::ATOMIC - | crate::StorageAccess::LOAD - | crate::StorageAccess::STORE), - _ => Err(Box::new(Error::UnknownAccess(span))), - } - } - pub(in crate::front::wgsl) fn next_format_generic( &mut self, ) -> Result<'a, (crate::StorageFormat, crate::StorageAccess)> { @@ -609,7 +594,8 @@ impl<'a> Lexer<'a> { let (ident, ident_span) = self.next_ident_with_span()?; let format = conv::map_storage_format(ident, ident_span)?; self.expect(Token::Separator(','))?; - let access = self.next_storage_access()?; + let (ident, ident_span) = self.next_ident_with_span()?; + let access = conv::map_access_mode(ident, ident_span)?; self.expect(Token::TemplateArgsEnd)?; Ok((format, access)) } diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 8adf438f4dc..3c44d3b5264 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -1291,24 +1291,7 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, ast::GlobalVariable<'a>> { self.push_rule_span(Rule::VariableDecl, lexer); - let mut space = crate::AddressSpace::Handle; - - if lexer.next_if(Token::TemplateArgsStart) { - let (class_str, span) = lexer.next_ident_with_span()?; - space = match class_str { - "storage" => { - let access = if lexer.next_if(Token::Separator(',')) { - lexer.next_storage_access()? 
- } else { - // defaulting to `read` - crate::StorageAccess::LOAD - }; - crate::AddressSpace::Storage { access } - } - _ => conv::map_address_space(class_str, span)?, - }; - lexer.expect(Token::TemplateArgsEnd)?; - } + let template_list = self.maybe_template_list(lexer, ctx)?; let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; let init = if lexer.next_if(Token::Operation('=')) { @@ -1322,7 +1305,7 @@ impl Parser { Ok(ast::GlobalVariable { name, - space, + template_list, binding: None, ty, init, @@ -1403,6 +1386,24 @@ impl Parser { Ok(members) } + fn maybe_template_list<'a>( + &mut self, + lexer: &mut Lexer<'a>, + ctx: &mut ExpressionContext<'a, '_, '_>, + ) -> Result<'a, Option>>>> { + if lexer.next_if(Token::TemplateArgsStart) { + let mut args = Vec::new(); + args.push(self.expression(lexer, ctx)?); + while lexer.next_if(Token::Separator(',')) && lexer.peek().0 != Token::TemplateArgsEnd { + args.push(self.expression(lexer, ctx)?); + } + lexer.expect(Token::TemplateArgsEnd)?; + return Ok(Some(args)); + } else { + Ok(None) + } + } + /// Parses ``, returning T and span of T fn singular_generic<'a>( &mut self, @@ -1677,9 +1678,10 @@ impl Parser { let base = self.type_specifier(lexer, ctx)?; if let crate::AddressSpace::Storage { ref mut access } = space { *access = if lexer.end_of_generic_arguments() { - let result = lexer.next_storage_access()?; + let (ident, span) = lexer.next_ident_with_span()?; + let access = conv::map_access_mode(ident, span)?; lexer.next_if(Token::Separator(',')); - result + access } else { crate::StorageAccess::LOAD }; diff --git a/naga/src/front/wgsl/tests.rs b/naga/src/front/wgsl/tests.rs index 9d228209691..3864ae8cb41 100644 --- a/naga/src/front/wgsl/tests.rs +++ b/naga/src/front/wgsl/tests.rs @@ -865,3 +865,61 @@ error: found conflicting `diagnostic(…)` rule(s) } } } + +mod template { + use crate::front::wgsl::assert_parse_err; + + #[test] + fn enumerant_shadowing() { + assert_parse_err( + " +fn storage() {} +var s: u32; +", + "\ 
+error: identifier `storage` resolves to a declaration + ┌─ wgsl:3:5 + │ +3 │ var s: u32; + │ ^^^^^^^ needs to resolve to a predeclared enumerant + +", + ); + } + + #[test] + fn unexpected_expr_as_enumerant() { + assert_parse_err( + " +var<1 + 1> s: u32; +", + "\ +error: unexpected expression + ┌─ wgsl:2:5 + │ +2 │ var<1 + 1> s: u32; + │ ^^^^^ needs to be an identifier resolving to a predeclared enumerant + +", + ); + } + + #[test] + fn unused_exprs_for_template() { + assert_parse_err( + " +var s: u32; +", + "\ +error: unused expressions for template + ┌─ wgsl:2:26 + │ +2 │ var s: u32; + │ ^^^^^^ ^^^^^^ unused + │ │\x20\x20\x20\x20\x20\x20\x20\x20 + │ unused + +", + ); + } +} From f52a0ad4eeef3ea5ae4573516c8948ff4c1fb762 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Mon, 20 Oct 2025 12:40:15 +0200 Subject: [PATCH 27/33] handle `bitcast` and construction expressions in the lowerer --- naga/src/front/wgsl/error.rs | 39 ++ naga/src/front/wgsl/lower/construction.rs | 136 +---- naga/src/front/wgsl/lower/mod.rs | 460 ++++++++++++---- naga/src/front/wgsl/lower/template_list.rs | 159 ++++++ naga/src/front/wgsl/parse/ast.rs | 153 ++---- naga/src/front/wgsl/parse/conv.rs | 173 +++++- .../wgsl/parse/directive/enable_extension.rs | 2 +- naga/src/front/wgsl/parse/lexer.rs | 5 - naga/src/front/wgsl/parse/mod.rs | 508 +++--------------- naga/src/front/wgsl/tests.rs | 98 ++++ naga/tests/naga/wgsl_errors.rs | 6 +- naga/tests/out/spv/wgsl-const-exprs.spvasm | 32 +- 12 files changed, 967 insertions(+), 804 deletions(-) create mode 100644 naga/src/front/wgsl/lower/template_list.rs diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index 02dfe73083b..fecd18792ff 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -199,6 +199,7 @@ pub(crate) enum Error<'a> { InvalidIdentifierUnderscore(Span), ReservedIdentifierPrefix(Span), UnknownAddressSpace(Span), + UnknownRayFlag(Span), 
RepeatedAttribute(Span), UnknownAttribute(Span), UnknownBuiltin(Span), @@ -275,6 +276,7 @@ pub(crate) enum Error<'a> { span: Span, }, CalledEntryPoint(Span), + CalledLocalDecl(Span), WrongArgumentCount { span: Span, expected: Range, @@ -415,6 +417,12 @@ pub(crate) enum Error<'a> { UnexpectedIdentForEnumerant(Span), UnexpectedExprForEnumerant(Span), UnusedArgsForTemplate(Vec), + UnexpectedTemplate(Span), + MissingTemplateArg { + span: Span, + arg: &'static str, + }, + UnexpectedExprForTypeExpression(Span), } impl From for Error<'_> { @@ -655,6 +663,11 @@ impl<'a> Error<'a> { labels: vec![(bad_span, "unknown address space".into())], notes: vec![], }, + Error::UnknownRayFlag(bad_span) => ParseError { + message: format!("unknown ray flag: `{}`", &source[bad_span]), + labels: vec![(bad_span, "unknown ray flag".into())], + notes: vec![], + }, Error::RepeatedAttribute(bad_span) => ParseError { message: format!("repeated attribute: `{}`", &source[bad_span]), labels: vec![(bad_span, "repeated attribute".into())], @@ -914,6 +927,11 @@ impl<'a> Error<'a> { labels: vec![(span, "entry point cannot be called".into())], notes: vec![], }, + Error::CalledLocalDecl(span) => ParseError { + message: "local declaration cannot be called".to_string(), + labels: vec![(span, "local declaration cannot be called".into())], + notes: vec![], + }, Error::WrongArgumentCount { span, ref expected, @@ -1402,6 +1420,27 @@ impl<'a> Error<'a> { labels: expr_spans.iter().cloned().map(|span| -> (_, _){ (span, "unused".into()) }).collect(), notes: vec![], }, + Error::UnexpectedTemplate(span) => ParseError { + message: "unexpected template".to_string(), + labels: vec![(span, "expected identifier".into())], + notes: vec![], + }, + Error::MissingTemplateArg { + span, + arg, + } => ParseError { + message: format!( + "`{}` needs a template argument specified: {arg}", + &source[span] + ), + labels: vec![(span, "is missing a template argument".into())], + notes: vec![], + }, + 
Error::UnexpectedExprForTypeExpression(expr_span) => ParseError { + message: "unexpected expression".to_string(), + labels: vec![(expr_span, "needs to be an identifier resolving to a type declaration (alias or struct) or predeclared type(-generator)".into())], + notes: vec![], + } } } } diff --git a/naga/src/front/wgsl/lower/construction.rs b/naga/src/front/wgsl/lower/construction.rs index 997d5a31238..9791ab6023a 100644 --- a/naga/src/front/wgsl/lower/construction.rs +++ b/naga/src/front/wgsl/lower/construction.rs @@ -1,10 +1,4 @@ -use alloc::{ - boxed::Box, - format, - string::{String, ToString}, - vec, - vec::Vec, -}; +use alloc::{boxed::Box, vec, vec::Vec}; use core::num::NonZeroU32; use crate::common::wgsl::TypeContext; @@ -13,9 +7,22 @@ use crate::front::wgsl::parse::ast; use crate::front::wgsl::{Error, Result}; use crate::{Handle, Span}; -/// A cooked form of `ast::ConstructorType` that uses Naga types whenever -/// possible. -enum Constructor { +/// A [`constructor built-in function`]. +/// +/// WGSL has two types of such functions: +/// +/// - Those that fully specify the type being constructed, like +/// `vec3(x,y,z)`, which obviously constructs a `vec3`. +/// +/// - Those that leave the component type of the composite being constructed +/// implicit, to be inferred from the argument types, like `vec3(x,y,z)`, +/// which constructs a `vec3` where `T` is the type of `x`, `y`, and `z`. +/// +/// This enum represents both cases. The `PartialFoo` variants +/// represent the second case, where the component type is implicit. +/// +/// [`constructor built-in function`]: https://gpuweb.github.io/gpuweb/wgsl/#constructor-builtin-function +pub enum Constructor { /// A vector construction whose component type is inferred from the /// argument: `vec3(1.0)`. 
PartialVector { size: crate::VectorSize }, @@ -62,21 +69,6 @@ impl Constructor> { } } -impl Constructor<(Handle, &crate::TypeInner)> { - fn to_error_string(&self, ctx: &ExpressionContext) -> String { - match *self { - Self::PartialVector { size } => { - format!("vec{}", size as u32,) - } - Self::PartialMatrix { columns, rows } => { - format!("mat{}x{}", columns as u32, rows as u32,) - } - Self::PartialArray => "array".to_string(), - Self::Type((handle, _inner)) => ctx.type_to_string(handle), - } - } -} - enum Components<'a> { None, One { @@ -108,24 +100,20 @@ impl<'source> Lowerer<'source, '_> { /// it's one of the `Partial` variants, we need to consider the argument /// types as well. /// - /// This is used for [`Construct`] expressions, but also for [`Call`] - /// expressions, once we've determined that the "callable" (in WGSL spec - /// terms) is actually a type. + /// This is used for [`Call`] expressions, once we've determined that + /// the "callable" (in WGSL spec terms) is actually a type. /// - /// [`Construct`]: ast::Expression::Construct /// [`Call`]: ast::Expression::Call pub fn construct( &mut self, span: Span, - constructor: &ast::ConstructorType<'source>, + constructor: Constructor>, ty_span: Span, components: &[Handle>], ctx: &mut ExpressionContext<'source, '_, '_>, ) -> Result<'source, Handle> { use crate::proc::TypeResolution as Tr; - let constructor_h = self.constructor(constructor, ctx)?; - let components = match *components { [] => Components::None, [component] => { @@ -160,7 +148,7 @@ impl<'source> Lowerer<'source, '_> { // Even though we computed `constructor` above, wait until now to borrow // a reference to the `TypeInner`, so that the component-handling code // above can have mutable access to the type arena. 
- let constructor = constructor_h.borrow_inner(ctx.module); + let constructor = constructor.borrow_inner(ctx.module); let expr; match (components, constructor) { @@ -554,14 +542,19 @@ impl<'source> Lowerer<'source, '_> { Components::One { span, component, .. }, - constructor, + Constructor::Type(( + ty, + &(crate::TypeInner::Scalar { .. } + | crate::TypeInner::Vector { .. } + | crate::TypeInner::Matrix { .. }), + )), ) => { let component_ty = &ctx.typifier()[component]; let from_type = ctx.type_resolution_to_string(component_ty); return Err(Box::new(Error::BadTypeCast { span, from_type, - to_type: constructor.to_error_string(ctx), + to_type: ctx.type_to_string(ty), })); } @@ -581,77 +574,4 @@ impl<'source> Lowerer<'source, '_> { let expr = ctx.append_expression(expr, span)?; Ok(expr) } - - /// Build a [`Constructor`] for a WGSL construction expression. - /// - /// If `constructor` conveys enough information to determine which Naga [`Type`] - /// we're actually building (i.e., it's not a partial constructor), then - /// ensure the `Type` exists in [`ctx.module`], and return - /// [`Constructor::Type`]. - /// - /// Otherwise, return the [`Constructor`] partial variant corresponding to - /// `constructor`. 
- /// - /// [`Type`]: crate::Type - /// [`ctx.module`]: ExpressionContext::module - fn constructor<'out>( - &mut self, - constructor: &ast::ConstructorType<'source>, - ctx: &mut ExpressionContext<'source, '_, 'out>, - ) -> Result<'source, Constructor>> { - let handle = match *constructor { - ast::ConstructorType::Scalar(scalar) => { - let ty = ctx.ensure_type_exists(scalar.to_inner_scalar()); - Constructor::Type(ty) - } - ast::ConstructorType::PartialVector { size } => Constructor::PartialVector { size }, - ast::ConstructorType::Vector { size, ty, ty_span } => { - let ty = self.resolve_ast_type(ty, &mut ctx.as_const())?; - let scalar = match ctx.module.types[ty].inner { - crate::TypeInner::Scalar(sc) => sc, - _ => return Err(Box::new(Error::UnknownScalarType(ty_span))), - }; - let ty = ctx.ensure_type_exists(crate::TypeInner::Vector { size, scalar }); - Constructor::Type(ty) - } - ast::ConstructorType::PartialMatrix { columns, rows } => { - Constructor::PartialMatrix { columns, rows } - } - ast::ConstructorType::Matrix { - rows, - columns, - ty, - ty_span, - } => { - let ty = self.resolve_ast_type(ty, &mut ctx.as_const())?; - let scalar = match ctx.module.types[ty].inner { - crate::TypeInner::Scalar(sc) => sc, - _ => return Err(Box::new(Error::UnknownScalarType(ty_span))), - }; - let ty = match scalar.kind { - crate::ScalarKind::Float => ctx.ensure_type_exists(crate::TypeInner::Matrix { - columns, - rows, - scalar, - }), - _ => return Err(Box::new(Error::BadMatrixScalarKind(ty_span, scalar))), - }; - Constructor::Type(ty) - } - ast::ConstructorType::PartialArray => Constructor::PartialArray, - ast::ConstructorType::Array { base, size } => { - let base = self.resolve_ast_type(base, &mut ctx.as_const())?; - let size = self.array_size(size, &mut ctx.as_const())?; - - ctx.layouter.update(ctx.module.to_ctx()).unwrap(); - let stride = ctx.layouter[base].to_stride(); - - let ty = ctx.ensure_type_exists(crate::TypeInner::Array { base, size, stride }); - 
Constructor::Type(ty) - } - ast::ConstructorType::Type(ty) => Constructor::Type(ty), - }; - - Ok(handle) - } } diff --git a/naga/src/front/wgsl/lower/mod.rs b/naga/src/front/wgsl/lower/mod.rs index ced06a0f065..f644f476d19 100644 --- a/naga/src/front/wgsl/lower/mod.rs +++ b/naga/src/front/wgsl/lower/mod.rs @@ -9,6 +9,7 @@ use core::num::NonZeroU32; use crate::front::wgsl::error::{Error, ExpectedToken, InvalidAssignmentType}; use crate::front::wgsl::index::Index; +use crate::front::wgsl::parse::directive::enable_extension::EnableExtensions; use crate::front::wgsl::parse::number::Number; use crate::front::wgsl::parse::{ast, conv}; use crate::front::wgsl::Result; @@ -21,8 +22,12 @@ use crate::{common::ForDebugWithTypes, proc::LayoutErrorInner}; use crate::{ir, proc}; use crate::{Arena, FastHashMap, FastIndexMap, Handle, Span}; +use construction::Constructor; +use template_list::TemplateListIter; + mod construction; mod conversion; +mod template_list; /// Resolves the inner type of a given expression. /// @@ -83,6 +88,8 @@ pub(super) use resolve; /// State for constructing a `ir::Module`. pub struct GlobalContext<'source, 'temp, 'out> { + enable_extensions: EnableExtensions, + /// The `TranslationUnit`'s expressions arena. ast_expressions: &'temp Arena>, @@ -107,6 +114,7 @@ pub struct GlobalContext<'source, 'temp, 'out> { impl<'source> GlobalContext<'source, '_, '_> { fn as_const(&mut self) -> ExpressionContext<'source, '_, '_> { ExpressionContext { + enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, types: self.types, @@ -120,6 +128,7 @@ impl<'source> GlobalContext<'source, '_, '_> { fn as_override(&mut self) -> ExpressionContext<'source, '_, '_> { ExpressionContext { + enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, types: self.types, @@ -144,6 +153,8 @@ impl<'source> GlobalContext<'source, '_, '_> { /// State for lowering a statement within a function. 
pub struct StatementContext<'source, 'temp, 'out> { + enable_extensions: EnableExtensions, + // WGSL AST values. /// A reference to [`TranslationUnit::expressions`] for the translation unit /// we're lowering. @@ -211,6 +222,7 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { 'temp: 't, { ExpressionContext { + enable_extensions: self.enable_extensions, globals: self.globals, types: self.types, ast_expressions: self.ast_expressions, @@ -238,6 +250,7 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { 'temp: 't, { ExpressionContext { + enable_extensions: self.enable_extensions, globals: self.globals, types: self.types, ast_expressions: self.ast_expressions, @@ -259,6 +272,7 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { #[allow(dead_code)] fn as_global(&mut self) -> GlobalContext<'a, '_, '_> { GlobalContext { + enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, types: self.types, @@ -367,6 +381,8 @@ pub enum ExpressionContextType<'temp, 'out> { /// [`as_const`]: ExpressionContext::as_const /// [`Expression::Constant`]: ir::Expression::Constant pub struct ExpressionContext<'source, 'temp, 'out> { + enable_extensions: EnableExtensions, + // WGSL AST values. 
ast_expressions: &'temp Arena>, types: &'temp Arena>, @@ -429,6 +445,7 @@ impl<'source, 'temp, 'out> ExpressionContext<'source, 'temp, 'out> { #[allow(dead_code)] fn as_const(&mut self) -> ExpressionContext<'source, '_, '_> { ExpressionContext { + enable_extensions: self.enable_extensions, globals: self.globals, types: self.types, ast_expressions: self.ast_expressions, @@ -456,6 +473,7 @@ impl<'source, 'temp, 'out> ExpressionContext<'source, 'temp, 'out> { fn as_global(&mut self) -> GlobalContext<'source, '_, '_> { GlobalContext { + enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, types: self.types, @@ -1127,6 +1145,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { }; let mut ctx = GlobalContext { + enable_extensions: tu.enable_extensions, ast_expressions: &tu.expressions, globals: &mut FastHashMap::default(), types: &tu.types, @@ -1458,6 +1477,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let mut typifier = Typifier::default(); let mut stmt_ctx = StatementContext { + enable_extensions: ctx.enable_extensions, local_table: &mut local_table, globals: ctx.globals, ast_expressions: ctx.ast_expressions, @@ -1879,17 +1899,13 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { ir::Statement::Return { value } } ast::StatementKind::Kill => ir::Statement::Kill, - ast::StatementKind::Call { - ref function, - ref arguments, - } => { + ast::StatementKind::Call(ref call_phrase) => { let mut emitter = proc::Emitter::default(); emitter.start(&ctx.function.expressions); let _ = self.call( + call_phrase, stmt.span, - function, - arguments, &mut ctx.as_expression(block, &mut emitter), true, )?; @@ -2125,10 +2141,21 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let handle = ctx.interrupt_emitter(ir::Expression::Literal(literal), span)?; return Ok(Typed::Plain(handle)); } - ast::Expression::Ident(ast::IdentExpr::Local(local)) => { + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ref template_list, .. 
+ }) if !template_list.is_empty() => { + return Err(Box::new(Error::UnexpectedTemplate(span))) + } + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ident: ast::IdentExpr::Local(local), + .. + }) => { return ctx.local(&local, span); } - ast::Expression::Ident(ast::IdentExpr::Unresolved(name)) => { + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ident: ast::IdentExpr::Unresolved(name), + .. + }) => { let global = ctx .globals .get(name) @@ -2156,14 +2183,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { return expr.try_map(|handle| ctx.interrupt_emitter(handle, span)); } - ast::Expression::Construct { - ref ty, - ty_span, - ref components, - } => { - let handle = self.construct(span, ty, ty_span, components, ctx)?; - return Ok(Typed::Plain(handle)); - } ast::Expression::Unary { op, expr } => { let expr = self.expression_for_abstract(expr, ctx)?; Typed::Plain(ir::Expression::Unary { op, expr }) @@ -2213,13 +2232,10 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { ast::Expression::Binary { op, left, right } => { self.binary(op, left, right, span, ctx)? } - ast::Expression::Call { - ref function, - ref arguments, - } => { + ast::Expression::Call(ref call_phrase) => { let handle = self - .call(span, function, arguments, ctx, false)? - .ok_or(Error::FunctionReturnsVoid(function.span))?; + .call(call_phrase, span, ctx, false)? + .ok_or(Error::FunctionReturnsVoid(span))?; return Ok(Typed::Plain(handle)); } ast::Expression::Index { base, index } => { @@ -2301,29 +2317,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { access } - ast::Expression::Bitcast { expr, to, ty_span } => { - let expr = self.expression(expr, ctx)?; - let to_resolved = self.resolve_ast_type(to, &mut ctx.as_const())?; - - let element_scalar = match ctx.module.types[to_resolved].inner { - ir::TypeInner::Scalar(scalar) => scalar, - ir::TypeInner::Vector { scalar, .. 
} => scalar, - _ => { - let ty = resolve!(ctx, expr); - return Err(Box::new(Error::BadTypeCast { - from_type: ctx.type_resolution_to_string(ty), - span: ty_span, - to_type: ctx.type_to_string(to_resolved), - })); - } - }; - - Typed::Plain(ir::Expression::As { - expr, - kind: element_scalar.kind, - convert: None, - }) - } }; expr.try_map(|handle| ctx.append_expression(handle, span)) @@ -2331,16 +2324,20 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { fn enumerant( expr: Handle>, - ctx: &mut ExpressionContext<'source, '_, '_>, + ctx: &ExpressionContext<'source, '_, '_>, ) -> Result<'source, (&'source str, Span)> { let span = ctx.ast_expressions.get_span(expr); let expr = &ctx.ast_expressions[expr]; match *expr { - ast::Expression::Ident(ast::IdentExpr::Local(_)) => { - Err(Box::new(Error::UnexpectedIdentForEnumerant(span))) - } - ast::Expression::Ident(ast::IdentExpr::Unresolved(name)) => { + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ident: ast::IdentExpr::Local(_), + .. + }) => Err(Box::new(Error::UnexpectedIdentForEnumerant(span))), + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ident: ast::IdentExpr::Unresolved(name), + .. 
+ }) => { if ctx.globals.get(name).is_some() { Err(Box::new(Error::UnexpectedIdentForEnumerant(span))) } else { @@ -2352,40 +2349,183 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } fn var_address_space( - template_list: &Option>>>, - ctx: &mut ExpressionContext<'source, '_, '_>, + template_list: &[Handle>], + ctx: &ExpressionContext<'source, '_, '_>, ) -> Result<'source, ir::AddressSpace> { - let mut address_space = ir::AddressSpace::Handle; + let mut tl = TemplateListIter::new(Span::UNDEFINED, template_list); + let mut address_space = tl.maybe_address_space(ctx)?; + if let Some(ref mut address_space) = address_space { + tl.maybe_access_mode(address_space, ctx)?; + } + tl.finish(ctx)?; + Ok(address_space.unwrap_or(ir::AddressSpace::Handle)) + } - if let Some(ref template_list) = template_list { - let mut template_list_args = template_list.iter(); - let address_space_expr = template_list_args.next().unwrap(); + fn type_expression( + &mut self, + expr: Handle>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, Handle> { + let span = ctx.ast_expressions.get_span(expr); + let expr = &ctx.ast_expressions[expr]; + + let (ident, ident_span, template_list) = match expr { + ast::Expression::Ident(ast::TemplateElaboratedIdent { + ident: ast::IdentExpr::Unresolved(ident), + ident_span, + template_list, + .. 
+ }) => (*ident, *ident_span, template_list), + _ => return Err(Box::new(Error::UnexpectedExprForTypeExpression(span))), + }; - let (enumerant, span) = Self::enumerant(*address_space_expr, ctx)?; - address_space = conv::map_address_space(enumerant, span)?; + let mut tl = TemplateListIter::new(ident_span, template_list); - match address_space { - ir::AddressSpace::Storage { ref mut access } => { - if let Some(access_mode_expr) = template_list_args.next() { - let (enumerant, span) = Self::enumerant(*access_mode_expr, ctx)?; - let access_mode = conv::map_access_mode(enumerant, span)?; - *access = access_mode; - } else { - // defaulting to `read` - *access = ir::StorageAccess::LOAD + if let Some(global) = ctx.globals.get(ident) { + match global { + &LoweredGlobalDecl::Type(handle) => { + tl.finish(ctx)?; + return Ok(handle); + } + _ => return Err(Box::new(Error::UnexpectedExprForTypeExpression(span))), + } + } + + let ty = conv::map_predeclared_type(&ctx.enable_extensions, ident_span, ident)?; + let Some(ty) = ty else { + return Err(Box::new(Error::UnknownIdent(ident_span, ident))); + }; + let ty = self.finalize_type(ctx, ty, &mut tl)?; + + tl.finish(ctx)?; + + Ok(ty) + } + + fn finalize_type( + &mut self, + ctx: &mut ExpressionContext<'source, '_, '_>, + ty: conv::PredeclaredType, + tl: &mut TemplateListIter<'_, 'source>, + ) -> Result<'source, Handle> { + let ty = match ty { + conv::PredeclaredType::TypeInner(ty_inner) => { + match ty_inner { + ir::TypeInner::Image { + class: ir::ImageClass::External, + .. + } => { + // Other than the WGSL backend, every backend that supports + // external textures does so by lowering them to a set of + // ordinary textures and some parameters saying how to + // sample from them. 
We don't know which backend will + // consume the `Module` we're building, but in case it's not + // WGSL, populate `SpecialTypes::external_texture_params` + // and `SpecialTypes::external_texture_transfer_function` + // with the types the backend will use for the parameter + // buffer. + // + // Neither of these are the type we are lowering here: + // that's an ordinary `TypeInner::Image`. But the fact we + // are lowering a `texture_external` implies the backends + // may need these additional types too. + ctx.module.generate_external_texture_types(); } + _ => {} } - _ => {} + ctx.as_global().ensure_type_exists(None, ty_inner) } + conv::PredeclaredType::RayDesc => ctx.module.generate_ray_desc_type(), + conv::PredeclaredType::RayIntersection => ctx.module.generate_ray_intersection_type(), + conv::PredeclaredType::TypeGenerator(type_generator) => { + let ty_inner = match type_generator { + conv::TypeGenerator::Vector { size } => { + let (scalar, _) = tl.scalar(self, ctx)?; + ir::TypeInner::Vector { size, scalar } + } + conv::TypeGenerator::Matrix { columns, rows } => { + let (scalar, span) = tl.scalar(self, ctx)?; + if scalar.kind != ir::ScalarKind::Float { + return Err(Box::new(Error::BadMatrixScalarKind(span, scalar))); + } + ir::TypeInner::Matrix { + columns, + rows, + scalar, + } + } + conv::TypeGenerator::Array => { + let base = tl.ty(self, ctx)?; + let size = tl.maybe_array_size(self, ctx)?; + + // Determine the size of the base type, if needed. + ctx.layouter.update(ctx.module.to_ctx()).map_err(|err| { + let LayoutErrorInner::TooLarge = err.inner else { + unreachable!("unexpected layout error: {err:?}"); + }; + // Lots of type definitions don't get spans, so this error + // message may not be very useful. 
+ Box::new(Error::TypeTooLarge { + span: ctx.module.types.get_span(err.ty), + }) + })?; + let stride = ctx.layouter[base].to_stride(); - let unused_args: Vec = template_list_args - .map(|expr| ctx.ast_expressions.get_span(*expr)) - .collect(); - if !unused_args.is_empty() { - return Err(Box::new(Error::UnusedArgsForTemplate(unused_args))); + ir::TypeInner::Array { base, size, stride } + } + conv::TypeGenerator::Atomic => { + let (scalar, _) = tl.scalar(self, ctx)?; + ir::TypeInner::Atomic(scalar) + } + conv::TypeGenerator::Pointer => { + let mut space = tl.address_space(ctx)?; + let base = tl.ty(self, ctx)?; + tl.maybe_access_mode(&mut space, ctx)?; + ir::TypeInner::Pointer { base, space } + } + conv::TypeGenerator::SampledTexture { + dim, + arrayed, + multi, + } => { + let (scalar, span) = tl.scalar(self, ctx)?; + let ir::Scalar { kind, width } = scalar; + if width != 4 { + return Err(Box::new(Error::BadTextureSampleType { span, scalar })); + } + ir::TypeInner::Image { + dim, + arrayed, + class: ir::ImageClass::Sampled { kind, multi }, + } + } + conv::TypeGenerator::StorageTexture { dim, arrayed } => { + let format = tl.storage_format(ctx)?; + let access = tl.access_mode(ctx)?; + ir::TypeInner::Image { + dim, + arrayed, + class: ir::ImageClass::Storage { format, access }, + } + } + conv::TypeGenerator::BindingArray => { + let base = tl.ty(self, ctx)?; + let size = tl.maybe_array_size(self, ctx)?; + ir::TypeInner::BindingArray { base, size } + } + conv::TypeGenerator::AccelerationStructure => { + let vertex_return = tl.maybe_vertex_return(ctx)?; + ir::TypeInner::AccelerationStructure { vertex_return } + } + conv::TypeGenerator::RayQuery => { + let vertex_return = tl.maybe_vertex_return(ctx)?; + ir::TypeInner::RayQuery { vertex_return } + } + }; + ctx.as_global().ensure_type_exists(None, ty_inner) } - } - Ok(address_space) + }; + Ok(ty) } fn binary( @@ -2469,22 +2609,32 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { /// [`Call`]: ir::Statement::Call fn call( &mut 
self, + call_phrase: &ast::CallPhrase<'source>, span: Span, - function: &ast::Ident<'source>, - arguments: &[Handle>], ctx: &mut ExpressionContext<'source, '_, '_>, is_statement: bool, ) -> Result<'source, Option>> { - let function_span = function.span; - match ctx.globals.get(function.name) { + let function_name = match call_phrase.function.ident { + ast::IdentExpr::Unresolved(name) => name, + ast::IdentExpr::Local(_) => { + return Err(Box::new(Error::CalledLocalDecl( + call_phrase.function.ident_span, + ))) + } + }; + let mut function_span = call_phrase.function.ident_span; + function_span.subsume(call_phrase.function.template_list_span); + let arguments = call_phrase.arguments.as_slice(); + + let mut tl = TemplateListIter::new(function_span, &call_phrase.function.template_list); + + match ctx.globals.get(function_name) { Some(&LoweredGlobalDecl::Type(ty)) => { - let handle = self.construct( - span, - &ast::ConstructorType::Type(ty), - function_span, - arguments, - ctx, - )?; + // user-declared types can't make use of template lists + tl.finish(ctx)?; + + let handle = + self.construct(span, Constructor::Type(ty), function_span, arguments, ctx)?; Ok(Some(handle)) } Some( @@ -2502,6 +2652,9 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { handle: function, must_use, }) => { + // user-declared functions can't make use of template lists + tl.finish(ctx)?; + let arguments = arguments .iter() .enumerate() @@ -2557,8 +2710,72 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { Ok(result) } None => { + match function_name { + "bitcast" => { + let ty = tl.ty(self, ctx)?; + tl.finish(ctx)?; + + let mut args = ctx.prepare_args(arguments, 1, function_span); + let expr = self.expression(args.next()?, ctx)?; + args.finish()?; + + let element_scalar = match ctx.module.types[ty].inner { + ir::TypeInner::Scalar(scalar) => scalar, + ir::TypeInner::Vector { scalar, .. 
} => scalar, + _ => { + let ty_resolution = resolve!(ctx, expr); + return Err(Box::new(Error::BadTypeCast { + from_type: ctx.type_resolution_to_string(ty_resolution), + span: function_span, + to_type: ctx.type_to_string(ty), + })); + } + }; + + let expr = ir::Expression::As { + expr, + kind: element_scalar.kind, + convert: None, + }; + + let expr = ctx.append_expression(expr, function_span)?; + return Ok(Some(expr)); + } + _ => {} + } + + let ty = conv::map_predeclared_type( + &ctx.enable_extensions, + function_span, + function_name, + )?; + if let Some(ty) = ty { + let empty_template_list = call_phrase.function.template_list.is_empty(); + let constructor_ty = match ty { + conv::PredeclaredType::TypeGenerator(conv::TypeGenerator::Vector { + size, + }) if empty_template_list => Constructor::PartialVector { size }, + conv::PredeclaredType::TypeGenerator(conv::TypeGenerator::Matrix { + columns, + rows, + }) if empty_template_list => Constructor::PartialMatrix { columns, rows }, + conv::PredeclaredType::TypeGenerator(conv::TypeGenerator::Array) + if empty_template_list => + { + Constructor::PartialArray + } + _ => Constructor::Type(self.finalize_type(ctx, ty, &mut tl)?), + }; + tl.finish(ctx)?; + let handle = + self.construct(span, constructor_ty, function_span, arguments, ctx)?; + return Ok(Some(handle)); + }; + + tl.finish(ctx)?; + let span = function_span; - let expr = if let Some(fun) = conv::map_relational_fun(function.name) { + let expr = if let Some(fun) = conv::map_relational_fun(function_name) { let mut args = ctx.prepare_args(arguments, 1, span); let argument = self.expression(args.next()?, ctx)?; args.finish()?; @@ -2582,28 +2799,28 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } else { ir::Expression::Relational { fun, argument } } - } else if let Some((axis, ctrl)) = conv::map_derivative(function.name) { + } else if let Some((axis, ctrl)) = conv::map_derivative(function_name) { let mut args = ctx.prepare_args(arguments, 1, span); let expr = 
self.expression(args.next()?, ctx)?; args.finish()?; ir::Expression::Derivative { axis, ctrl, expr } - } else if let Some(fun) = conv::map_standard_fun(function.name) { + } else if let Some(fun) = conv::map_standard_fun(function_name) { self.math_function_helper(span, fun, arguments, ctx)? - } else if let Some(fun) = Texture::map(function.name) { + } else if let Some(fun) = Texture::map(function_name) { self.texture_sample_helper(fun, arguments, span, ctx)? - } else if let Some((op, cop)) = conv::map_subgroup_operation(function.name) { + } else if let Some((op, cop)) = conv::map_subgroup_operation(function_name) { return Ok(Some( self.subgroup_operation_helper(span, op, cop, arguments, ctx)?, )); - } else if let Some(mode) = SubgroupGather::map(function.name) { + } else if let Some(mode) = SubgroupGather::map(function_name) { return Ok(Some( self.subgroup_gather_helper(span, mode, arguments, ctx)?, )); - } else if let Some(fun) = ir::AtomicFunction::map(function.name) { + } else if let Some(fun) = ir::AtomicFunction::map(function_name) { return self.atomic_helper(span, fun, arguments, is_statement, ctx); } else { - match function.name { + match function_name { "select" => { let mut args = ctx.prepare_args(arguments, 3, span); @@ -2779,7 +2996,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { image, coordinate, array_index, - fun: match function.name { + fun: match function_name { "textureAtomicMin" => ir::AtomicFunction::Min, "textureAtomicMax" => ir::AtomicFunction::Max, "textureAtomicAdd" => ir::AtomicFunction::Add, @@ -3093,17 +3310,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { committed: false, } } - "RayDesc" => { - let ty = ctx.module.generate_ray_desc_type(); - let handle = self.construct( - span, - &ast::ConstructorType::Type(ty), - function.span, - arguments, - ctx, - )?; - return Ok(Some(handle)); - } "subgroupBallot" => { let mut args = ctx.prepare_args(arguments, 0, span); let predicate = if arguments.len() == 1 { @@ -3192,7 +3398,7 @@ 
impl<'source, 'temp> Lowerer<'source, 'temp> { return Ok(Some(result)); } _ => { - return Err(Box::new(Error::UnknownIdent(function.span, function.name))) + return Err(Box::new(Error::UnknownIdent(function_span, function_name))) } } }; @@ -3941,6 +4147,46 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { }) } + fn array_size2( + &mut self, + expr: Handle>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::ArraySize> { + let span = ctx.ast_expressions.get_span(expr); + let const_ctx = &mut ctx.as_const(); + let const_expr = self.expression(expr, const_ctx); + match const_expr { + Ok(value) => { + let len = const_ctx.const_eval_expr_to_u32(value).map_err(|err| { + Box::new(match err { + proc::U32EvalError::NonConst => { + Error::ExpectedConstExprConcreteIntegerScalar(span) + } + proc::U32EvalError::Negative => Error::ExpectedPositiveArrayLength(span), + }) + })?; + let size = NonZeroU32::new(len).ok_or(Error::ExpectedPositiveArrayLength(span))?; + Ok(ir::ArraySize::Constant(size)) + } + Err(err) => { + if let Error::ConstantEvaluatorError(ref ty, _) = *err { + match **ty { + proc::ConstantEvaluatorError::OverrideExpr => { + Ok(ir::ArraySize::Pending(self.array_size_override( + expr, + &mut ctx.as_global().as_override(), + span, + )?)) + } + _ => Err(err), + } + } else { + Err(err) + } + } + } + } + fn array_size_override( &mut self, size_expr: Handle>, diff --git a/naga/src/front/wgsl/lower/template_list.rs b/naga/src/front/wgsl/lower/template_list.rs new file mode 100644 index 00000000000..a9a28ccf1c2 --- /dev/null +++ b/naga/src/front/wgsl/lower/template_list.rs @@ -0,0 +1,159 @@ +use alloc::{boxed::Box, vec::Vec}; + +use crate::{ + front::wgsl::{ + error::Error, + lower::{ExpressionContext, Lowerer, Result}, + parse::{ast, conv}, + }, + ir, Handle, Span, +}; + +/// Iterator over a template list. +/// +/// All functions will attempt to consume an element in the list. 
+/// +/// Function variants prefixed with "maybe" will not return an error if there +/// are no more elements left in the list. +pub struct TemplateListIter<'iter, 'source> { + ident_span: Span, + template_list: core::slice::Iter<'iter, Handle>>, +} + +impl<'iter, 'source> TemplateListIter<'iter, 'source> { + pub fn new(ident_span: Span, template_list: &'iter [Handle>]) -> Self { + Self { + ident_span, + template_list: template_list.iter(), + } + } + + pub fn finish(self, ctx: &ExpressionContext<'source, '_, '_>) -> Result<'source, ()> { + let unused_args: Vec = self + .template_list + .map(|expr| ctx.ast_expressions.get_span(*expr)) + .collect(); + if unused_args.is_empty() { + Ok(()) + } else { + Err(Box::new(Error::UnusedArgsForTemplate(unused_args))) + } + } + + fn expect_next( + &mut self, + arg: &'static str, + ) -> Result<'source, Handle>> { + if let Some(expr) = self.template_list.next() { + Ok(*expr) + } else { + Err(Box::new(Error::MissingTemplateArg { + span: self.ident_span, + arg, + })) + } + } + + pub fn ty( + &mut self, + lowerer: &mut Lowerer<'source, '_>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, Handle> { + let expr = self.expect_next("`T`, a type")?; + lowerer.type_expression(expr, ctx) + } + + pub fn scalar( + &mut self, + lowerer: &mut Lowerer<'source, '_>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, (ir::Scalar, Span)> { + let expr = self.expect_next("`T`, a scalar type")?; + let ty = lowerer.type_expression(expr, ctx)?; + let span = ctx.ast_expressions.get_span(expr); + match ctx.module.types[ty].inner { + ir::TypeInner::Scalar(scalar) => Ok((scalar, span)), + _ => Err(Box::new(Error::UnknownScalarType(span))), + } + } + + pub fn maybe_array_size( + &mut self, + lowerer: &mut Lowerer<'source, '_>, + ctx: &mut ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::ArraySize> { + if let Some(expr) = self.template_list.next() { + lowerer.array_size2(*expr, ctx) + } else { + 
Ok(ir::ArraySize::Dynamic) + } + } + + pub fn address_space( + &mut self, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::AddressSpace> { + let expr = self.expect_next("`AS`, an address space")?; + let (enumerant, span) = Lowerer::enumerant(expr, ctx)?; + conv::map_address_space(enumerant, span) + } + pub fn maybe_address_space( + &mut self, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, Option> { + if let Some(expr) = self.template_list.next() { + let (enumerant, span) = Lowerer::enumerant(*expr, ctx)?; + Ok(Some(conv::map_address_space(enumerant, span)?)) + } else { + Ok(None) + } + } + + pub fn access_mode( + &mut self, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::StorageAccess> { + let expr = self.expect_next("`Access`, an access mode")?; + let (enumerant, span) = Lowerer::enumerant(expr, ctx)?; + conv::map_access_mode(enumerant, span) + } + pub fn maybe_access_mode( + &mut self, + space: &mut ir::AddressSpace, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ()> { + if let &mut ir::AddressSpace::Storage { ref mut access } = space { + if let Some(expr) = self.template_list.next() { + let (enumerant, span) = Lowerer::enumerant(*expr, ctx)?; + let access_mode = conv::map_access_mode(enumerant, span)?; + *access = access_mode; + } else { + // defaulting to `read` + *access = ir::StorageAccess::LOAD + } + } + Ok(()) + } + + pub fn storage_format( + &mut self, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, ir::StorageFormat> { + let expr = self.expect_next("`Format`, a texel format")?; + let (enumerant, span) = Lowerer::enumerant(expr, ctx)?; + conv::map_storage_format(enumerant, span) + } + + pub fn maybe_vertex_return( + &mut self, + ctx: &ExpressionContext<'source, '_, '_>, + ) -> Result<'source, bool> { + if let Some(expr) = self.template_list.next() { + let (enumerant, span) = Lowerer::enumerant(*expr, ctx)?; + conv::map_ray_flag(enumerant, span)?; + 
Ok(true) + } else { + Ok(false) + } + } +} diff --git a/naga/src/front/wgsl/parse/ast.rs b/naga/src/front/wgsl/parse/ast.rs index 0ac45c48d4e..10811bce468 100644 --- a/naga/src/front/wgsl/parse/ast.rs +++ b/naga/src/front/wgsl/parse/ast.rs @@ -58,6 +58,34 @@ pub enum IdentExpr<'a> { Local(Handle), } +#[derive(Debug)] +pub struct TemplateElaboratedIdent<'a> { + pub ident: IdentExpr<'a>, + pub ident_span: Span, + pub template_list: Vec>>, + pub template_list_span: Span, +} + +/// A function call or type constructor expression. +/// +/// We can't tell whether an expression like `IDENTIFIER(EXPR, ...)` is a +/// construction expression or a function call until we know `IDENTIFIER`'s +/// definition, so we represent everything of that form as one of these +/// expressions until lowering. At that point, [`Lowerer::call`] has +/// everything's definition in hand, and can decide whether to emit a Naga +/// [`Constant`], [`As`], [`Splat`], or [`Compose`] expression. +/// +/// [`Lowerer::call`]: Lowerer::call +/// [`Constant`]: crate::Expression::Constant +/// [`As`]: crate::Expression::As +/// [`Splat`]: crate::Expression::Splat +/// [`Compose`]: crate::Expression::Compose +#[derive(Debug)] +pub struct CallPhrase<'a> { + pub function: TemplateElaboratedIdent<'a>, + pub arguments: Vec>>, +} + /// A reference to a module-scope definition or predeclared object. 
/// /// Each [`GlobalDecl`] holds a set of these values, to be resolved to @@ -164,7 +192,7 @@ pub struct ResourceBinding<'a> { #[derive(Debug)] pub struct GlobalVariable<'a> { pub name: Ident<'a>, - pub template_list: Option>>>, + pub template_list: Vec>>, pub binding: Option>, pub ty: Option>>, pub init: Option>>, @@ -304,10 +332,7 @@ pub enum StatementKind<'a> { value: Option>>, }, Kill, - Call { - function: Ident<'a>, - arguments: Vec>>, - }, + Call(CallPhrase<'a>), Assign { target: Handle>, op: Option, @@ -332,77 +357,6 @@ pub struct SwitchCase<'a> { pub fall_through: bool, } -/// A type at the head of a [`Construct`] expression. -/// -/// WGSL has two types of [`type constructor expressions`]: -/// -/// - Those that fully specify the type being constructed, like -/// `vec3(x,y,z)`, which obviously constructs a `vec3`. -/// -/// - Those that leave the component type of the composite being constructed -/// implicit, to be inferred from the argument types, like `vec3(x,y,z)`, -/// which constructs a `vec3` where `T` is the type of `x`, `y`, and `z`. -/// -/// This enum represents the head type of both cases. The `PartialFoo` variants -/// represent the second case, where the component type is implicit. -/// -/// This does not cover structs or types referred to by type aliases. See the -/// documentation for [`Construct`] and [`Call`] expressions for details. -/// -/// [`Construct`]: Expression::Construct -/// [`type constructor expressions`]: https://gpuweb.github.io/gpuweb/wgsl/#type-constructor-expr -/// [`Call`]: Expression::Call -#[derive(Debug)] -pub enum ConstructorType<'a> { - /// A scalar type or conversion: `f32(1)`. - Scalar(Scalar), - - /// A vector construction whose component type is inferred from the - /// argument: `vec3(1.0)`. - PartialVector { size: crate::VectorSize }, - - /// A vector construction whose component type is written out: - /// `vec3(1.0)`. 
- Vector { - size: crate::VectorSize, - ty: Handle>, - ty_span: Span, - }, - - /// A matrix construction whose component type is inferred from the - /// argument: `mat2x2(1,2,3,4)`. - PartialMatrix { - columns: crate::VectorSize, - rows: crate::VectorSize, - }, - - /// A matrix construction whose component type is written out: - /// `mat2x2(1,2,3,4)`. - Matrix { - columns: crate::VectorSize, - rows: crate::VectorSize, - ty: Handle>, - ty_span: Span, - }, - - /// An array whose component type and size are inferred from the arguments: - /// `array(3,4,5)`. - PartialArray, - - /// An array whose component type and size are written out: - /// `array(3,4,5)`. - Array { - base: Handle>, - size: ArraySize<'a>, - }, - - /// Constructing a value of a known Naga IR type. - /// - /// This variant is produced only during lowering, when we have Naga types - /// available, never during parsing. - Type(Handle), -} - #[derive(Debug, Copy, Clone)] pub enum Literal { Bool(bool), @@ -415,27 +369,7 @@ use crate::front::wgsl::lower::Lowerer; #[derive(Debug)] pub enum Expression<'a> { Literal(Literal), - Ident(IdentExpr<'a>), - - /// A type constructor expression. - /// - /// This is only used for expressions like `KEYWORD(EXPR...)` and - /// `KEYWORD(EXPR...)`, where `KEYWORD` is a [type-defining keyword] like - /// `vec3`. These keywords cannot be shadowed by user definitions, so we can - /// tell that such an expression is a construction immediately. - /// - /// For ordinary identifiers, we can't tell whether an expression like - /// `IDENTIFIER(EXPR, ...)` is a construction expression or a function call - /// until we know `IDENTIFIER`'s definition, so we represent those as - /// [`Call`] expressions. 
- /// - /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords - /// [`Call`]: Expression::Call - Construct { - ty: ConstructorType<'a>, - ty_span: Span, - components: Vec>>, - }, + Ident(TemplateElaboratedIdent<'a>), Unary { op: crate::UnaryOperator, expr: Handle>, @@ -447,25 +381,7 @@ pub enum Expression<'a> { left: Handle>, right: Handle>, }, - - /// A function call or type constructor expression. - /// - /// We can't tell whether an expression like `IDENTIFIER(EXPR, ...)` is a - /// construction expression or a function call until we know `IDENTIFIER`'s - /// definition, so we represent everything of that form as one of these - /// expressions until lowering. At that point, [`Lowerer::call`] has - /// everything's definition in hand, and can decide whether to emit a Naga - /// [`Constant`], [`As`], [`Splat`], or [`Compose`] expression. - /// - /// [`Lowerer::call`]: Lowerer::call - /// [`Constant`]: crate::Expression::Constant - /// [`As`]: crate::Expression::As - /// [`Splat`]: crate::Expression::Splat - /// [`Compose`]: crate::Expression::Compose - Call { - function: Ident<'a>, - arguments: Vec>>, - }, + Call(CallPhrase<'a>), Index { base: Handle>, index: Handle>, @@ -474,11 +390,6 @@ pub enum Expression<'a> { base: Handle>, field: Ident<'a>, }, - Bitcast { - expr: Handle>, - to: Handle>, - ty_span: Span, - }, } #[derive(Debug)] diff --git a/naga/src/front/wgsl/parse/conv.rs b/naga/src/front/wgsl/parse/conv.rs index 09d0e22c14b..5b6eb3b6b40 100644 --- a/naga/src/front/wgsl/parse/conv.rs +++ b/naga/src/front/wgsl/parse/conv.rs @@ -2,7 +2,7 @@ use crate::front::wgsl::parse::directive::enable_extension::{ EnableExtensions, ImplementedEnableExtension, }; use crate::front::wgsl::{Error, Result, Scalar}; -use crate::Span; +use crate::{ImageClass, ImageDimension, Span, TypeInner, VectorSize}; use alloc::boxed::Box; @@ -32,6 +32,13 @@ pub fn map_access_mode(word: &str, span: Span) -> Result<'_, crate::StorageAcces } } +pub fn 
map_ray_flag(word: &str, span: Span) -> Result<'_, ()> { + match word { + "vertex_return" => Ok(()), + _ => Err(Box::new(Error::UnknownRayFlag(span))), + } +} + pub fn map_built_in( enable_extensions: &EnableExtensions, word: &str, @@ -348,3 +355,167 @@ pub fn map_subgroup_operation( _ => return None, }) } + +pub enum TypeGenerator { + Vector { + size: VectorSize, + }, + Matrix { + columns: VectorSize, + rows: VectorSize, + }, + Array, + Atomic, + Pointer, + SampledTexture { + dim: ImageDimension, + arrayed: bool, + multi: bool, + }, + StorageTexture { + dim: ImageDimension, + arrayed: bool, + }, + BindingArray, + AccelerationStructure, + RayQuery, +} + +pub enum PredeclaredType { + TypeInner(TypeInner), + RayDesc, + RayIntersection, + TypeGenerator(TypeGenerator), +} +impl From for PredeclaredType { + fn from(value: TypeInner) -> Self { + Self::TypeInner(value) + } +} +impl From for PredeclaredType { + fn from(value: TypeGenerator) -> Self { + Self::TypeGenerator(value) + } +} + +pub fn map_predeclared_type( + enable_extensions: &EnableExtensions, + span: Span, + word: &str, +) -> Result<'static, Option> { + #[rustfmt::skip] + let ty = match word { + // predeclared types + + // scalars + "bool" => TypeInner::Scalar(Scalar::BOOL).into(), + "i32" => TypeInner::Scalar(Scalar::I32).into(), + "u32" => TypeInner::Scalar(Scalar::U32).into(), + "f32" => TypeInner::Scalar(Scalar::F32).into(), + "f16" => TypeInner::Scalar(Scalar::F16).into(), + "i64" => TypeInner::Scalar(Scalar::I64).into(), + "u64" => TypeInner::Scalar(Scalar::U64).into(), + "f64" => TypeInner::Scalar(Scalar::F64).into(), + // vector aliases + "vec2i" => TypeInner::Vector { size: VectorSize::Bi, scalar: Scalar::I32 }.into(), + "vec3i" => TypeInner::Vector { size: VectorSize::Tri, scalar: Scalar::I32 }.into(), + "vec4i" => TypeInner::Vector { size: VectorSize::Quad, scalar: Scalar::I32 }.into(), + "vec2u" => TypeInner::Vector { size: VectorSize::Bi, scalar: Scalar::U32 }.into(), + "vec3u" => 
TypeInner::Vector { size: VectorSize::Tri, scalar: Scalar::U32 }.into(), + "vec4u" => TypeInner::Vector { size: VectorSize::Quad, scalar: Scalar::U32 }.into(), + "vec2f" => TypeInner::Vector { size: VectorSize::Bi, scalar: Scalar::F32 }.into(), + "vec3f" => TypeInner::Vector { size: VectorSize::Tri, scalar: Scalar::F32 }.into(), + "vec4f" => TypeInner::Vector { size: VectorSize::Quad, scalar: Scalar::F32 }.into(), + "vec2h" => TypeInner::Vector { size: VectorSize::Bi, scalar: Scalar::F16 }.into(), + "vec3h" => TypeInner::Vector { size: VectorSize::Tri, scalar: Scalar::F16 }.into(), + "vec4h" => TypeInner::Vector { size: VectorSize::Quad, scalar: Scalar::F16 }.into(), + // matrix aliases + "mat2x2f" => TypeInner::Matrix { columns: VectorSize::Bi, rows: VectorSize::Bi, scalar: Scalar::F32 }.into(), + "mat2x3f" => TypeInner::Matrix { columns: VectorSize::Bi, rows: VectorSize::Tri, scalar: Scalar::F32 }.into(), + "mat2x4f" => TypeInner::Matrix { columns: VectorSize::Bi, rows: VectorSize::Quad, scalar: Scalar::F32 }.into(), + "mat3x2f" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Bi, scalar: Scalar::F32 }.into(), + "mat3x3f" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Tri, scalar: Scalar::F32 }.into(), + "mat3x4f" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Quad, scalar: Scalar::F32 }.into(), + "mat4x2f" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Bi, scalar: Scalar::F32 }.into(), + "mat4x3f" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Tri, scalar: Scalar::F32 }.into(), + "mat4x4f" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Quad, scalar: Scalar::F32 }.into(), + "mat2x2h" => TypeInner::Matrix { columns: VectorSize::Bi, rows: VectorSize::Bi, scalar: Scalar::F16 }.into(), + "mat2x3h" => TypeInner::Matrix { columns: VectorSize::Bi, rows: VectorSize::Tri, scalar: Scalar::F16 }.into(), + "mat2x4h" => TypeInner::Matrix { columns: 
VectorSize::Bi, rows: VectorSize::Quad, scalar: Scalar::F16 }.into(), + "mat3x2h" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Bi, scalar: Scalar::F16 }.into(), + "mat3x3h" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Tri, scalar: Scalar::F16 }.into(), + "mat3x4h" => TypeInner::Matrix { columns: VectorSize::Tri, rows: VectorSize::Quad, scalar: Scalar::F16 }.into(), + "mat4x2h" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Bi, scalar: Scalar::F16 }.into(), + "mat4x3h" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Tri, scalar: Scalar::F16 }.into(), + "mat4x4h" => TypeInner::Matrix { columns: VectorSize::Quad, rows: VectorSize::Quad, scalar: Scalar::F16 }.into(), + // samplers + "sampler" => TypeInner::Sampler { comparison: false }.into(), + "sampler_comparison" => TypeInner::Sampler { comparison: true }.into(), + // depth textures + "texture_depth_2d" => TypeInner::Image { dim: ImageDimension::D2, arrayed: false, class: ImageClass::Depth { multi: false } }.into(), + "texture_depth_2d_array" => TypeInner::Image { dim: ImageDimension::D2, arrayed: true, class: ImageClass::Depth { multi: false } }.into(), + "texture_depth_cube" => TypeInner::Image { dim: ImageDimension::Cube, arrayed: false, class: ImageClass::Depth { multi: false } }.into(), + "texture_depth_cube_array" => TypeInner::Image { dim: ImageDimension::Cube, arrayed: true, class: ImageClass::Depth { multi: false } }.into(), + "texture_depth_multisampled_2d" => TypeInner::Image { dim: ImageDimension::D2, arrayed: false, class: ImageClass::Depth { multi: true } }.into(), + // external texture + "texture_external" => TypeInner::Image { dim: ImageDimension::D2, arrayed: false, class: ImageClass::External }.into(), + // ray desc + "RayDesc" => PredeclaredType::RayDesc, + // ray intersection + "RayIntersection" => PredeclaredType::RayIntersection, + + // predeclared type generators + + // vector + "vec2" => TypeGenerator::Vector 
{ size: VectorSize::Bi }.into(), + "vec3" => TypeGenerator::Vector { size: VectorSize::Tri }.into(), + "vec4" => TypeGenerator::Vector { size: VectorSize::Quad }.into(), + // matrix + "mat2x2" => TypeGenerator::Matrix { columns: VectorSize::Bi, rows: VectorSize::Bi }.into(), + "mat2x3" => TypeGenerator::Matrix { columns: VectorSize::Bi, rows: VectorSize::Tri }.into(), + "mat2x4" => TypeGenerator::Matrix { columns: VectorSize::Bi, rows: VectorSize::Quad }.into(), + "mat3x2" => TypeGenerator::Matrix { columns: VectorSize::Tri, rows: VectorSize::Bi }.into(), + "mat3x3" => TypeGenerator::Matrix { columns: VectorSize::Tri, rows: VectorSize::Tri }.into(), + "mat3x4" => TypeGenerator::Matrix { columns: VectorSize::Tri, rows: VectorSize::Quad }.into(), + "mat4x2" => TypeGenerator::Matrix { columns: VectorSize::Quad, rows: VectorSize::Bi }.into(), + "mat4x3" => TypeGenerator::Matrix { columns: VectorSize::Quad, rows: VectorSize::Tri }.into(), + "mat4x4" => TypeGenerator::Matrix { columns: VectorSize::Quad, rows: VectorSize::Quad }.into(), + // array + "array" => TypeGenerator::Array.into(), + // atomic + "atomic" => TypeGenerator::Atomic.into(), + // pointer + "ptr" => TypeGenerator::Pointer.into(), + // sampled textures + "texture_1d" => TypeGenerator::SampledTexture { dim: ImageDimension::D1, arrayed: false, multi: false }.into(), + "texture_2d" => TypeGenerator::SampledTexture { dim: ImageDimension::D2, arrayed: false, multi: false }.into(), + "texture_2d_array" => TypeGenerator::SampledTexture { dim: ImageDimension::D2, arrayed: true, multi: false }.into(), + "texture_3d" => TypeGenerator::SampledTexture { dim: ImageDimension::D3, arrayed: false, multi: false }.into(), + "texture_cube" => TypeGenerator::SampledTexture { dim: ImageDimension::Cube, arrayed: false, multi: false }.into(), + "texture_cube_array" => TypeGenerator::SampledTexture { dim: ImageDimension::Cube, arrayed: true, multi: false }.into(), + "texture_multisampled_2d" => TypeGenerator::SampledTexture { 
dim: ImageDimension::D2, arrayed: false, multi: true }.into(), + // storage textures + "texture_storage_1d" => TypeGenerator::StorageTexture { dim: ImageDimension::D1, arrayed: false }.into(), + "texture_storage_2d" => TypeGenerator::StorageTexture { dim: ImageDimension::D2, arrayed: false }.into(), + "texture_storage_2d_array" => TypeGenerator::StorageTexture { dim: ImageDimension::D2, arrayed: true }.into(), + "texture_storage_3d" => TypeGenerator::StorageTexture { dim: ImageDimension::D3, arrayed: false }.into(), + // binding array + "binding_array" => TypeGenerator::BindingArray.into(), + // acceleration structure + "acceleration_structure" => TypeGenerator::AccelerationStructure.into(), + // ray query + "ray_query" => TypeGenerator::RayQuery.into(), + _ => return Ok(None), + }; + + if matches!(ty, PredeclaredType::TypeInner(ref ty) if ty.scalar() == Some(Scalar::F16)) + && !enable_extensions.contains(ImplementedEnableExtension::F16) + { + return Err(Box::new(Error::EnableExtensionNotEnabled { + span, + kind: ImplementedEnableExtension::F16.into(), + })); + } + + Ok(Some(ty)) +} diff --git a/naga/src/front/wgsl/parse/directive/enable_extension.rs b/naga/src/front/wgsl/parse/directive/enable_extension.rs index 38d6d6719ca..f7fb0ee1fa2 100644 --- a/naga/src/front/wgsl/parse/directive/enable_extension.rs +++ b/naga/src/front/wgsl/parse/directive/enable_extension.rs @@ -8,7 +8,7 @@ use crate::Span; use alloc::boxed::Box; /// Tracks the status of every enable-extension known to Naga. -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Copy, Debug, Eq, PartialEq)] pub struct EnableExtensions { dual_source_blending: bool, /// Whether `enable f16;` was written earlier in the shader module. 
diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 22b6e77605b..78dd6ec8c0f 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -624,11 +624,6 @@ impl<'a> Lexer<'a> { self.expect(Token::Paren('(')) } - pub(in crate::front::wgsl) fn close_arguments(&mut self) -> Result<'a, ()> { - let _ = self.next_if(Token::Separator(',')); - self.expect(Token::Paren(')')) - } - pub(in crate::front::wgsl) fn next_argument(&mut self) -> Result<'a, bool> { let paren = Token::Paren(')'); if self.next_if(Token::Separator(',')) { diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 3c44d3b5264..a8a128a018e 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -361,363 +361,6 @@ impl Parser { Ok(ast::SwitchValue::Expr(expr)) } - /// Decide if we're looking at a construction expression, and return its - /// type if so. - /// - /// If the identifier `word` is a [type-defining keyword], then return a - /// [`ConstructorType`] value describing the type to build. Return an error - /// if the type is not constructible (like `sampler`). - /// - /// If `word` isn't a type name, then return `None`. - /// - /// [type-defining keyword]: https://gpuweb.github.io/gpuweb/wgsl/#type-defining-keywords - /// [`ConstructorType`]: ast::ConstructorType - fn constructor_type<'a>( - &mut self, - lexer: &mut Lexer<'a>, - word: &'a str, - span: Span, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Option>> { - if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? 
{ - return Ok(Some(ast::ConstructorType::Scalar(scalar))); - } - - let partial = match word { - "vec2" => ast::ConstructorType::PartialVector { - size: crate::VectorSize::Bi, - }, - "vec2i" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - })) - } - "vec2u" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::U32), - ty_span: Span::UNDEFINED, - })) - } - "vec2f" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "vec2h" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "vec3" => ast::ConstructorType::PartialVector { - size: crate::VectorSize::Tri, - }, - "vec3i" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - })) - } - "vec3u" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::U32), - ty_span: Span::UNDEFINED, - })) - } - "vec3f" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "vec3h" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "vec4" => ast::ConstructorType::PartialVector { - size: crate::VectorSize::Quad, - }, - "vec4i" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - })) - } - "vec4u" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::U32), - 
ty_span: Span::UNDEFINED, - })) - } - "vec4f" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "vec4h" => { - return Ok(Some(ast::ConstructorType::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat2x2" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Bi, - }, - "mat2x2f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat2x2h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat2x3" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Tri, - }, - "mat2x3f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat2x3h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat2x4" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Quad, - }, - "mat2x4f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat2x4h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat3x2" => 
ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Bi, - }, - "mat3x2f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat3x2h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat3x3" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Tri, - }, - "mat3x3f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat3x3h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat3x4" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Quad, - }, - "mat3x4f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat3x4h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat4x2" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Bi, - }, - "mat4x2f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat4x2h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: 
crate::VectorSize::Quad, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat4x3" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Tri, - }, - "mat4x3f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat4x3h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "mat4x4" => ast::ConstructorType::PartialMatrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Quad, - }, - "mat4x4f" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - })) - } - "mat4x4h" => { - return Ok(Some(ast::ConstructorType::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - })) - } - "array" => ast::ConstructorType::PartialArray, - "atomic" - | "binding_array" - | "sampler" - | "sampler_comparison" - | "texture_1d" - | "texture_1d_array" - | "texture_2d" - | "texture_2d_array" - | "texture_3d" - | "texture_cube" - | "texture_cube_array" - | "texture_multisampled_2d" - | "texture_multisampled_2d_array" - | "texture_depth_2d" - | "texture_depth_2d_array" - | "texture_depth_cube" - | "texture_depth_cube_array" - | "texture_depth_multisampled_2d" - | "texture_external" - | "texture_storage_1d" - | "texture_storage_1d_array" - | "texture_storage_2d" - | "texture_storage_2d_array" - | "texture_storage_3d" => return Err(Box::new(Error::TypeNotConstructible(span))), - _ => return Ok(None), - }; - - // parse component type if present - match (lexer.peek().0, partial) { 
- (Token::TemplateArgsStart, ast::ConstructorType::PartialVector { size }) => { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - Ok(Some(ast::ConstructorType::Vector { size, ty, ty_span })) - } - (Token::TemplateArgsStart, ast::ConstructorType::PartialMatrix { columns, rows }) => { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - Ok(Some(ast::ConstructorType::Matrix { - columns, - rows, - ty, - ty_span, - })) - } - (Token::TemplateArgsStart, ast::ConstructorType::PartialArray) => { - lexer.expect(Token::TemplateArgsStart)?; - let base = self.type_specifier(lexer, ctx)?; - let size = if lexer.end_of_generic_arguments() { - let expr = self.const_generic_expression(lexer, ctx)?; - lexer.next_if(Token::Separator(',')); - ast::ArraySize::Constant(expr) - } else { - ast::ArraySize::Dynamic - }; - lexer.expect(Token::TemplateArgsEnd)?; - - Ok(Some(ast::ConstructorType::Array { base, size })) - } - (_, partial) => Ok(Some(partial)), - } - } - /// Expects `name` to be consumed (not in lexer). fn arguments<'a>( &mut self, @@ -754,51 +397,6 @@ impl Parser { Ok(expr) } - /// Expects `name` to be consumed (not in lexer). - fn call_expression<'a>( - &mut self, - lexer: &mut Lexer<'a>, - name: &'a str, - name_span: Span, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Handle>> { - let expr = match name { - // bitcast looks like a function call, but it's an operator and must be handled differently. - "bitcast" => { - let (to, span) = self.singular_generic(lexer, ctx)?; - - lexer.open_arguments()?; - let expr = self.expression(lexer, ctx)?; - lexer.close_arguments()?; - - ast::Expression::Bitcast { - expr, - to, - ty_span: span, - } - } - // everything else must be handled later, since they can be hidden by user-defined functions. 
- _ => { - let arguments = self.arguments(lexer, ctx)?; - ctx.unresolved.insert(ast::Dependency { - ident: name, - usage: name_span, - }); - ast::Expression::Call { - function: ast::Ident { - name, - span: name_span, - }, - arguments, - } - } - }; - - let span = lexer.span_with_start(name_span); - let expr = ctx.expressions.append(expr, span); - Ok(expr) - } - fn ident_expr<'a>( &mut self, name: &'a str, @@ -899,22 +497,19 @@ impl Parser { literal_ray_intersection(crate::RayQueryIntersection::Aabb) } (Token::Word(word), span) => { - if let Some(ty) = self.constructor_type(lexer, word, span, ctx)? { - let ty_span = lexer.span_with_start(span); - let components = self.arguments(lexer, ctx)?; - ast::Expression::Construct { - ty, - ty_span, - components, - } - } else if let Token::Paren('(') = lexer.peek().0 { - self.pop_rule_span(lexer); - return self.call_expression(lexer, word, span, ctx); - } else if word == "bitcast" { - self.pop_rule_span(lexer); - return self.call_expression(lexer, word, span, ctx); + let ident = self.template_elaborated_ident(word, span, lexer, ctx)?; + + if let Token::Paren('(') = lexer.peek().0 { + let arguments = self.arguments(lexer, ctx)?; + ctx.unresolved.insert(ast::Dependency { + ident: word, + usage: span, + }); + ast::Expression::Call(ast::CallPhrase { + function: ident, + arguments, + }) } else { - let ident = self.ident_expr(word, span, ctx); ast::Expression::Ident(ident) } } @@ -1078,6 +673,12 @@ impl Parser { } (Token::Word(word), span) => { let ident = this.ident_expr(word, span, ctx); + let ident = ast::TemplateElaboratedIdent { + ident, + ident_span: span, + template_list: Vec::new(), + template_list_span: Span::UNDEFINED, + }; let ident = ctx.expressions.append(ast::Expression::Ident(ident), span); this.component_or_swizzle_specifier(span, lexer, ctx, ident)? 
} @@ -1291,7 +892,7 @@ impl Parser { ctx: &mut ExpressionContext<'a, '_, '_>, ) -> Result<'a, ast::GlobalVariable<'a>> { self.push_rule_span(Rule::VariableDecl, lexer); - let template_list = self.maybe_template_list(lexer, ctx)?; + let (template_list, _) = self.maybe_template_list(lexer, ctx)?; let (name, ty) = self.optionally_typed_ident(lexer, ctx)?; let init = if lexer.next_if(Token::Operation('=')) { @@ -1390,7 +991,8 @@ impl Parser { &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Option>>>> { + ) -> Result<'a, (Vec>>, Span)> { + let start = lexer.start_byte_offset(); if lexer.next_if(Token::TemplateArgsStart) { let mut args = Vec::new(); args.push(self.expression(lexer, ctx)?); @@ -1398,12 +1000,30 @@ impl Parser { args.push(self.expression(lexer, ctx)?); } lexer.expect(Token::TemplateArgsEnd)?; - return Ok(Some(args)); + let span = lexer.span_from(start); + Ok((args, span)) } else { - Ok(None) + Ok((Vec::new(), Span::UNDEFINED)) } } + fn template_elaborated_ident<'a>( + &mut self, + word: &'a str, + span: Span, + lexer: &mut Lexer<'a>, + ctx: &mut ExpressionContext<'a, '_, '_>, + ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> { + let ident = self.ident_expr(word, span, ctx); + let (template_list, template_list_span) = self.maybe_template_list(lexer, ctx)?; + Ok(ast::TemplateElaboratedIdent { + ident, + ident_span: span, + template_list, + template_list_span, + }) + } + /// Parses ``, returning T and span of T fn singular_generic<'a>( &mut self, @@ -2029,38 +1649,44 @@ impl Parser { } /// Parse a function call statement. - /// Expects `ident` to be consumed (not in the lexer). - fn func_call_statement<'a>( + /// Expects `token` to be consumed (not in the lexer). 
+ fn maybe_func_call_statement<'a>( &mut self, lexer: &mut Lexer<'a>, - ident: &'a str, - ident_span: Span, context: &mut ExpressionContext<'a, '_, '_>, block: &mut ast::Block<'a>, - ) -> Result<'a, ()> { + token: TokenSpan<'a>, + ) -> Result<'a, bool> { + let (name, name_span) = match token { + (Token::Word(name), span) => (name, span), + _ => return Ok(false), + }; + let ident = self.template_elaborated_ident(name, name_span, lexer, context)?; + if ident.template_list.is_empty() && !matches!(lexer.peek(), (Token::Paren('('), _)) { + return Ok(false); + } + self.push_rule_span(Rule::SingularExpr, lexer); context.unresolved.insert(ast::Dependency { - ident, - usage: ident_span, + ident: name, + usage: name_span, }); + let arguments = self.arguments(lexer, context)?; - let span = lexer.span_with_start(ident_span); + let span = lexer.span_with_start(name_span); block.stmts.push(ast::Statement { - kind: ast::StatementKind::Call { - function: ast::Ident { - name: ident, - span: ident_span, - }, + kind: ast::StatementKind::Call(ast::CallPhrase { + function: ident, arguments, - }, + }), span, }); self.pop_rule_span(lexer); - Ok(()) + Ok(true) } /// Parses func_call_statement and variable_updating_statement @@ -2071,12 +1697,10 @@ impl Parser { block: &mut ast::Block<'a>, token: TokenSpan<'a>, ) -> Result<'a, ()> { - match token { - (Token::Word(name), span) if matches!(lexer.peek(), (Token::Paren('('), _)) => { - self.func_call_statement(lexer, name, span, context, block) - } - token => self.variable_updating_statement(lexer, context, block, token), + if !self.maybe_func_call_statement(lexer, context, block, token)? 
{ + self.variable_updating_statement(lexer, context, block, token)?; } + Ok(()) } /// Parses variable_or_value_statement, func_call_statement and variable_updating_statement @@ -3072,7 +2696,7 @@ impl Parser { } } - lexer.enable_extensions = enable_extensions.clone(); + lexer.enable_extensions = enable_extensions; tu.enable_extensions = enable_extensions; tu.diagnostic_filter_leaf = Self::write_diagnostic_filters(&mut tu.diagnostic_filters, diagnostic_filters, None); diff --git a/naga/src/front/wgsl/tests.rs b/naga/src/front/wgsl/tests.rs index 3864ae8cb41..3e16eb703bd 100644 --- a/naga/src/front/wgsl/tests.rs +++ b/naga/src/front/wgsl/tests.rs @@ -919,6 +919,104 @@ error: unused expressions for template │ │\x20\x20\x20\x20\x20\x20\x20\x20 │ unused +", + ); + } + + #[test] + fn unused_template_list_for_fn() { + assert_parse_err( + " +fn inner_test() {} +fn test() { + inner_test(); +} +", + "\ +error: unused expressions for template + ┌─ wgsl:4:16 + │ +4 │ inner_test(); + │ ^^^^^^^^^^^^^^^^^^^ unused + +", + ); + } + + #[test] + fn unused_template_list_for_struct() { + assert_parse_err( + " +struct test_struct {} +fn test() { + _ = test_struct(); +} +", + "\ +error: unused expressions for template + ┌─ wgsl:4:21 + │ +4 │ _ = test_struct(); + │ ^^^^^^^^^^^^^^^^^^^ unused + +", + ); + } + + #[test] + fn unused_template_list_for_alias() { + assert_parse_err( + " +alias test_alias = f32; +fn test() { + _ = test_alias(); +} +", + "\ +error: unused expressions for template + ┌─ wgsl:4:20 + │ +4 │ _ = test_alias(); + │ ^^^^^^^^^^^^^^^^^^^ unused + +", + ); + } + + #[test] + fn unexpected_template() { + assert_parse_err( + " +fn vertex() -> vec4 { + return vec4; +} +", + "\ +error: unexpected template + ┌─ wgsl:3:12 + │ +3 │ return vec4; + │ ^^^^^^^^^ expected identifier + +", + ); + } + + #[test] + fn expected_template_arg() { + assert_parse_err( + " +fn test() { + bitcast(8); +} +", + "\ +error: `bitcast` needs a template argument specified: `T`, a type + ┌─ wgsl:3:5 + │ 
+3 │ bitcast(8); + │ ^^^^^^^ is missing a template argument + ", ); } diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index 203721278e6..4f19e0d8889 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -248,11 +248,11 @@ fn type_not_constructible() { _ = atomic(0); } "#, - r#"error: type `atomic` is not constructible + r#"error: type `atomic` is not constructible ┌─ wgsl:3:21 │ 3 │ _ = atomic(0); - │ ^^^^^^ type is not constructible + │ ^^^^^^^^^^^ type is not constructible "#, ); @@ -2837,7 +2837,7 @@ fn function_returns_void() { ┌─ wgsl:7:18 │ 7 │ let a = x(); - │ ^ + │ ^^^ │ = note: perhaps you meant to call the function in a separate statement? diff --git a/naga/tests/out/spv/wgsl-const-exprs.spvasm b/naga/tests/out/spv/wgsl-const-exprs.spvasm index 84701b9e163..889bca5b081 100644 --- a/naga/tests/out/spv/wgsl-const-exprs.spvasm +++ b/naga/tests/out/spv/wgsl-const-exprs.spvasm @@ -14,15 +14,15 @@ OpDecorate %13 ArrayStride 4 %4 = OpTypeInt 32 1 %5 = OpTypeBool %6 = OpTypeVector %4 4 -%7 = OpTypeFloat 32 -%8 = OpTypeVector %7 4 +%8 = OpTypeFloat 32 +%7 = OpTypeVector %8 4 %10 = OpConstant %3 2 -%9 = OpTypeArray %7 %10 +%9 = OpTypeArray %8 %10 %11 = OpTypeVector %4 3 %12 = OpTypeVector %5 2 %14 = OpConstant %3 9 %13 = OpTypeArray %4 %14 -%15 = OpTypeVector %7 2 +%15 = OpTypeVector %8 2 %16 = OpConstant %4 3 %17 = OpConstantTrue %5 %18 = OpConstantFalse %5 @@ -31,13 +31,13 @@ OpDecorate %13 ArrayStride 4 %21 = OpConstant %4 1 %22 = OpConstant %4 2 %23 = OpConstant %4 8 -%24 = OpConstant %7 3.141 -%25 = OpConstant %7 6.282 -%26 = OpConstant %7 0.44444445 -%27 = OpConstant %7 0 -%28 = OpConstantComposite %8 %26 %27 %27 %27 -%29 = OpConstant %7 4 -%30 = OpConstant %7 5 +%24 = OpConstant %8 3.141 +%25 = OpConstant %8 6.282 +%26 = OpConstant %8 0.44444445 +%27 = OpConstant %8 0 +%28 = OpConstantComposite %7 %26 %27 %27 %27 +%29 = OpConstant %8 4 +%30 = OpConstant %8 5 %31 = OpConstantComposite %15 %29 %30 %32 
= OpConstantComposite %12 %17 %18 %35 = OpTypeFunction %2 @@ -58,10 +58,10 @@ OpDecorate %13 ArrayStride 4 %86 = OpConstant %3 30 %87 = OpConstant %3 0 %94 = OpConstantNull %3 -%97 = OpConstant %7 1 -%98 = OpConstant %7 2 -%99 = OpConstantComposite %8 %98 %97 %97 %97 -%101 = OpTypePointer Function %8 +%97 = OpConstant %8 1 +%98 = OpConstant %8 2 +%99 = OpConstantComposite %7 %98 %97 %97 %97 +%101 = OpTypePointer Function %7 %106 = OpTypePointer Function %9 %107 = OpConstantNull %9 %111 = OpConstantComposite %11 %21 %21 %21 @@ -81,7 +81,7 @@ OpDecorate %13 ArrayStride 4 %153 = OpConstant %4 9 %154 = OpConstantComposite %13 %21 %22 %16 %19 %151 %47 %152 %23 %153 %155 = OpConstantComposite %6 %21 %22 %16 %19 -%157 = OpTypePointer Function %7 +%157 = OpTypePointer Function %8 %160 = OpConstantNull %4 %162 = OpConstantNull %4 %164 = OpTypePointer Function %13 From 081dbe16c5ef362aea3cfa6ba9aec83ea4cc10c8 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Mon, 20 Oct 2025 14:35:36 +0200 Subject: [PATCH 28/33] handle all type resolution in the lowerer --- naga/src/front/wgsl/error.rs | 9 - naga/src/front/wgsl/lower/mod.rs | 260 ++------ naga/src/front/wgsl/lower/template_list.rs | 2 +- naga/src/front/wgsl/parse/ast.rs | 86 +-- naga/src/front/wgsl/parse/conv.rs | 54 -- naga/src/front/wgsl/parse/lexer.rs | 61 +- naga/src/front/wgsl/parse/mod.rs | 577 +----------------- naga/src/front/wgsl/tests.rs | 25 +- naga/tests/naga/wgsl_errors.rs | 20 +- ...t_atomic_compare_exchange_i32.Compute.glsl | 2 +- ...t_atomic_compare_exchange_u32.Compute.glsl | 2 +- .../glsl/wgsl-atomicOps.cs_main.Compute.glsl | 4 +- ...ides-atomicCompareExchangeWeak.compact.ron | 8 +- ...sl-overrides-atomicCompareExchangeWeak.ron | 8 +- .../out/msl/wgsl-atomicCompareExchange.msl | 8 +- naga/tests/out/msl/wgsl-atomicOps-float32.msl | 6 +- .../out/msl/wgsl-atomicOps-int64-min-max.msl | 6 +- naga/tests/out/msl/wgsl-atomicOps.msl | 8 +- 
.../out/msl/wgsl-bounds-check-zero-atomic.msl | 8 +- .../wgsl-atomicCompareExchange-int64.spvasm | 2 +- .../spv/wgsl-atomicOps-int64-min-max.spvasm | 2 +- .../tests/out/spv/wgsl-atomicOps-int64.spvasm | 2 +- naga/tests/out/spv/wgsl-binding-arrays.spvasm | 124 ++-- naga/tests/out/spv/wgsl-struct-layout.spvasm | 36 +- 24 files changed, 212 insertions(+), 1108 deletions(-) diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index fecd18792ff..10694137cde 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -146,8 +146,6 @@ pub enum ExpectedToken<'a> { WorkgroupSizeSeparator, /// Expected: 'struct', 'let', 'var', 'type', ';', 'fn', eof GlobalItem, - /// Expected a type. - Type, /// Access of `var`, `let`, `const`. Variable, /// Access of a function @@ -206,7 +204,6 @@ pub(crate) enum Error<'a> { UnknownAccess(Span), UnknownIdent(Span, &'a str), UnknownScalarType(Span), - UnknownType(Span), UnknownStorageFormat(Span), UnknownConservativeDepth(Span), UnknownEnableExtension(Span, &'a str), @@ -517,7 +514,6 @@ impl<'a> Error<'a> { "or the end of the file" ) .to_string(), - ExpectedToken::Type => "type".to_string(), ExpectedToken::Variable => "variable access".to_string(), ExpectedToken::Function => "function name".to_string(), ExpectedToken::AfterIdentListArg => { @@ -698,11 +694,6 @@ impl<'a> Error<'a> { labels: vec![(bad_span, "unknown conservative depth".into())], notes: vec![], }, - Error::UnknownType(bad_span) => ParseError { - message: format!("unknown type: `{}`", &source[bad_span]), - labels: vec![(bad_span, "unknown type".into())], - notes: vec![], - }, Error::UnknownEnableExtension(span, word) => ParseError { message: format!("unknown enable-extension `{word}`"), labels: vec![(span, "".into())], diff --git a/naga/src/front/wgsl/lower/mod.rs b/naga/src/front/wgsl/lower/mod.rs index f644f476d19..92eae48ca61 100644 --- a/naga/src/front/wgsl/lower/mod.rs +++ b/naga/src/front/wgsl/lower/mod.rs @@ -93,9 +93,6 @@ pub 
struct GlobalContext<'source, 'temp, 'out> { /// The `TranslationUnit`'s expressions arena. ast_expressions: &'temp Arena>, - /// The `TranslationUnit`'s types arena. - types: &'temp Arena>, - // Naga IR values. /// The map from the names of module-scope declarations to the Naga IR /// `Handle`s we have built for them, owned by `Lowerer::lower`. @@ -117,7 +114,6 @@ impl<'source> GlobalContext<'source, '_, '_> { enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, - types: self.types, module: self.module, const_typifier: self.const_typifier, layouter: self.layouter, @@ -131,7 +127,6 @@ impl<'source> GlobalContext<'source, '_, '_> { enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, - types: self.types, module: self.module, const_typifier: self.const_typifier, layouter: self.layouter, @@ -162,12 +157,6 @@ pub struct StatementContext<'source, 'temp, 'out> { /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions ast_expressions: &'temp Arena>, - /// A reference to [`TranslationUnit::types`] for the translation unit - /// we're lowering. - /// - /// [`TranslationUnit::types`]: ast::TranslationUnit::types - types: &'temp Arena>, - // Naga IR values. /// The map from the names of module-scope declarations to the Naga IR /// `Handle`s we have built for them, owned by `Lowerer::lower`. 
@@ -224,7 +213,6 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { ExpressionContext { enable_extensions: self.enable_extensions, globals: self.globals, - types: self.types, ast_expressions: self.ast_expressions, const_typifier: self.const_typifier, layouter: self.layouter, @@ -252,7 +240,6 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { ExpressionContext { enable_extensions: self.enable_extensions, globals: self.globals, - types: self.types, ast_expressions: self.ast_expressions, const_typifier: self.const_typifier, layouter: self.layouter, @@ -275,7 +262,6 @@ impl<'a, 'temp> StatementContext<'a, 'temp, '_> { enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, - types: self.types, module: self.module, const_typifier: self.const_typifier, layouter: self.layouter, @@ -385,7 +371,6 @@ pub struct ExpressionContext<'source, 'temp, 'out> { // WGSL AST values. ast_expressions: &'temp Arena>, - types: &'temp Arena>, // Naga IR values. 
/// The map from the names of module-scope declarations to the Naga IR @@ -447,7 +432,6 @@ impl<'source, 'temp, 'out> ExpressionContext<'source, 'temp, 'out> { ExpressionContext { enable_extensions: self.enable_extensions, globals: self.globals, - types: self.types, ast_expressions: self.ast_expressions, const_typifier: self.const_typifier, layouter: self.layouter, @@ -476,7 +460,6 @@ impl<'source, 'temp, 'out> ExpressionContext<'source, 'temp, 'out> { enable_extensions: self.enable_extensions, ast_expressions: self.ast_expressions, globals: self.globals, - types: self.types, module: self.module, const_typifier: self.const_typifier, layouter: self.layouter, @@ -1148,7 +1131,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { enable_extensions: tu.enable_extensions, ast_expressions: &tu.expressions, globals: &mut FastHashMap::default(), - types: &tu.types, module: &mut module, const_typifier: &mut Typifier::new(), layouter: &mut proc::Layouter::default(), @@ -1193,7 +1175,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } ast::GlobalDeclKind::Var(ref v) => { let explicit_ty = - v.ty.map(|ast| self.resolve_ast_type(ast, &mut ctx.as_const())) + v.ty.as_ref() + .map(|ast| self.resolve_ast_type(ast, &mut ctx.as_const())) .transpose()?; let (ty, initializer) = self.type_and_init( @@ -1242,7 +1225,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let mut ectx = ctx.as_const(); let explicit_ty = - c.ty.map(|ast| self.resolve_ast_type(ast, &mut ectx)) + c.ty.as_ref() + .map(|ast| self.resolve_ast_type(ast, &mut ectx)) .transpose()?; let (ty, init) = self.type_and_init( @@ -1277,7 +1261,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } ast::GlobalDeclKind::Override(ref o) => { let explicit_ty = - o.ty.map(|ast| self.resolve_ast_type(ast, &mut ctx.as_const())) + o.ty.as_ref() + .map(|ast| self.resolve_ast_type(ast, &mut ctx.as_const())) .transpose()?; let mut ectx = ctx.as_override(); @@ -1332,8 +1317,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } 
ast::GlobalDeclKind::Type(ref alias) => { let ty = self.resolve_named_ast_type( - alias.ty, - Some(alias.name.name.to_string()), + &alias.ty, + alias.name.name.to_string(), &mut ctx.as_const(), )?; ctx.globals @@ -1437,7 +1422,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { .iter() .enumerate() .map(|(i, arg)| -> Result<'_, _> { - let ty = self.resolve_ast_type(arg.ty, &mut ctx.as_const())?; + let ty = self.resolve_ast_type(&arg.ty, &mut ctx.as_const())?; let expr = expressions.append(ir::Expression::FunctionArgument(i as u32), arg.name.span); local_table.insert(arg.handle, Declared::Runtime(Typed::Plain(expr))); @@ -1456,7 +1441,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { .result .as_ref() .map(|res| -> Result<'_, _> { - let ty = self.resolve_ast_type(res.ty, &mut ctx.as_const())?; + let ty = self.resolve_ast_type(&res.ty, &mut ctx.as_const())?; Ok(ir::FunctionResult { ty, binding: self.binding(&res.binding, ty, ctx)?, @@ -1486,7 +1471,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { layouter: ctx.layouter, function: &mut function, named_expressions: &mut named_expressions, - types: ctx.types, module: ctx.module, local_expression_kind_tracker: &mut local_expression_kind_tracker, global_expression_kind_tracker: ctx.global_expression_kind_tracker, @@ -1611,6 +1595,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let explicit_ty = l .ty + .as_ref() .map(|ty| self.resolve_ast_type(ty, &mut ctx.as_const(block, &mut emitter))) .transpose()?; @@ -1650,10 +1635,11 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { emitter.start(&ctx.function.expressions); let explicit_ty = - v.ty.map(|ast| { - self.resolve_ast_type(ast, &mut ctx.as_const(block, &mut emitter)) - }) - .transpose()?; + v.ty.as_ref() + .map(|ast| { + self.resolve_ast_type(ast, &mut ctx.as_const(block, &mut emitter)) + }) + .transpose()?; let mut ectx = ctx.as_expression(block, &mut emitter); let (ty, initializer) = self.type_and_init( @@ -1717,7 +1703,8 @@ impl<'source, 'temp> 
Lowerer<'source, 'temp> { let ectx = &mut ctx.as_const(block, &mut emitter); let explicit_ty = - c.ty.map(|ast| self.resolve_ast_type(ast, &mut ectx.as_const())) + c.ty.as_ref() + .map(|ast| self.resolve_ast_type(ast, &mut ectx.as_const())) .transpose()?; let (_ty, init) = self.type_and_init( @@ -2369,16 +2356,34 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let span = ctx.ast_expressions.get_span(expr); let expr = &ctx.ast_expressions[expr]; - let (ident, ident_span, template_list) = match expr { - ast::Expression::Ident(ast::TemplateElaboratedIdent { - ident: ast::IdentExpr::Unresolved(ident), - ident_span, - template_list, - .. - }) => (*ident, *ident_span, template_list), + let ident = match *expr { + ast::Expression::Ident(ref ident) => ident, _ => return Err(Box::new(Error::UnexpectedExprForTypeExpression(span))), }; + self.type_specifier(ident, ctx, None) + } + + fn type_specifier( + &mut self, + ident: &ast::TemplateElaboratedIdent<'source>, + ctx: &mut ExpressionContext<'source, '_, '_>, + alias_name: Option, + ) -> Result<'source, Handle> { + let &ast::TemplateElaboratedIdent { + ref ident, + ident_span, + ref template_list, + .. 
+ } = ident; + + let ident = match *ident { + ast::IdentExpr::Unresolved(ident) => ident, + ast::IdentExpr::Local(_) => { + return Err(Box::new(Error::UnexpectedExprForTypeExpression(ident_span))) + } + }; + let mut tl = TemplateListIter::new(ident_span, template_list); if let Some(global) = ctx.globals.get(ident) { @@ -2387,7 +2392,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { tl.finish(ctx)?; return Ok(handle); } - _ => return Err(Box::new(Error::UnexpectedExprForTypeExpression(span))), + _ => return Err(Box::new(Error::UnexpectedExprForTypeExpression(ident_span))), } } @@ -2395,7 +2400,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let Some(ty) = ty else { return Err(Box::new(Error::UnknownIdent(ident_span, ident))); }; - let ty = self.finalize_type(ctx, ty, &mut tl)?; + let ty = self.finalize_type(ctx, ty, &mut tl, alias_name)?; tl.finish(ctx)?; @@ -2407,6 +2412,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { ctx: &mut ExpressionContext<'source, '_, '_>, ty: conv::PredeclaredType, tl: &mut TemplateListIter<'_, 'source>, + alias_name: Option, ) -> Result<'source, Handle> { let ty = match ty { conv::PredeclaredType::TypeInner(ty_inner) => { @@ -2433,7 +2439,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } _ => {} } - ctx.as_global().ensure_type_exists(None, ty_inner) + ctx.as_global().ensure_type_exists(alias_name, ty_inner) } conv::PredeclaredType::RayDesc => ctx.module.generate_ray_desc_type(), conv::PredeclaredType::RayIntersection => ctx.module.generate_ray_intersection_type(), @@ -2522,7 +2528,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { ir::TypeInner::RayQuery { vertex_return } } }; - ctx.as_global().ensure_type_exists(None, ty_inner) + ctx.as_global().ensure_type_exists(alias_name, ty_inner) } }; Ok(ty) @@ -2764,7 +2770,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { { Constructor::PartialArray } - _ => Constructor::Type(self.finalize_type(ctx, ty, &mut tl)?), + _ => Constructor::Type(self.finalize_type(ctx, ty, &mut tl, 
None)?), }; tl.finish(ctx)?; let handle = @@ -3977,7 +3983,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let mut doc_comments: Vec>> = Vec::new(); for member in s.members.iter() { - let ty = self.resolve_ast_type(member.ty, &mut ctx.as_const())?; + let ty = self.resolve_ast_type(&member.ty, &mut ctx.as_const())?; ctx.layouter.update(ctx.module.to_ctx()).map_err(|err| { let LayoutErrorInner::TooLarge = err.inner else { @@ -4099,55 +4105,6 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } fn array_size( - &mut self, - size: ast::ArraySize<'source>, - ctx: &mut ExpressionContext<'source, '_, '_>, - ) -> Result<'source, ir::ArraySize> { - Ok(match size { - ast::ArraySize::Constant(expr) => { - let span = ctx.ast_expressions.get_span(expr); - let const_expr = self.expression(expr, &mut ctx.as_const()); - match const_expr { - Ok(value) => { - let len = ctx.const_eval_expr_to_u32(value).map_err(|err| { - Box::new(match err { - proc::U32EvalError::NonConst => { - Error::ExpectedConstExprConcreteIntegerScalar(span) - } - proc::U32EvalError::Negative => { - Error::ExpectedPositiveArrayLength(span) - } - }) - })?; - let size = - NonZeroU32::new(len).ok_or(Error::ExpectedPositiveArrayLength(span))?; - ir::ArraySize::Constant(size) - } - Err(err) => { - if let Error::ConstantEvaluatorError(ref ty, _) = *err { - match **ty { - proc::ConstantEvaluatorError::OverrideExpr => { - ir::ArraySize::Pending(self.array_size_override( - expr, - &mut ctx.as_global().as_override(), - span, - )?) 
- } - _ => { - return Err(err); - } - } - } else { - return Err(err); - } - } - } - } - ast::ArraySize::Dynamic => ir::ArraySize::Dynamic, - }) - } - - fn array_size2( &mut self, expr: Handle>, ctx: &mut ExpressionContext<'source, '_, '_>, @@ -4228,127 +4185,20 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { /// [`SpecialTypes`]: ir::SpecialTypes fn resolve_named_ast_type( &mut self, - handle: Handle>, - name: Option, + ident: &ast::TemplateElaboratedIdent<'source>, + name: String, ctx: &mut ExpressionContext<'source, '_, '_>, ) -> Result<'source, Handle> { - let inner = match ctx.types[handle] { - ast::Type::Scalar(scalar) => scalar.to_inner_scalar(), - ast::Type::Vector { size, ty, ty_span } => { - let ty = self.resolve_ast_type(ty, ctx)?; - let scalar = match ctx.module.types[ty].inner { - ir::TypeInner::Scalar(sc) => sc, - _ => return Err(Box::new(Error::UnknownScalarType(ty_span))), - }; - ir::TypeInner::Vector { size, scalar } - } - ast::Type::Matrix { - rows, - columns, - ty, - ty_span, - } => { - let ty = self.resolve_ast_type(ty, ctx)?; - let scalar = match ctx.module.types[ty].inner { - ir::TypeInner::Scalar(sc) => sc, - _ => return Err(Box::new(Error::UnknownScalarType(ty_span))), - }; - match scalar.kind { - ir::ScalarKind::Float => ir::TypeInner::Matrix { - columns, - rows, - scalar, - }, - _ => return Err(Box::new(Error::BadMatrixScalarKind(ty_span, scalar))), - } - } - ast::Type::Atomic(scalar) => scalar.to_inner_atomic(), - ast::Type::Pointer { base, space } => { - let base = self.resolve_ast_type(base, ctx)?; - ir::TypeInner::Pointer { base, space } - } - ast::Type::Array { base, size } => { - let base = self.resolve_ast_type(base, &mut ctx.as_const())?; - let size = self.array_size(size, ctx)?; - - // Determine the size of the base type, if needed. 
- ctx.layouter.update(ctx.module.to_ctx()).map_err(|err| { - let LayoutErrorInner::TooLarge = err.inner else { - unreachable!("unexpected layout error: {err:?}"); - }; - // Lots of type definitions don't get spans, so this error - // message may not be very useful. - Box::new(Error::TypeTooLarge { - span: ctx.module.types.get_span(err.ty), - }) - })?; - let stride = ctx.layouter[base].to_stride(); - - ir::TypeInner::Array { base, size, stride } - } - ast::Type::Image { - dim, - arrayed, - class, - } => { - if class == crate::ImageClass::External { - // Other than the WGSL backend, every backend that supports - // external textures does so by lowering them to a set of - // ordinary textures and some parameters saying how to - // sample from them. We don't know which backend will - // consume the `Module` we're building, but in case it's not - // WGSL, populate `SpecialTypes::external_texture_params` - // and `SpecialTypes::external_texture_transfer_function` - // with the types the backend will use for the parameter - // buffer. - // - // Neither of these are the type we are lowering here: - // that's an ordinary `TypeInner::Image`. But the fact we - // are lowering a `texture_external` implies the backends - // may need these additional types too. 
- ctx.module.generate_external_texture_types(); - } - ir::TypeInner::Image { - dim, - arrayed, - class, - } - } - ast::Type::Sampler { comparison } => ir::TypeInner::Sampler { comparison }, - ast::Type::AccelerationStructure { vertex_return } => { - ir::TypeInner::AccelerationStructure { vertex_return } - } - ast::Type::RayQuery { vertex_return } => ir::TypeInner::RayQuery { vertex_return }, - ast::Type::BindingArray { base, size } => { - let base = self.resolve_ast_type(base, ctx)?; - let size = self.array_size(size, ctx)?; - ir::TypeInner::BindingArray { base, size } - } - ast::Type::RayDesc => { - return Ok(ctx.module.generate_ray_desc_type()); - } - ast::Type::RayIntersection => { - return Ok(ctx.module.generate_ray_intersection_type()); - } - ast::Type::User(ref ident) => { - return match ctx.globals.get(ident.name) { - Some(&LoweredGlobalDecl::Type(handle)) => Ok(handle), - Some(_) => Err(Box::new(Error::Unexpected(ident.span, ExpectedToken::Type))), - None => Err(Box::new(Error::UnknownType(ident.span))), - } - } - }; - - Ok(ctx.as_global().ensure_type_exists(name, inner)) + self.type_specifier(ident, ctx, Some(name)) } /// Return a Naga `Handle` representing the front-end type `handle`. 
fn resolve_ast_type( &mut self, - handle: Handle>, + ident: &ast::TemplateElaboratedIdent<'source>, ctx: &mut ExpressionContext<'source, '_, '_>, ) -> Result<'source, Handle> { - self.resolve_named_ast_type(handle, None, ctx) + self.type_specifier(ident, ctx, None) } fn binding( diff --git a/naga/src/front/wgsl/lower/template_list.rs b/naga/src/front/wgsl/lower/template_list.rs index a9a28ccf1c2..19539b9f263 100644 --- a/naga/src/front/wgsl/lower/template_list.rs +++ b/naga/src/front/wgsl/lower/template_list.rs @@ -83,7 +83,7 @@ impl<'iter, 'source> TemplateListIter<'iter, 'source> { ctx: &mut ExpressionContext<'source, '_, '_>, ) -> Result<'source, ir::ArraySize> { if let Some(expr) = self.template_list.next() { - lowerer.array_size2(*expr, ctx) + lowerer.array_size(*expr, ctx) } else { Ok(ir::ArraySize::Dynamic) } diff --git a/naga/src/front/wgsl/parse/ast.rs b/naga/src/front/wgsl/parse/ast.rs index 10811bce468..85cb64210f6 100644 --- a/naga/src/front/wgsl/parse/ast.rs +++ b/naga/src/front/wgsl/parse/ast.rs @@ -4,7 +4,6 @@ use core::hash::Hash; use crate::diagnostic_filter::DiagnosticFilterNode; use crate::front::wgsl::parse::directive::enable_extension::EnableExtensions; use crate::front::wgsl::parse::number::Number; -use crate::front::wgsl::Scalar; use crate::{Arena, FastIndexSet, Handle, Span}; #[derive(Debug, Default)] @@ -24,12 +23,6 @@ pub struct TranslationUnit<'a> { /// [`Function`]: crate::Function pub expressions: Arena>, - /// Non-user-defined types, like `vec4` or `array`. - /// - /// These are referred to by `Handle>` values. - /// User-defined types are referred to by name until lowering. - pub types: Arena>, - /// Arena for all diagnostic filter rules parsed in this module, including those in functions. 
/// /// See [`DiagnosticFilterNode`] for details on how the tree is represented and used in @@ -139,14 +132,14 @@ pub enum GlobalDeclKind<'a> { #[derive(Debug)] pub struct FunctionArgument<'a> { pub name: Ident<'a>, - pub ty: Handle>, + pub ty: TemplateElaboratedIdent<'a>, pub binding: Option>, pub handle: Handle, } #[derive(Debug)] pub struct FunctionResult<'a> { - pub ty: Handle>, + pub ty: TemplateElaboratedIdent<'a>, pub binding: Option>, pub must_use: bool, } @@ -194,7 +187,7 @@ pub struct GlobalVariable<'a> { pub name: Ident<'a>, pub template_list: Vec>>, pub binding: Option>, - pub ty: Option>>, + pub ty: Option>, pub init: Option>>, pub doc_comments: Vec<&'a str>, } @@ -202,7 +195,7 @@ pub struct GlobalVariable<'a> { #[derive(Debug)] pub struct StructMember<'a> { pub name: Ident<'a>, - pub ty: Handle>, + pub ty: TemplateElaboratedIdent<'a>, pub binding: Option>, pub align: Option>>, pub size: Option>>, @@ -219,13 +212,13 @@ pub struct Struct<'a> { #[derive(Debug)] pub struct TypeAlias<'a> { pub name: Ident<'a>, - pub ty: Handle>, + pub ty: TemplateElaboratedIdent<'a>, } #[derive(Debug)] pub struct Const<'a> { pub name: Ident<'a>, - pub ty: Option>>, + pub ty: Option>, pub init: Handle>, pub doc_comments: Vec<&'a str>, } @@ -234,69 +227,10 @@ pub struct Const<'a> { pub struct Override<'a> { pub name: Ident<'a>, pub id: Option>>, - pub ty: Option>>, + pub ty: Option>, pub init: Option>>, } -/// The size of an [`Array`] or [`BindingArray`]. -/// -/// [`Array`]: Type::Array -/// [`BindingArray`]: Type::BindingArray -#[derive(Debug, Copy, Clone)] -pub enum ArraySize<'a> { - /// The length as a constant expression. 
- Constant(Handle>), - Dynamic, -} - -#[derive(Debug)] -pub enum Type<'a> { - Scalar(Scalar), - Vector { - size: crate::VectorSize, - ty: Handle>, - ty_span: Span, - }, - Matrix { - columns: crate::VectorSize, - rows: crate::VectorSize, - ty: Handle>, - ty_span: Span, - }, - Atomic(Scalar), - Pointer { - base: Handle>, - space: crate::AddressSpace, - }, - Array { - base: Handle>, - size: ArraySize<'a>, - }, - Image { - dim: crate::ImageDimension, - arrayed: bool, - class: crate::ImageClass, - }, - Sampler { - comparison: bool, - }, - AccelerationStructure { - vertex_return: bool, - }, - RayQuery { - vertex_return: bool, - }, - RayDesc, - RayIntersection, - BindingArray { - base: Handle>, - size: ArraySize<'a>, - }, - - /// A user-defined type, like a struct or a type alias. - User(Ident<'a>), -} - #[derive(Debug, Default)] pub struct Block<'a> { pub stmts: Vec>, @@ -395,7 +329,7 @@ pub enum Expression<'a> { #[derive(Debug)] pub struct LocalVariable<'a> { pub name: Ident<'a>, - pub ty: Option>>, + pub ty: Option>, pub init: Option>>, pub handle: Handle, } @@ -403,7 +337,7 @@ pub struct LocalVariable<'a> { #[derive(Debug)] pub struct Let<'a> { pub name: Ident<'a>, - pub ty: Option>>, + pub ty: Option>, pub init: Handle>, pub handle: Handle, } @@ -411,7 +345,7 @@ pub struct Let<'a> { #[derive(Debug)] pub struct LocalConst<'a> { pub name: Ident<'a>, - pub ty: Option>>, + pub ty: Option>, pub init: Handle>, pub handle: Handle, } diff --git a/naga/src/front/wgsl/parse/conv.rs b/naga/src/front/wgsl/parse/conv.rs index 5b6eb3b6b40..70961041fd2 100644 --- a/naga/src/front/wgsl/parse/conv.rs +++ b/naga/src/front/wgsl/parse/conv.rs @@ -152,60 +152,6 @@ pub fn map_storage_format(word: &str, span: Span) -> Result<'_, crate::StorageFo }) } -pub fn get_scalar_type( - enable_extensions: &EnableExtensions, - span: Span, - word: &str, -) -> Result<'static, Option> { - use crate::ScalarKind as Sk; - let scalar = match word { - "f16" => Some(Scalar { - kind: Sk::Float, - width: 2, - 
}), - "f32" => Some(Scalar { - kind: Sk::Float, - width: 4, - }), - "f64" => Some(Scalar { - kind: Sk::Float, - width: 8, - }), - "i32" => Some(Scalar { - kind: Sk::Sint, - width: 4, - }), - "u32" => Some(Scalar { - kind: Sk::Uint, - width: 4, - }), - "i64" => Some(Scalar { - kind: Sk::Sint, - width: 8, - }), - "u64" => Some(Scalar { - kind: Sk::Uint, - width: 8, - }), - "bool" => Some(Scalar { - kind: Sk::Bool, - width: crate::BOOL_WIDTH, - }), - _ => None, - }; - - if matches!(scalar, Some(Scalar::F16)) - && !enable_extensions.contains(ImplementedEnableExtension::F16) - { - return Err(Box::new(Error::EnableExtensionNotEnabled { - span, - kind: ImplementedEnableExtension::F16.into(), - })); - } - - Ok(scalar) -} - pub fn map_derivative(word: &str) -> Option<(crate::DerivativeAxis, crate::DerivativeControl)> { use crate::{DerivativeAxis as Axis, DerivativeControl as Ctrl}; match word { diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 78dd6ec8c0f..52654392ca6 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -1,8 +1,7 @@ use super::{number::consume_number, Error, ExpectedToken, Result}; use crate::front::wgsl::error::NumberError; use crate::front::wgsl::parse::directive::enable_extension::EnableExtensions; -use crate::front::wgsl::parse::{conv, Number}; -use crate::front::wgsl::Scalar; +use crate::front::wgsl::parse::Number; use crate::Span; use alloc::{boxed::Box, vec::Vec}; @@ -534,10 +533,6 @@ impl<'a> Lexer<'a> { Ok(()) } - pub(in crate::front::wgsl) fn end_of_generic_arguments(&mut self) -> bool { - self.next_if(Token::Separator(',')) && self.peek().0 != Token::TemplateArgsEnd - } - pub(in crate::front::wgsl) fn next_ident_with_span(&mut self) -> Result<'a, (&'a str, Span)> { match self.next() { (Token::Word("_"), span) => Err(Box::new(Error::InvalidIdentifierUnderscore(span))), @@ -566,60 +561,6 @@ impl<'a> Lexer<'a> { } } - /// Parses a generic scalar type, for example ``. 
- /// - /// Returns the span covering the inner type, excluding the brackets. - pub(in crate::front::wgsl) fn next_scalar_generic_with_span( - &mut self, - ) -> Result<'a, (Scalar, Span)> { - self.expect(Token::TemplateArgsStart)?; - - let (scalar, span) = match self.next() { - (Token::Word(word), span) => { - conv::get_scalar_type(&self.enable_extensions, span, word)? - .map(|scalar| (scalar, span)) - .ok_or(Error::UnknownScalarType(span))? - } - (_, span) => return Err(Box::new(Error::UnknownScalarType(span))), - }; - - self.expect(Token::TemplateArgsEnd)?; - Ok((scalar, span)) - } - - pub(in crate::front::wgsl) fn next_format_generic( - &mut self, - ) -> Result<'a, (crate::StorageFormat, crate::StorageAccess)> { - self.expect(Token::TemplateArgsStart)?; - let (ident, ident_span) = self.next_ident_with_span()?; - let format = conv::map_storage_format(ident, ident_span)?; - self.expect(Token::Separator(','))?; - let (ident, ident_span) = self.next_ident_with_span()?; - let access = conv::map_access_mode(ident, ident_span)?; - self.expect(Token::TemplateArgsEnd)?; - Ok((format, access)) - } - - pub(in crate::front::wgsl) fn next_acceleration_structure_flags(&mut self) -> Result<'a, bool> { - Ok(if self.next_if(Token::TemplateArgsStart) { - if !self.next_if(Token::TemplateArgsEnd) { - let (name, span) = self.next_ident_with_span()?; - let ret = if name == "vertex_return" { - true - } else { - return Err(Box::new(Error::UnknownAttribute(span))); - }; - self.next_if(Token::Separator(',')); - self.expect(Token::TemplateArgsEnd)?; - ret - } else { - false - } - } else { - false - }) - } - pub(in crate::front::wgsl) fn open_arguments(&mut self) -> Result<'a, ()> { self.expect(Token::Paren('(')) } diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index a8a128a018e..eab5e2daec7 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -11,7 +11,7 @@ use 
crate::front::wgsl::parse::directive::language_extension::LanguageExtension; use crate::front::wgsl::parse::directive::DirectiveKind; use crate::front::wgsl::parse::lexer::{Lexer, Token, TokenSpan}; use crate::front::wgsl::parse::number::Number; -use crate::front::wgsl::{Result, Scalar}; +use crate::front::wgsl::Result; use crate::front::SymbolTable; use crate::{Arena, FastHashSet, FastIndexSet, Handle, ShaderStage, Span}; @@ -34,12 +34,6 @@ struct ExpressionContext<'input, 'temp, 'out> { /// [`TranslationUnit::expressions`]: ast::TranslationUnit::expressions expressions: &'out mut Arena>, - /// The [`TranslationUnit::types`] arena to which we should contribute new - /// types. - /// - /// [`TranslationUnit::types`]: ast::TranslationUnit::types - types: &'out mut Arena>, - /// A map from identifiers in scope to the locals/arguments they represent. /// /// The handles refer to the [`locals`] arena; see that field's @@ -120,11 +114,6 @@ impl<'a> ExpressionContext<'a, '_, '_> { Ok(handle) } } - - fn new_scalar(&mut self, scalar: Scalar) -> Handle> { - self.types - .append(ast::Type::Scalar(scalar), Span::UNDEFINED) - } } /// Which grammar rule we are in the midst of parsing. @@ -136,7 +125,6 @@ impl<'a> ExpressionContext<'a, '_, '_> { enum Rule { Attribute, VariableDecl, - TypeDecl, FunctionDecl, Block, Statement, @@ -561,17 +549,6 @@ impl Parser { Ok(expr) } - fn const_generic_expression<'a>( - &mut self, - lexer: &mut Lexer<'a>, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Handle>> { - self.push_rule_span(Rule::GenericExpr, lexer); - let expr = self.expression(lexer, ctx)?; - self.pop_rule_span(lexer); - Ok(expr) - } - /// Parse a `unary_expression`. 
fn unary_expression<'a>( &mut self, @@ -873,7 +850,7 @@ impl Parser { &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, (ast::Ident<'a>, Option>>)> { + ) -> Result<'a, (ast::Ident<'a>, Option>)> { let name = lexer.next_ident()?; let ty = if lexer.next_if(Token::Separator(':')) { @@ -1024,555 +1001,13 @@ impl Parser { }) } - /// Parses ``, returning T and span of T - fn singular_generic<'a>( - &mut self, - lexer: &mut Lexer<'a>, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, (Handle>, Span)> { - lexer.expect(Token::TemplateArgsStart)?; - let start = lexer.start_byte_offset(); - let ty = self.type_specifier(lexer, ctx)?; - let span = lexer.span_from(start); - lexer.next_if(Token::Separator(',')); - lexer.expect(Token::TemplateArgsEnd)?; - Ok((ty, span)) - } - - fn matrix_with_type<'a>( - &mut self, - lexer: &mut Lexer<'a>, - ctx: &mut ExpressionContext<'a, '_, '_>, - columns: crate::VectorSize, - rows: crate::VectorSize, - ) -> Result<'a, ast::Type<'a>> { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - Ok(ast::Type::Matrix { - columns, - rows, - ty, - ty_span, - }) - } - - fn type_specifier_impl<'a>( - &mut self, - lexer: &mut Lexer<'a>, - word: &'a str, - span: Span, - ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Option>> { - if let Some(scalar) = conv::get_scalar_type(&lexer.enable_extensions, span, word)? 
{ - return Ok(Some(ast::Type::Scalar(scalar))); - } - - Ok(Some(match word { - "vec2" => { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - ast::Type::Vector { - size: crate::VectorSize::Bi, - ty, - ty_span, - } - } - "vec2i" => ast::Type::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - }, - "vec2u" => ast::Type::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::U32), - ty_span: Span::UNDEFINED, - }, - "vec2f" => ast::Type::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "vec2h" => ast::Type::Vector { - size: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "vec3" => { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - ast::Type::Vector { - size: crate::VectorSize::Tri, - ty, - ty_span, - } - } - "vec3i" => ast::Type::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - }, - "vec3u" => ast::Type::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::U32), - ty_span: Span::UNDEFINED, - }, - "vec3f" => ast::Type::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "vec3h" => ast::Type::Vector { - size: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "vec4" => { - let (ty, ty_span) = self.singular_generic(lexer, ctx)?; - ast::Type::Vector { - size: crate::VectorSize::Quad, - ty, - ty_span, - } - } - "vec4i" => ast::Type::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::I32), - ty_span: Span::UNDEFINED, - }, - "vec4u" => ast::Type::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::U32), - ty_span: Span::UNDEFINED, - }, - "vec4f" => ast::Type::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - 
"vec4h" => ast::Type::Vector { - size: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat2x2" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Bi)? - } - "mat2x2f" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat2x2h" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat2x3" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Tri)? - } - "mat2x3f" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat2x3h" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat2x4" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Bi, crate::VectorSize::Quad)? - } - "mat2x4f" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat2x4h" => ast::Type::Matrix { - columns: crate::VectorSize::Bi, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat3x2" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Bi)? 
- } - "mat3x2f" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat3x2h" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat3x3" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Tri)? - } - "mat3x3f" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat3x3h" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat3x4" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Tri, crate::VectorSize::Quad)? - } - "mat3x4f" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat3x4h" => ast::Type::Matrix { - columns: crate::VectorSize::Tri, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat4x2" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Bi)? - } - "mat4x2f" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat4x2h" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Bi, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat4x3" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Tri)? 
- } - "mat4x3f" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat4x3h" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Tri, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "mat4x4" => { - self.matrix_with_type(lexer, ctx, crate::VectorSize::Quad, crate::VectorSize::Quad)? - } - "mat4x4f" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F32), - ty_span: Span::UNDEFINED, - }, - "mat4x4h" => ast::Type::Matrix { - columns: crate::VectorSize::Quad, - rows: crate::VectorSize::Quad, - ty: ctx.new_scalar(Scalar::F16), - ty_span: Span::UNDEFINED, - }, - "atomic" => { - let (scalar, _) = lexer.next_scalar_generic_with_span()?; - ast::Type::Atomic(scalar) - } - "ptr" => { - lexer.expect(Token::TemplateArgsStart)?; - let (ident, span) = lexer.next_ident_with_span()?; - let mut space = conv::map_address_space(ident, span)?; - lexer.expect(Token::Separator(','))?; - let base = self.type_specifier(lexer, ctx)?; - if let crate::AddressSpace::Storage { ref mut access } = space { - *access = if lexer.end_of_generic_arguments() { - let (ident, span) = lexer.next_ident_with_span()?; - let access = conv::map_access_mode(ident, span)?; - lexer.next_if(Token::Separator(',')); - access - } else { - crate::StorageAccess::LOAD - }; - } - lexer.expect(Token::TemplateArgsEnd)?; - ast::Type::Pointer { base, space } - } - "array" => { - lexer.expect(Token::TemplateArgsStart)?; - let base = self.type_specifier(lexer, ctx)?; - let size = if lexer.end_of_generic_arguments() { - let size = self.const_generic_expression(lexer, ctx)?; - lexer.next_if(Token::Separator(',')); - ast::ArraySize::Constant(size) - } else { - ast::ArraySize::Dynamic - }; - lexer.expect(Token::TemplateArgsEnd)?; - - ast::Type::Array { base, size } - } - "binding_array" => { - 
lexer.expect(Token::TemplateArgsStart)?; - let base = self.type_specifier(lexer, ctx)?; - let size = if lexer.end_of_generic_arguments() { - let size = self.unary_expression(lexer, ctx)?; - lexer.next_if(Token::Separator(',')); - ast::ArraySize::Constant(size) - } else { - ast::ArraySize::Dynamic - }; - lexer.expect(Token::TemplateArgsEnd)?; - - ast::Type::BindingArray { base, size } - } - "sampler" => ast::Type::Sampler { comparison: false }, - "sampler_comparison" => ast::Type::Sampler { comparison: true }, - "texture_1d" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D1, - arrayed: false, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_1d_array" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D1, - arrayed: true, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_2d" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_2d_array" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: true, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_3d" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D3, - arrayed: false, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - 
"texture_cube" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::Cube, - arrayed: false, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_cube_array" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::Cube, - arrayed: true, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: false, - }, - } - } - "texture_multisampled_2d" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: true, - }, - } - } - "texture_multisampled_2d_array" => { - let (scalar, span) = lexer.next_scalar_generic_with_span()?; - Self::check_texture_sample_type(scalar, span)?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: true, - class: crate::ImageClass::Sampled { - kind: scalar.kind, - multi: true, - }, - } - } - "texture_depth_2d" => ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::Depth { multi: false }, - }, - "texture_depth_2d_array" => ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: true, - class: crate::ImageClass::Depth { multi: false }, - }, - "texture_depth_cube" => ast::Type::Image { - dim: crate::ImageDimension::Cube, - arrayed: false, - class: crate::ImageClass::Depth { multi: false }, - }, - "texture_depth_cube_array" => ast::Type::Image { - dim: crate::ImageDimension::Cube, - arrayed: true, - class: crate::ImageClass::Depth { multi: false }, - }, - "texture_depth_multisampled_2d" => ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::Depth { multi: true 
}, - }, - "texture_external" => ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::External, - }, - "texture_storage_1d" => { - let (format, access) = lexer.next_format_generic()?; - ast::Type::Image { - dim: crate::ImageDimension::D1, - arrayed: false, - class: crate::ImageClass::Storage { format, access }, - } - } - "texture_storage_1d_array" => { - let (format, access) = lexer.next_format_generic()?; - ast::Type::Image { - dim: crate::ImageDimension::D1, - arrayed: true, - class: crate::ImageClass::Storage { format, access }, - } - } - "texture_storage_2d" => { - let (format, access) = lexer.next_format_generic()?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: false, - class: crate::ImageClass::Storage { format, access }, - } - } - "texture_storage_2d_array" => { - let (format, access) = lexer.next_format_generic()?; - ast::Type::Image { - dim: crate::ImageDimension::D2, - arrayed: true, - class: crate::ImageClass::Storage { format, access }, - } - } - "texture_storage_3d" => { - let (format, access) = lexer.next_format_generic()?; - ast::Type::Image { - dim: crate::ImageDimension::D3, - arrayed: false, - class: crate::ImageClass::Storage { format, access }, - } - } - "acceleration_structure" => { - let vertex_return = lexer.next_acceleration_structure_flags()?; - ast::Type::AccelerationStructure { vertex_return } - } - "ray_query" => { - let vertex_return = lexer.next_acceleration_structure_flags()?; - ast::Type::RayQuery { vertex_return } - } - "RayDesc" => ast::Type::RayDesc, - "RayIntersection" => ast::Type::RayIntersection, - _ => return Ok(None), - })) - } - - fn check_texture_sample_type(scalar: Scalar, span: Span) -> Result<'static, ()> { - use crate::ScalarKind::*; - // Validate according to https://gpuweb.github.io/gpuweb/wgsl/#sampled-texture-type - match scalar { - Scalar { - kind: Float | Sint | Uint, - width: 4, - } => Ok(()), - Scalar { - kind: Uint, - width: 8, - } => Ok(()), - _ 
=> Err(Box::new(Error::BadTextureSampleType { span, scalar })), - } - } - fn type_specifier<'a>( &mut self, lexer: &mut Lexer<'a>, ctx: &mut ExpressionContext<'a, '_, '_>, - ) -> Result<'a, Handle>> { - self.track_recursion(|this| { - this.push_rule_span(Rule::TypeDecl, lexer); - - let (name, span) = lexer.next_ident_with_span()?; - - let ty = match this.type_specifier_impl(lexer, name, span, ctx)? { - Some(ty) => ty, - None => { - ctx.unresolved.insert(ast::Dependency { - ident: name, - usage: span, - }); - ast::Type::User(ast::Ident { name, span }) - } - }; - - this.pop_rule_span(lexer); - - let handle = ctx.types.append(ty, Span::UNDEFINED); - Ok(handle) - }) + ) -> Result<'a, ast::TemplateElaboratedIdent<'a>> { + let (name, span) = lexer.next_ident_with_span()?; + self.template_elaborated_ident(name, span, lexer, ctx) } /// Parses assignment, increment and decrement statements @@ -2230,7 +1665,6 @@ impl Parser { expressions: &mut out.expressions, local_table: &mut SymbolTable::default(), locals: &mut locals, - types: &mut out.types, unresolved: dependencies, }; @@ -2362,7 +1796,6 @@ impl Parser { expressions: &mut out.expressions, local_table: &mut SymbolTable::default(), locals: &mut Arena::new(), - types: &mut out.types, unresolved: &mut dependencies, }; let mut diagnostic_filters = DiagnosticFilterMap::new(); diff --git a/naga/src/front/wgsl/tests.rs b/naga/src/front/wgsl/tests.rs index 3e16eb703bd..5b4cd898d92 100644 --- a/naga/src/front/wgsl/tests.rs +++ b/naga/src/front/wgsl/tests.rs @@ -345,7 +345,7 @@ fn parse_texture_load() { .unwrap(); parse_str( " - var t: texture_multisampled_2d_array; + var t: texture_2d_array; fn foo() { let r: vec4 = textureLoad(t, vec2(10, 20), 2, 3); } @@ -354,9 +354,9 @@ fn parse_texture_load() { .unwrap(); parse_str( " - var t: texture_storage_1d_array; + var t: texture_storage_1d; fn foo() { - let r: vec4 = textureLoad(t, 10, 2); + let r: vec4 = textureLoad(t, 10); } ", ) @@ -380,12 +380,21 @@ fn parse_texture_store() { fn 
parse_texture_query() { parse_str( " - var t: texture_multisampled_2d_array; + var t: texture_multisampled_2d; fn foo() { - var dim: vec2 = textureDimensions(t); - dim = textureDimensions(t, 0); - let layers: u32 = textureNumLayers(t); - let samples: u32 = textureNumSamples(t); + let dim = textureDimensions(t); + let samples = textureNumSamples(t); + } + ", + ) + .unwrap(); + parse_str( + " + var t: texture_2d_array; + fn foo() { + let dim = textureDimensions(t); + let levels = textureNumLevels(t); + let layers = textureNumLayers(t); } ", ) diff --git a/naga/tests/naga/wgsl_errors.rs b/naga/tests/naga/wgsl_errors.rs index 4f19e0d8889..49f70198b24 100644 --- a/naga/tests/naga/wgsl_errors.rs +++ b/naga/tests/naga/wgsl_errors.rs @@ -121,12 +121,12 @@ fn invalid_float() { #[test] fn invalid_texture_sample_type() { check( - "const x: texture_2d;", + "var x: texture_2d;", r###"error: texture sample type must be one of f32, i32 or u32, but found bool - ┌─ wgsl:1:21 + ┌─ wgsl:1:19 │ -1 │ const x: texture_2d; - │ ^^^^ must be one of f32, i32 or u32 +1 │ var x: texture_2d; + │ ^^^^ must be one of f32, i32 or u32 "###, ); @@ -513,11 +513,11 @@ fn unknown_type() { r#" const a: Vec = 10; "#, - r#"error: unknown type: `Vec` + r#"error: no definition in scope for identifier: `Vec` ┌─ wgsl:2:22 │ 2 │ const a: Vec = 10; - │ ^^^ unknown type + │ ^^^ unknown identifier "#, ); @@ -527,13 +527,13 @@ fn unknown_type() { fn unknown_storage_format() { check( r#" - const storage1: texture_storage_1d; + var storage1: texture_storage_1d; "#, r#"error: unknown storage format: `rgba` - ┌─ wgsl:2:48 + ┌─ wgsl:2:46 │ -2 │ const storage1: texture_storage_1d; - │ ^^^^ unknown storage format +2 │ var storage1: texture_storage_1d; + │ ^^^^ unknown storage format "#, ); diff --git a/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_i32.Compute.glsl b/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_i32.Compute.glsl index b45d8299f2d..7ab8af17d8b 
100644 --- a/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_i32.Compute.glsl +++ b/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_i32.Compute.glsl @@ -15,7 +15,7 @@ struct _atomic_compare_exchange_result_Uint_4_ { }; const uint SIZE = 128u; -layout(std430) buffer type_2_block_0Compute { int _group_0_binding_0_cs[128]; }; +layout(std430) buffer type_3_block_0Compute { int _group_0_binding_0_cs[128]; }; void main() { diff --git a/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_u32.Compute.glsl b/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_u32.Compute.glsl index 171b9a869fb..552a398c1ec 100644 --- a/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_u32.Compute.glsl +++ b/naga/tests/out/glsl/wgsl-atomicCompareExchange.test_atomic_compare_exchange_u32.Compute.glsl @@ -15,7 +15,7 @@ struct _atomic_compare_exchange_result_Uint_4_ { }; const uint SIZE = 128u; -layout(std430) buffer type_4_block_0Compute { uint _group_0_binding_1_cs[128]; }; +layout(std430) buffer type_5_block_0Compute { uint _group_0_binding_1_cs[128]; }; void main() { diff --git a/naga/tests/out/glsl/wgsl-atomicOps.cs_main.Compute.glsl b/naga/tests/out/glsl/wgsl-atomicOps.cs_main.Compute.glsl index 104f671218d..8429fbb3b19 100644 --- a/naga/tests/out/glsl/wgsl-atomicOps.cs_main.Compute.glsl +++ b/naga/tests/out/glsl/wgsl-atomicOps.cs_main.Compute.glsl @@ -17,9 +17,9 @@ struct _atomic_compare_exchange_result_Sint_4_ { int old_value; bool exchanged; }; -layout(std430) buffer type_block_0Compute { uint _group_0_binding_0_cs; }; +layout(std430) buffer type_1_block_0Compute { uint _group_0_binding_0_cs; }; -layout(std430) buffer type_2_block_1Compute { int _group_0_binding_1_cs[2]; }; +layout(std430) buffer type_4_block_1Compute { int _group_0_binding_1_cs[2]; }; layout(std430) buffer Struct_block_2Compute { Struct _group_0_binding_2_cs; }; diff --git 
a/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.compact.ron b/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.compact.ron index 640ee25ca49..b275abcd34b 100644 --- a/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.compact.ron +++ b/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.compact.ron @@ -9,14 +9,14 @@ ), ( name: None, - inner: Atomic(( + inner: Scalar(( kind: Uint, width: 4, )), ), ( name: None, - inner: Scalar(( + inner: Atomic(( kind: Uint, width: 4, )), @@ -34,7 +34,7 @@ members: [ ( name: Some("old_value"), - ty: 2, + ty: 1, binding: None, offset: 0, ), @@ -76,7 +76,7 @@ name: Some("a"), space: WorkGroup, binding: None, - ty: 1, + ty: 2, init: None, ), ], diff --git a/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.ron b/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.ron index 640ee25ca49..b275abcd34b 100644 --- a/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.ron +++ b/naga/tests/out/ir/wgsl-overrides-atomicCompareExchangeWeak.ron @@ -9,14 +9,14 @@ ), ( name: None, - inner: Atomic(( + inner: Scalar(( kind: Uint, width: 4, )), ), ( name: None, - inner: Scalar(( + inner: Atomic(( kind: Uint, width: 4, )), @@ -34,7 +34,7 @@ members: [ ( name: Some("old_value"), - ty: 2, + ty: 1, binding: None, offset: 0, ), @@ -76,7 +76,7 @@ name: Some("a"), space: WorkGroup, binding: None, - ty: 1, + ty: 2, init: None, ), ], diff --git a/naga/tests/out/msl/wgsl-atomicCompareExchange.msl b/naga/tests/out/msl/wgsl-atomicCompareExchange.msl index 967e003a791..a70acbe2cbd 100644 --- a/naga/tests/out/msl/wgsl-atomicCompareExchange.msl +++ b/naga/tests/out/msl/wgsl-atomicCompareExchange.msl @@ -4,10 +4,10 @@ using metal::uint; -struct type_2 { +struct type_3 { metal::atomic_int inner[128]; }; -struct type_4 { +struct type_5 { metal::atomic_uint inner[128]; }; struct _atomic_compare_exchange_result_Sint_4_ { @@ -73,7 +73,7 @@ _atomic_compare_exchange_result_Uint_4_ 
naga_atomic_compare_exchange_weak_explic constant uint SIZE = 128u; kernel void test_atomic_compare_exchange_i32_( - device type_2& arr_i32_ [[user(fake0)]] + device type_3& arr_i32_ [[user(fake0)]] ) { uint i = 0u; int old = {}; @@ -124,7 +124,7 @@ kernel void test_atomic_compare_exchange_i32_( kernel void test_atomic_compare_exchange_u32_( - device type_4& arr_u32_ [[user(fake0)]] + device type_5& arr_u32_ [[user(fake0)]] ) { uint i_1 = 0u; uint old_1 = {}; diff --git a/naga/tests/out/msl/wgsl-atomicOps-float32.msl b/naga/tests/out/msl/wgsl-atomicOps-float32.msl index 40b5a7a14f6..dc503498d9f 100644 --- a/naga/tests/out/msl/wgsl-atomicOps-float32.msl +++ b/naga/tests/out/msl/wgsl-atomicOps-float32.msl @@ -4,12 +4,12 @@ using metal::uint; -struct type_1 { +struct type_2 { metal::atomic_float inner[2]; }; struct Struct { metal::atomic_float atomic_scalar; - type_1 atomic_arr; + type_2 atomic_arr; }; struct cs_mainInput { @@ -17,7 +17,7 @@ struct cs_mainInput { kernel void cs_main( metal::uint3 id [[thread_position_in_threadgroup]] , device metal::atomic_float& storage_atomic_scalar [[user(fake0)]] -, device type_1& storage_atomic_arr [[user(fake0)]] +, device type_2& storage_atomic_arr [[user(fake0)]] , device Struct& storage_struct [[user(fake0)]] ) { metal::atomic_store_explicit(&storage_atomic_scalar, 1.5, metal::memory_order_relaxed); diff --git a/naga/tests/out/msl/wgsl-atomicOps-int64-min-max.msl b/naga/tests/out/msl/wgsl-atomicOps-int64-min-max.msl index f69a2a49bd3..09009356bd4 100644 --- a/naga/tests/out/msl/wgsl-atomicOps-int64-min-max.msl +++ b/naga/tests/out/msl/wgsl-atomicOps-int64-min-max.msl @@ -4,12 +4,12 @@ using metal::uint; -struct type_1 { +struct type_2 { metal::atomic_ulong inner[2]; }; struct Struct { metal::atomic_ulong atomic_scalar; - type_1 atomic_arr; + type_2 atomic_arr; }; struct cs_mainInput { @@ -17,7 +17,7 @@ struct cs_mainInput { kernel void cs_main( metal::uint3 id [[thread_position_in_threadgroup]] , device metal::atomic_ulong& 
storage_atomic_scalar [[user(fake0)]] -, device type_1& storage_atomic_arr [[user(fake0)]] +, device type_2& storage_atomic_arr [[user(fake0)]] , device Struct& storage_struct [[user(fake0)]] , constant ulong& input [[user(fake0)]] ) { diff --git a/naga/tests/out/msl/wgsl-atomicOps.msl b/naga/tests/out/msl/wgsl-atomicOps.msl index 4afdde1540f..ea82237e72f 100644 --- a/naga/tests/out/msl/wgsl-atomicOps.msl +++ b/naga/tests/out/msl/wgsl-atomicOps.msl @@ -4,12 +4,12 @@ using metal::uint; -struct type_2 { +struct type_4 { metal::atomic_int inner[2]; }; struct Struct { metal::atomic_uint atomic_scalar; - type_2 atomic_arr; + type_4 atomic_arr; }; struct _atomic_compare_exchange_result_Uint_4_ { uint old_value; @@ -77,10 +77,10 @@ struct cs_mainInput { kernel void cs_main( metal::uint3 id [[thread_position_in_threadgroup]] , device metal::atomic_uint& storage_atomic_scalar [[user(fake0)]] -, device type_2& storage_atomic_arr [[user(fake0)]] +, device type_4& storage_atomic_arr [[user(fake0)]] , device Struct& storage_struct [[user(fake0)]] , threadgroup metal::atomic_uint& workgroup_atomic_scalar -, threadgroup type_2& workgroup_atomic_arr +, threadgroup type_4& workgroup_atomic_arr , threadgroup Struct& workgroup_struct ) { if (metal::all(id == metal::uint3(0u))) { diff --git a/naga/tests/out/msl/wgsl-bounds-check-zero-atomic.msl b/naga/tests/out/msl/wgsl-bounds-check-zero-atomic.msl index 0e54afff4a7..bb851049dce 100644 --- a/naga/tests/out/msl/wgsl-bounds-check-zero-atomic.msl +++ b/naga/tests/out/msl/wgsl-bounds-check-zero-atomic.msl @@ -14,14 +14,14 @@ struct _mslBufferSizes { uint size0; }; -struct type_1 { +struct type_2 { metal::atomic_uint inner[10]; }; -typedef metal::atomic_uint type_2[1]; +typedef metal::atomic_uint type_3[1]; struct Globals { metal::atomic_uint a; - type_1 b; - type_2 c; + type_2 b; + type_3 c; }; uint fetch_add_atomic( diff --git a/naga/tests/out/spv/wgsl-atomicCompareExchange-int64.spvasm 
b/naga/tests/out/spv/wgsl-atomicCompareExchange-int64.spvasm index ad89ea1943e..ad1e1b962ec 100644 --- a/naga/tests/out/spv/wgsl-atomicCompareExchange-int64.spvasm +++ b/naga/tests/out/spv/wgsl-atomicCompareExchange-int64.spvasm @@ -3,8 +3,8 @@ ; Generator: rspirv ; Bound: 173 OpCapability Shader -OpCapability Int64Atomics OpCapability Int64 +OpCapability Int64Atomics OpExtension "SPV_KHR_storage_buffer_storage_class" %1 = OpExtInstImport "GLSL.std.450" OpMemoryModel Logical GLSL450 diff --git a/naga/tests/out/spv/wgsl-atomicOps-int64-min-max.spvasm b/naga/tests/out/spv/wgsl-atomicOps-int64-min-max.spvasm index 2d31197b3b8..9dac2828f58 100644 --- a/naga/tests/out/spv/wgsl-atomicOps-int64-min-max.spvasm +++ b/naga/tests/out/spv/wgsl-atomicOps-int64-min-max.spvasm @@ -3,8 +3,8 @@ ; Generator: rspirv ; Bound: 67 OpCapability Shader -OpCapability Int64Atomics OpCapability Int64 +OpCapability Int64Atomics OpExtension "SPV_KHR_storage_buffer_storage_class" %1 = OpExtInstImport "GLSL.std.450" OpMemoryModel Logical GLSL450 diff --git a/naga/tests/out/spv/wgsl-atomicOps-int64.spvasm b/naga/tests/out/spv/wgsl-atomicOps-int64.spvasm index 806ce479912..70cf4796724 100644 --- a/naga/tests/out/spv/wgsl-atomicOps-int64.spvasm +++ b/naga/tests/out/spv/wgsl-atomicOps-int64.spvasm @@ -3,8 +3,8 @@ ; Generator: rspirv ; Bound: 227 OpCapability Shader -OpCapability Int64Atomics OpCapability Int64 +OpCapability Int64Atomics OpExtension "SPV_KHR_storage_buffer_storage_class" %1 = OpExtInstImport "GLSL.std.450" OpMemoryModel Logical GLSL450 diff --git a/naga/tests/out/spv/wgsl-binding-arrays.spvasm b/naga/tests/out/spv/wgsl-binding-arrays.spvasm index c95bef4c567..6e9c02aa346 100644 --- a/naga/tests/out/spv/wgsl-binding-arrays.spvasm +++ b/naga/tests/out/spv/wgsl-binding-arrays.spvasm @@ -82,27 +82,27 @@ OpDecorate %395 NonUniform %2 = OpTypeVoid %3 = OpTypeInt 32 0 %4 = OpTypeStruct %3 -%6 = OpTypeFloat 32 -%5 = OpTypeImage %6 2D 0 0 0 1 Unknown -%7 = OpTypeRuntimeArray %5 +%5 = 
OpTypeFloat 32 +%6 = OpTypeImage %5 2D 0 0 0 1 Unknown +%7 = OpTypeRuntimeArray %6 %9 = OpConstant %3 5 -%8 = OpTypeArray %5 %9 -%10 = OpTypeImage %6 2D 0 1 0 1 Unknown +%8 = OpTypeArray %6 %9 +%10 = OpTypeImage %5 2D 0 1 0 1 Unknown %11 = OpTypeArray %10 %9 -%12 = OpTypeImage %6 2D 0 0 1 1 Unknown +%12 = OpTypeImage %5 2D 0 0 1 1 Unknown %13 = OpTypeArray %12 %9 -%14 = OpTypeImage %6 2D 1 0 0 1 Unknown +%14 = OpTypeImage %5 2D 1 0 0 1 Unknown %15 = OpTypeArray %14 %9 -%16 = OpTypeImage %6 2D 0 0 0 2 Rgba32f +%16 = OpTypeImage %5 2D 0 0 0 2 Rgba32f %17 = OpTypeArray %16 %9 %18 = OpTypeSampler %19 = OpTypeArray %18 %9 %20 = OpTypeArray %18 %9 %21 = OpTypeStruct %3 -%22 = OpTypeVector %6 4 +%22 = OpTypeVector %5 4 %23 = OpTypeVector %3 2 %26 = OpConstant %3 10 -%25 = OpTypeArray %5 %26 +%25 = OpTypeArray %6 %26 %27 = OpTypePointer UniformConstant %25 %24 = OpVariable %27 UniformConstant %29 = OpTypePointer UniformConstant %8 @@ -130,9 +130,9 @@ OpDecorate %395 NonUniform %54 = OpTypePointer Uniform %4 %55 = OpConstant %3 0 %57 = OpConstantComposite %23 %55 %55 -%58 = OpConstant %6 0 +%58 = OpConstant %5 0 %59 = OpConstantComposite %22 %58 %58 %58 %58 -%60 = OpTypeVector %6 2 +%60 = OpTypeVector %5 2 %61 = OpConstantComposite %60 %58 %58 %62 = OpTypeInt 32 1 %63 = OpConstant %62 0 @@ -140,12 +140,12 @@ OpDecorate %395 NonUniform %65 = OpConstantComposite %64 %63 %63 %67 = OpTypePointer Function %3 %69 = OpTypePointer Function %23 -%71 = OpTypePointer Function %6 +%71 = OpTypePointer Function %5 %73 = OpTypePointer Function %22 %75 = OpTypePointer Uniform %3 -%79 = OpTypePointer UniformConstant %5 +%79 = OpTypePointer UniformConstant %6 %97 = OpTypePointer UniformConstant %18 -%100 = OpTypeSampledImage %5 +%100 = OpTypeSampledImage %6 %121 = OpTypePointer UniformConstant %14 %126 = OpTypeSampledImage %14 %149 = OpTypeBool @@ -170,25 +170,25 @@ OpBranch %74 %77 = OpLoad %3 %76 %78 = OpCompositeExtract %3 %46 0 %80 = OpAccessChain %79 %24 %55 -%81 = OpLoad %5 %80 +%81 = 
OpLoad %6 %80 %82 = OpImageQuerySizeLod %23 %81 %55 %83 = OpLoad %23 %68 %84 = OpIAdd %23 %83 %82 OpStore %68 %84 %85 = OpAccessChain %79 %24 %77 -%86 = OpLoad %5 %85 +%86 = OpLoad %6 %85 %87 = OpImageQuerySizeLod %23 %86 %55 %88 = OpLoad %23 %68 %89 = OpIAdd %23 %88 %87 OpStore %68 %89 %90 = OpAccessChain %79 %24 %78 -%91 = OpLoad %5 %90 +%91 = OpLoad %6 %90 %92 = OpImageQuerySizeLod %23 %91 %55 %93 = OpLoad %23 %68 %94 = OpIAdd %23 %93 %92 OpStore %68 %94 %95 = OpAccessChain %79 %28 %55 -%96 = OpLoad %5 %95 +%96 = OpLoad %6 %95 %98 = OpAccessChain %97 %38 %55 %99 = OpLoad %18 %98 %101 = OpSampledImage %100 %96 %99 @@ -197,7 +197,7 @@ OpStore %68 %94 %104 = OpFAdd %22 %103 %102 OpStore %72 %104 %105 = OpAccessChain %79 %28 %77 -%106 = OpLoad %5 %105 +%106 = OpLoad %6 %105 %107 = OpAccessChain %97 %38 %77 %108 = OpLoad %18 %107 %109 = OpSampledImage %100 %106 %108 @@ -206,7 +206,7 @@ OpStore %72 %104 %112 = OpFAdd %22 %111 %110 OpStore %72 %112 %113 = OpAccessChain %79 %28 %78 -%114 = OpLoad %5 %113 +%114 = OpLoad %6 %113 %115 = OpAccessChain %97 %38 %78 %116 = OpLoad %18 %115 %117 = OpSampledImage %100 %114 %116 @@ -242,7 +242,7 @@ OpStore %72 %138 %146 = OpFAdd %22 %145 %144 OpStore %72 %146 %147 = OpAccessChain %79 %24 %55 -%148 = OpLoad %5 %147 +%148 = OpLoad %6 %147 %151 = OpImageQueryLevels %62 %148 %152 = OpULessThan %149 %63 %151 OpSelectionMerge %153 None @@ -261,7 +261,7 @@ OpBranch %153 %163 = OpFAdd %22 %162 %161 OpStore %72 %163 %164 = OpAccessChain %79 %24 %77 -%165 = OpLoad %5 %164 +%165 = OpLoad %6 %164 %166 = OpImageQueryLevels %62 %165 %167 = OpULessThan %149 %63 %166 OpSelectionMerge %168 None @@ -280,7 +280,7 @@ OpBranch %168 %177 = OpFAdd %22 %176 %175 OpStore %72 %177 %178 = OpAccessChain %79 %24 %78 -%179 = OpLoad %5 %178 +%179 = OpLoad %6 %178 %180 = OpImageQueryLevels %62 %179 %181 = OpULessThan %149 %63 %180 OpSelectionMerge %182 None @@ -320,19 +320,19 @@ OpStore %66 %205 %211 = OpIAdd %3 %210 %209 OpStore %66 %211 %212 = OpAccessChain 
%79 %28 %55 -%213 = OpLoad %5 %212 +%213 = OpLoad %6 %212 %214 = OpImageQueryLevels %3 %213 %215 = OpLoad %3 %66 %216 = OpIAdd %3 %215 %214 OpStore %66 %216 %217 = OpAccessChain %79 %28 %77 -%218 = OpLoad %5 %217 +%218 = OpLoad %6 %217 %219 = OpImageQueryLevels %3 %218 %220 = OpLoad %3 %66 %221 = OpIAdd %3 %220 %219 OpStore %66 %221 %222 = OpAccessChain %79 %28 %78 -%223 = OpLoad %5 %222 +%223 = OpLoad %6 %222 %224 = OpImageQueryLevels %3 %223 %225 = OpLoad %3 %66 %226 = OpIAdd %3 %225 %224 @@ -356,7 +356,7 @@ OpStore %66 %237 %242 = OpIAdd %3 %241 %240 OpStore %66 %242 %243 = OpAccessChain %79 %28 %55 -%244 = OpLoad %5 %243 +%244 = OpLoad %6 %243 %245 = OpAccessChain %97 %38 %55 %246 = OpLoad %18 %245 %247 = OpSampledImage %100 %244 %246 @@ -365,7 +365,7 @@ OpStore %66 %242 %250 = OpFAdd %22 %249 %248 OpStore %72 %250 %251 = OpAccessChain %79 %28 %77 -%252 = OpLoad %5 %251 +%252 = OpLoad %6 %251 %253 = OpAccessChain %97 %38 %77 %254 = OpLoad %18 %253 %255 = OpSampledImage %100 %252 %254 @@ -374,7 +374,7 @@ OpStore %72 %250 %258 = OpFAdd %22 %257 %256 OpStore %72 %258 %259 = OpAccessChain %79 %28 %78 -%260 = OpLoad %5 %259 +%260 = OpLoad %6 %259 %261 = OpAccessChain %97 %38 %78 %262 = OpLoad %18 %261 %263 = OpSampledImage %100 %260 %262 @@ -383,7 +383,7 @@ OpStore %72 %258 %266 = OpFAdd %22 %265 %264 OpStore %72 %266 %267 = OpAccessChain %79 %28 %55 -%268 = OpLoad %5 %267 +%268 = OpLoad %6 %267 %269 = OpAccessChain %97 %38 %55 %270 = OpLoad %18 %269 %271 = OpSampledImage %100 %268 %270 @@ -392,7 +392,7 @@ OpStore %72 %266 %274 = OpFAdd %22 %273 %272 OpStore %72 %274 %275 = OpAccessChain %79 %28 %77 -%276 = OpLoad %5 %275 +%276 = OpLoad %6 %275 %277 = OpAccessChain %97 %38 %77 %278 = OpLoad %18 %277 %279 = OpSampledImage %100 %276 %278 @@ -401,7 +401,7 @@ OpStore %72 %274 %282 = OpFAdd %22 %281 %280 OpStore %72 %282 %283 = OpAccessChain %79 %28 %78 -%284 = OpLoad %5 %283 +%284 = OpLoad %6 %283 %285 = OpAccessChain %97 %38 %78 %286 = OpLoad %18 %285 %287 = 
OpSampledImage %100 %284 %286 @@ -414,57 +414,57 @@ OpStore %72 %290 %293 = OpAccessChain %97 %40 %55 %294 = OpLoad %18 %293 %295 = OpSampledImage %126 %292 %294 -%296 = OpImageSampleDrefImplicitLod %6 %295 %61 %58 -%297 = OpLoad %6 %70 -%298 = OpFAdd %6 %297 %296 +%296 = OpImageSampleDrefImplicitLod %5 %295 %61 %58 +%297 = OpLoad %5 %70 +%298 = OpFAdd %5 %297 %296 OpStore %70 %298 %299 = OpAccessChain %121 %34 %77 %300 = OpLoad %14 %299 %301 = OpAccessChain %97 %40 %77 %302 = OpLoad %18 %301 %303 = OpSampledImage %126 %300 %302 -%304 = OpImageSampleDrefImplicitLod %6 %303 %61 %58 -%305 = OpLoad %6 %70 -%306 = OpFAdd %6 %305 %304 +%304 = OpImageSampleDrefImplicitLod %5 %303 %61 %58 +%305 = OpLoad %5 %70 +%306 = OpFAdd %5 %305 %304 OpStore %70 %306 %307 = OpAccessChain %121 %34 %78 %308 = OpLoad %14 %307 %309 = OpAccessChain %97 %40 %78 %310 = OpLoad %18 %309 %311 = OpSampledImage %126 %308 %310 -%312 = OpImageSampleDrefImplicitLod %6 %311 %61 %58 -%313 = OpLoad %6 %70 -%314 = OpFAdd %6 %313 %312 +%312 = OpImageSampleDrefImplicitLod %5 %311 %61 %58 +%313 = OpLoad %5 %70 +%314 = OpFAdd %5 %313 %312 OpStore %70 %314 %315 = OpAccessChain %121 %34 %55 %316 = OpLoad %14 %315 %317 = OpAccessChain %97 %40 %55 %318 = OpLoad %18 %317 %319 = OpSampledImage %126 %316 %318 -%320 = OpImageSampleDrefExplicitLod %6 %319 %61 %58 Lod %58 -%321 = OpLoad %6 %70 -%322 = OpFAdd %6 %321 %320 +%320 = OpImageSampleDrefExplicitLod %5 %319 %61 %58 Lod %58 +%321 = OpLoad %5 %70 +%322 = OpFAdd %5 %321 %320 OpStore %70 %322 %323 = OpAccessChain %121 %34 %77 %324 = OpLoad %14 %323 %325 = OpAccessChain %97 %40 %77 %326 = OpLoad %18 %325 %327 = OpSampledImage %126 %324 %326 -%328 = OpImageSampleDrefExplicitLod %6 %327 %61 %58 Lod %58 -%329 = OpLoad %6 %70 -%330 = OpFAdd %6 %329 %328 +%328 = OpImageSampleDrefExplicitLod %5 %327 %61 %58 Lod %58 +%329 = OpLoad %5 %70 +%330 = OpFAdd %5 %329 %328 OpStore %70 %330 %331 = OpAccessChain %121 %34 %78 %332 = OpLoad %14 %331 %333 = OpAccessChain %97 %40 %78 
%334 = OpLoad %18 %333 %335 = OpSampledImage %126 %332 %334 -%336 = OpImageSampleDrefExplicitLod %6 %335 %61 %58 Lod %58 -%337 = OpLoad %6 %70 -%338 = OpFAdd %6 %337 %336 +%336 = OpImageSampleDrefExplicitLod %5 %335 %61 %58 Lod %58 +%337 = OpLoad %5 %70 +%338 = OpFAdd %5 %337 %336 OpStore %70 %338 %339 = OpAccessChain %79 %28 %55 -%340 = OpLoad %5 %339 +%340 = OpLoad %6 %339 %341 = OpAccessChain %97 %38 %55 %342 = OpLoad %18 %341 %343 = OpSampledImage %100 %340 %342 @@ -473,7 +473,7 @@ OpStore %70 %338 %346 = OpFAdd %22 %345 %344 OpStore %72 %346 %347 = OpAccessChain %79 %28 %77 -%348 = OpLoad %5 %347 +%348 = OpLoad %6 %347 %349 = OpAccessChain %97 %38 %77 %350 = OpLoad %18 %349 %351 = OpSampledImage %100 %348 %350 @@ -482,7 +482,7 @@ OpStore %72 %346 %354 = OpFAdd %22 %353 %352 OpStore %72 %354 %355 = OpAccessChain %79 %28 %78 -%356 = OpLoad %5 %355 +%356 = OpLoad %6 %355 %357 = OpAccessChain %97 %38 %78 %358 = OpLoad %18 %357 %359 = OpSampledImage %100 %356 %358 @@ -491,7 +491,7 @@ OpStore %72 %354 %362 = OpFAdd %22 %361 %360 OpStore %72 %362 %363 = OpAccessChain %79 %28 %55 -%364 = OpLoad %5 %363 +%364 = OpLoad %6 %363 %365 = OpAccessChain %97 %38 %55 %366 = OpLoad %18 %365 %367 = OpSampledImage %100 %364 %366 @@ -500,7 +500,7 @@ OpStore %72 %362 %370 = OpFAdd %22 %369 %368 OpStore %72 %370 %371 = OpAccessChain %79 %28 %77 -%372 = OpLoad %5 %371 +%372 = OpLoad %6 %371 %373 = OpAccessChain %97 %38 %77 %374 = OpLoad %18 %373 %375 = OpSampledImage %100 %372 %374 @@ -509,7 +509,7 @@ OpStore %72 %370 %378 = OpFAdd %22 %377 %376 OpStore %72 %378 %379 = OpAccessChain %79 %28 %78 -%380 = OpLoad %5 %379 +%380 = OpLoad %6 %379 %381 = OpAccessChain %97 %38 %78 %382 = OpLoad %18 %381 %383 = OpSampledImage %100 %380 %382 @@ -535,13 +535,13 @@ OpImageWrite %395 %65 %396 %400 = OpIAdd %23 %397 %399 %401 = OpConvertUToF %60 %400 %402 = OpLoad %22 %72 -%403 = OpCompositeExtract %6 %401 0 -%404 = OpCompositeExtract %6 %401 1 -%405 = OpCompositeExtract %6 %401 0 -%406 = 
OpCompositeExtract %6 %401 1 +%403 = OpCompositeExtract %5 %401 0 +%404 = OpCompositeExtract %5 %401 1 +%405 = OpCompositeExtract %5 %401 0 +%406 = OpCompositeExtract %5 %401 1 %407 = OpCompositeConstruct %22 %403 %404 %405 %406 %408 = OpFAdd %22 %402 %407 -%409 = OpLoad %6 %70 +%409 = OpLoad %5 %70 %410 = OpCompositeConstruct %22 %409 %409 %409 %409 %411 = OpFAdd %22 %408 %410 OpStore %50 %411 diff --git a/naga/tests/out/spv/wgsl-struct-layout.spvasm b/naga/tests/out/spv/wgsl-struct-layout.spvasm index 31611dad6a9..6b34fd56337 100644 --- a/naga/tests/out/spv/wgsl-struct-layout.spvasm +++ b/naga/tests/out/spv/wgsl-struct-layout.spvasm @@ -52,11 +52,11 @@ OpDecorate %73 Location 1 OpDecorate %75 Location 2 OpDecorate %77 BuiltIn Position %2 = OpTypeVoid -%3 = OpTypeFloat 32 -%4 = OpTypeVector %3 3 -%5 = OpTypeStruct %4 %3 -%6 = OpTypeVector %3 4 -%7 = OpTypeStruct %3 %4 %3 +%4 = OpTypeFloat 32 +%3 = OpTypeVector %4 3 +%5 = OpTypeStruct %3 %4 +%6 = OpTypeVector %4 4 +%7 = OpTypeStruct %4 %3 %4 %9 = OpTypeStruct %5 %10 = OpTypePointer Uniform %9 %8 = OpVariable %10 Uniform @@ -69,14 +69,14 @@ OpDecorate %77 BuiltIn Position %18 = OpTypeStruct %7 %19 = OpTypePointer StorageBuffer %18 %17 = OpVariable %19 StorageBuffer -%23 = OpTypePointer Input %4 +%23 = OpTypePointer Input %3 %22 = OpVariable %23 Input -%26 = OpTypePointer Input %3 +%26 = OpTypePointer Input %4 %25 = OpVariable %26 Input %29 = OpTypePointer Output %6 %28 = OpVariable %29 Output %31 = OpTypeFunction %2 -%32 = OpConstant %3 0 +%32 = OpConstant %4 0 %33 = OpConstantComposite %6 %32 %32 %32 %32 %37 = OpVariable %23 Input %39 = OpVariable %26 Input @@ -101,8 +101,8 @@ OpDecorate %77 BuiltIn Position %88 = OpConstantNull %7 %30 = OpFunction %2 None %31 %20 = OpLabel -%24 = OpLoad %4 %22 -%27 = OpLoad %3 %25 +%24 = OpLoad %3 %22 +%27 = OpLoad %4 %25 %21 = OpCompositeConstruct %5 %24 %27 OpBranch %34 %34 = OpLabel @@ -111,8 +111,8 @@ OpReturn OpFunctionEnd %42 = OpFunction %2 None %31 %35 = OpLabel -%38 = 
OpLoad %4 %37 -%40 = OpLoad %3 %39 +%38 = OpLoad %3 %37 +%40 = OpLoad %4 %39 %36 = OpCompositeConstruct %5 %38 %40 OpBranch %43 %43 = OpLabel @@ -134,9 +134,9 @@ OpReturn OpFunctionEnd %67 = OpFunction %2 None %31 %58 = OpLabel -%61 = OpLoad %3 %60 -%63 = OpLoad %4 %62 -%65 = OpLoad %3 %64 +%61 = OpLoad %4 %60 +%63 = OpLoad %3 %62 +%65 = OpLoad %4 %64 %59 = OpCompositeConstruct %7 %61 %63 %65 OpBranch %68 %68 = OpLabel @@ -145,9 +145,9 @@ OpReturn OpFunctionEnd %78 = OpFunction %2 None %31 %69 = OpLabel -%72 = OpLoad %3 %71 -%74 = OpLoad %4 %73 -%76 = OpLoad %3 %75 +%72 = OpLoad %4 %71 +%74 = OpLoad %3 %73 +%76 = OpLoad %4 %75 %70 = OpCompositeConstruct %7 %72 %74 %76 OpBranch %79 %79 = OpLabel From 5262323cfa03aae688e4f7eaa6793168949a3481 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Mon, 20 Oct 2025 14:56:13 +0200 Subject: [PATCH 29/33] add `shadowing_predeclared_types` test --- naga/src/front/wgsl/tests.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/naga/src/front/wgsl/tests.rs b/naga/src/front/wgsl/tests.rs index 5b4cd898d92..9fd9a32326c 100644 --- a/naga/src/front/wgsl/tests.rs +++ b/naga/src/front/wgsl/tests.rs @@ -631,6 +631,36 @@ fn parse_alias() { .unwrap(); } +#[test] +fn shadowing_predeclared_types() { + parse_str( + " + fn test(f32: vec2f) -> vec2f { return f32; } + ", + ) + .unwrap(); + parse_str( + " + fn test(vec2: vec2f) -> vec2f { return vec2; } + ", + ) + .unwrap(); + parse_str( + " + alias vec2f = vec2u; + fn test(v: vec2f) -> vec2u { return v; } + ", + ) + .unwrap(); + parse_str( + " + struct vec2f { inner: vec2 }; + fn test(v: vec2f) -> vec2 { return v.inner; } + ", + ) + .unwrap(); +} + #[test] fn parse_texture_load_store_expecting_four_args() { for (func, texture) in [ From 4c87309756bbf3373bb93daf5ed2b915a11ab008 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Mon, 20 Oct 2025 15:17:14 +0200 Subject: [PATCH 30/33] add 
`test_template_list` test --- naga/src/front/wgsl/parse/lexer.rs | 102 +++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 52654392ca6..6736d505b66 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -894,6 +894,108 @@ fn test_variable_decl() { ); } +#[test] +fn test_template_list() { + sub_test( + "AD", + &[ + Token::Word("A"), + Token::Paren('<'), + Token::Word("B"), + Token::LogicalOperation('|'), + Token::Word("C"), + Token::Paren('>'), + Token::Word("D"), + ], + ); + sub_test( + "A(B(E))", + &[ + Token::Word("A"), + Token::Paren('('), + Token::Word("B"), + Token::TemplateArgsStart, + Token::Word("C"), + Token::Separator(','), + Token::Word("D"), + Token::TemplateArgsEnd, + Token::Paren('('), + Token::Word("E"), + Token::Paren(')'), + Token::Paren(')'), + ], + ); + sub_test( + "arrayB)>", + &[ + Token::Word("array"), + Token::TemplateArgsStart, + Token::Word("i32"), + Token::Separator(','), + Token::Word("select"), + Token::Paren('('), + Token::Number(Ok(Number::AbstractInt(2))), + Token::Separator(','), + Token::Number(Ok(Number::AbstractInt(3))), + Token::Separator(','), + Token::Word("A"), + Token::Paren('>'), + Token::Word("B"), + Token::Paren(')'), + Token::TemplateArgsEnd, + ], + ); + sub_test( + "A[BD", + &[ + Token::Word("A"), + Token::Paren('['), + Token::Word("B"), + Token::Paren('<'), + Token::Word("C"), + Token::Paren(']'), + Token::Paren('>'), + Token::Word("D"), + ], + ); + sub_test( + "A", + &[ + Token::Word("A"), + Token::TemplateArgsStart, + Token::Word("B"), + Token::ShiftOperation('<'), + Token::Word("C"), + Token::TemplateArgsEnd, + ], + ); + sub_test( + "A<(B>=C)>", + &[ + Token::Word("A"), + Token::TemplateArgsStart, + Token::Paren('('), + Token::Word("B"), + Token::LogicalOperation('>'), + Token::Word("C"), + Token::Paren(')'), + Token::TemplateArgsEnd, + ], + ); + sub_test( + "A=C>", + &[ + 
Token::Word("A"), + Token::TemplateArgsStart, + Token::Word("B"), + Token::TemplateArgsEnd, + Token::Operation('='), + Token::Word("C"), + Token::Paren('>'), + ], + ); +} + #[test] fn test_comments() { sub_test("// Single comment", &[]); From 7fe5aa33eb5ced9022405a61703f296c344902e6 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Mon, 20 Oct 2025 16:04:54 +0200 Subject: [PATCH 31/33] document `consume_tokens` --- naga/src/front/wgsl/parse/lexer.rs | 30 +++++++++++++++++------------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index 6736d505b66..b1d98a4ea58 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -41,6 +41,21 @@ struct UnclosedCandidate { depth: usize, } +/// Implements the [Template list discovery algorithm] but does so lazily and +/// after tokenization. +/// +/// It starts tokenizing `input` in a loop and stops once the first potential +/// template list encountered has been disambiguated; populating the `tokens` +/// buffer in the process. +/// +/// Parameters +/// +/// - `tokens` is expected to be an empty buffer of tokens that this function populates. +/// - `source` is the whole original source code. +/// - `input` is the remaining unconsumed source code. +/// - `ignore_doc_comments` determines if doc comments are treated as [`Token::Trivia`]. +/// +/// [Template list discovery algorithm]: https://www.w3.org/TR/WGSL/#template-list-discovery fn consume_tokens<'a>( tokens: &mut Vec<(TokenSpan<'a>, &'a str)>, source: &'a str, @@ -124,21 +139,10 @@ fn consume_tokens { /// Return the token at the start of `input`. /// -/// If `generic` is `false`, then the bit shift operators `>>` or `<<` -/// are valid lookahead tokens for the current parser state (see [§3.1 -/// Parsing] in the WGSL specification). 
In other words: -/// -/// - If `generic` is `true`, then we are expecting an angle bracket -/// around a generic type parameter, like the `<` and `>` in -/// `vec3`, so interpret `<` and `>` as `Token::Paren` tokens, -/// even if they're part of `<<` or `>>` sequences. -/// -/// - Otherwise, interpret `<<` and `>>` as shift operators: -/// `Token::LogicalOperation` tokens. +/// If `waiting_for_template_end` is `true` and the current token is `>`, then +/// [`Token::TemplateArgsEnd`] is returned instead of `>`, `>>`, `>=` or `>>=`. /// /// If `ignore_doc_comments` is true, doc comments are treated as [`Token::Trivia`]. -/// -/// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing fn consume_token( input: &str, waiting_for_template_end: bool, From 92e95ff91a29dffcadcb2b52dd5b37da0d375f72 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Tue, 21 Oct 2025 16:06:25 +0200 Subject: [PATCH 32/33] adjust GLSL snapshots --- naga/tests/in/glsl/images.frag | 9 + naga/tests/in/glsl/samplers.frag | 20 +- naga/tests/out/wgsl/glsl-images.frag.wgsl | 98 ++-- naga/tests/out/wgsl/glsl-samplers.frag.wgsl | 471 +++++++++----------- 4 files changed, 269 insertions(+), 329 deletions(-) diff --git a/naga/tests/in/glsl/images.frag b/naga/tests/in/glsl/images.frag index d6c93b1793f..fc9e7805fd6 100644 --- a/naga/tests/in/glsl/images.frag +++ b/naga/tests/in/glsl/images.frag @@ -1,10 +1,15 @@ #version 460 core +// WGSL doesn't have 1D array textures. 
+#define HAS_1D_ARRAY_TEXTURES 0 + layout(rgba8, binding = 0) uniform image1D img1D; layout(rgba8, binding = 1) uniform image2D img2D; layout(rgba8, binding = 2) uniform image3D img3D; // layout(rgba8, binding = 3) uniform imageCube imgCube; +#if HAS_1D_ARRAY_TEXTURES layout(rgba8, binding = 4) uniform image1DArray img1DArray; +#endif layout(rgba8, binding = 5) uniform image2DArray img2DArray; // layout(rgba8, binding = 6) uniform imageCubeArray imgCubeArray; @@ -18,11 +23,13 @@ void testImg1D(in int coord) { vec4 c = imageLoad(img1D, coord); } +#if HAS_1D_ARRAY_TEXTURES void testImg1DArray(in ivec2 coord) { vec2 size = imageSize(img1DArray); vec4 c = imageLoad(img1DArray, coord); imageStore(img1DArray, coord, vec4(2)); } +#endif void testImg2D(in ivec2 coord) { vec2 size = imageSize(img2D); @@ -72,7 +79,9 @@ void testImgWriteReadOnly(in ivec2 coord) { void main() { testImg1D(1); +#if HAS_1D_ARRAY_TEXTURES testImg1DArray(ivec2(0)); +#endif testImg2D(ivec2(0)); testImg2DArray(ivec3(0)); testImg3D(ivec3(0)); diff --git a/naga/tests/in/glsl/samplers.frag b/naga/tests/in/glsl/samplers.frag index 9c1bbb7a9ef..dcf8bff73fe 100644 --- a/naga/tests/in/glsl/samplers.frag +++ b/naga/tests/in/glsl/samplers.frag @@ -1,8 +1,15 @@ #version 440 core precision mediump float; +// WGSL doesn't have 1D depth, 1D array or 2D multisampled array textures. +#define HAS_1D_DEPTH_TEXTURES 0 +#define HAS_1D_ARRAY_TEXTURES 0 +#define HAS_2D_MS_ARRAY_TEXTURES 0 + layout(set = 1, binding = 0) uniform texture1D tex1D; +#if HAS_1D_ARRAY_TEXTURES layout(set = 1, binding = 1) uniform texture1DArray tex1DArray; +#endif layout(set = 1, binding = 2) uniform texture2D tex2D; layout(set = 1, binding = 3) uniform texture2DArray tex2DArray; layout(set = 1, binding = 4) uniform textureCube texCube; @@ -14,9 +21,6 @@ layout(set = 1, binding = 8) uniform itexture2D itex2D; layout(set = 2, binding = 0) uniform sampler samp; -// WGSL doesn't have 1D depth samplers. 
-#define HAS_1D_DEPTH_TEXTURES 0 - #if HAS_1D_DEPTH_TEXTURES layout(set = 1, binding = 10) uniform texture1D tex1DShadow; layout(set = 1, binding = 11) uniform texture1DArray tex1DArrayShadow; @@ -30,7 +34,9 @@ layout(set = 1, binding = 16) uniform texture3D tex3DShadow; layout(set = 1, binding = 17) uniform samplerShadow sampShadow; layout(binding = 18) uniform texture2DMS tex2DMS; +#if HAS_2D_MS_ARRAY_TEXTURES layout(binding = 19) uniform texture2DMSArray tex2DMSArray; +#endif // Conventions for readability: // 1.0 = Shadow Ref @@ -97,6 +103,7 @@ void testTex1DShadow(float coord) { } #endif +#if HAS_1D_ARRAY_TEXTURES void testTex1DArray(in vec2 coord) { ivec2 size1DArray = textureSize(sampler1DArray(tex1DArray, samp), 0); int levels = textureQueryLevels(sampler1DArray(tex1DArray, samp)); @@ -112,6 +119,7 @@ void testTex1DArray(in vec2 coord) { c = texelFetch(sampler1DArray(tex1DArray, samp), ivec2(coord), 3); c = texelFetchOffset(sampler1DArray(tex1DArray, samp), ivec2(coord), 3, 5); } +#endif #if HAS_1D_DEPTH_TEXTURES void testTex1DArrayShadow(in vec2 coord) { @@ -290,18 +298,22 @@ void testTex2DMS(in vec2 coord) { c = texelFetch(sampler2DMS(tex2DMS, samp), ivec2(coord), 3); } +#if HAS_2D_MS_ARRAY_TEXTURES void testTex2DMSArray(in vec3 coord) { ivec3 size2DMSArray = textureSize(sampler2DMSArray(tex2DMSArray, samp)); vec4 c; c = texelFetch(sampler2DMSArray(tex2DMSArray, samp), ivec3(coord), 3); } +#endif void main() { testTex1D(1.0); #if HAS_1D_DEPTH_TEXTURES testTex1DShadow(2.0); #endif +#if HAS_1D_ARRAY_TEXTURES testTex1DArray(vec2(3.0)); +#endif #if HAS_1D_DEPTH_TEXTURES testTex1DArrayShadow(vec2(4.0)); #endif @@ -315,5 +327,7 @@ void main() { testTexCubeArrayShadow(vec4(1.0)); testTex3D(vec3(1.0)); testTex2DMS(vec2(1.0)); +#if HAS_2D_MS_ARRAY_TEXTURES testTex2DMSArray(vec3(1.0)); +#endif } diff --git a/naga/tests/out/wgsl/glsl-images.frag.wgsl b/naga/tests/out/wgsl/glsl-images.frag.wgsl index 25986557edc..7bed73016c3 100644 --- 
a/naga/tests/out/wgsl/glsl-images.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-images.frag.wgsl @@ -4,8 +4,6 @@ var img1D: texture_storage_1d; var img2D: texture_storage_2d; @group(0) @binding(2) var img3D: texture_storage_3d; -@group(0) @binding(4) -var img1DArray: texture_storage_1d_array; @group(0) @binding(5) var img2DArray: texture_storage_2d_array; @group(0) @binding(7) @@ -31,111 +29,93 @@ fn testImg1D(coord: i32) { return; } -fn testImg1DArray(coord_2: vec2) { +fn testImg2D(coord_2: vec2) { var coord_3: vec2; var size_1: vec2; var c_1: vec4; coord_3 = coord_2; - let _e3 = textureDimensions(img1DArray); - let _e4 = textureNumLayers(img1DArray); - size_1 = vec2(vec2(vec2(_e3, _e4))); - let _e9 = coord_3; - let _e12 = textureLoad(img1DArray, _e9.x, _e9.y); - c_1 = _e12; - let _e14 = coord_3; - textureStore(img1DArray, _e14.x, _e14.y, vec4(2f)); + let _e3 = textureDimensions(img2D); + size_1 = vec2(vec2(_e3)); + let _e7 = coord_3; + let _e8 = textureLoad(img2D, _e7); + c_1 = _e8; + let _e10 = coord_3; + textureStore(img2D, _e10, vec4(2f)); return; } -fn testImg2D(coord_4: vec2) { - var coord_5: vec2; - var size_2: vec2; +fn testImg2DArray(coord_4: vec3) { + var coord_5: vec3; + var size_2: vec3; var c_2: vec4; coord_5 = coord_4; - let _e3 = textureDimensions(img2D); - size_2 = vec2(vec2(_e3)); - let _e7 = coord_5; - let _e8 = textureLoad(img2D, _e7); - c_2 = _e8; - let _e10 = coord_5; - textureStore(img2D, _e10, vec4(2f)); + let _e3 = textureDimensions(img2DArray); + let _e6 = textureNumLayers(img2DArray); + size_2 = vec3(vec3(vec3(_e3.x, _e3.y, _e6))); + let _e11 = coord_5; + let _e14 = textureLoad(img2DArray, _e11.xy, _e11.z); + c_2 = _e14; + let _e16 = coord_5; + textureStore(img2DArray, _e16.xy, _e16.z, vec4(2f)); return; } -fn testImg2DArray(coord_6: vec3) { +fn testImg3D(coord_6: vec3) { var coord_7: vec3; var size_3: vec3; var c_3: vec4; coord_7 = coord_6; - let _e3 = textureDimensions(img2DArray); - let _e6 = textureNumLayers(img2DArray); - size_3 = 
vec3(vec3(vec3(_e3.x, _e3.y, _e6))); - let _e11 = coord_7; - let _e14 = textureLoad(img2DArray, _e11.xy, _e11.z); - c_3 = _e14; - let _e16 = coord_7; - textureStore(img2DArray, _e16.xy, _e16.z, vec4(2f)); + let _e3 = textureDimensions(img3D); + size_3 = vec3(vec3(_e3)); + let _e7 = coord_7; + let _e8 = textureLoad(img3D, _e7); + c_3 = _e8; + let _e10 = coord_7; + textureStore(img3D, _e10, vec4(2f)); return; } -fn testImg3D(coord_8: vec3) { - var coord_9: vec3; - var size_4: vec3; +fn testImgReadOnly(coord_8: vec2) { + var coord_9: vec2; + var size_4: vec2; var c_4: vec4; coord_9 = coord_8; - let _e3 = textureDimensions(img3D); - size_4 = vec3(vec3(_e3)); - let _e7 = coord_9; - let _e8 = textureLoad(img3D, _e7); - c_4 = _e8; - let _e10 = coord_9; - textureStore(img3D, _e10, vec4(2f)); + let _e4 = textureDimensions(img2D); + size_4 = vec2(vec2(_e4)); + let _e8 = coord_9; + let _e9 = textureLoad(imgReadOnly, _e8); + c_4 = _e9; return; } -fn testImgReadOnly(coord_10: vec2) { +fn testImgWriteOnly(coord_10: vec2) { var coord_11: vec2; var size_5: vec2; - var c_5: vec4; coord_11 = coord_10; let _e4 = textureDimensions(img2D); size_5 = vec2(vec2(_e4)); let _e8 = coord_11; - let _e9 = textureLoad(imgReadOnly, _e8); - c_5 = _e9; + textureStore(imgWriteOnly, _e8, vec4(2f)); return; } -fn testImgWriteOnly(coord_12: vec2) { +fn testImgWriteReadOnly(coord_12: vec2) { var coord_13: vec2; var size_6: vec2; coord_13 = coord_12; - let _e4 = textureDimensions(img2D); - size_6 = vec2(vec2(_e4)); - let _e8 = coord_13; - textureStore(imgWriteOnly, _e8, vec4(2f)); - return; -} - -fn testImgWriteReadOnly(coord_14: vec2) { - var coord_15: vec2; - var size_7: vec2; - - coord_15 = coord_14; let _e3 = textureDimensions(imgWriteReadOnly); - size_7 = vec2(vec2(_e3)); + size_6 = vec2(vec2(_e3)); return; } fn main_1() { testImg1D(1i); - testImg1DArray(vec2(0i)); testImg2D(vec2(0i)); testImg2DArray(vec3(0i)); testImg3D(vec3(0i)); diff --git a/naga/tests/out/wgsl/glsl-samplers.frag.wgsl 
b/naga/tests/out/wgsl/glsl-samplers.frag.wgsl index 9ae246b03ef..adc405a557e 100644 --- a/naga/tests/out/wgsl/glsl-samplers.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-samplers.frag.wgsl @@ -1,7 +1,5 @@ @group(1) @binding(0) var tex1D: texture_1d; -@group(1) @binding(1) -var tex1DArray: texture_1d_array; @group(1) @binding(2) var tex2D: texture_2d; @group(1) @binding(3) @@ -30,8 +28,6 @@ var texCubeArrayShadow: texture_depth_cube_array; var sampShadow: sampler_comparison; @group(0) @binding(18) var tex2DMS: texture_multisampled_2d; -@group(0) @binding(19) -var tex2DMSArray: texture_multisampled_2d_array; fn testTex1D(coord: f32) { var coord_1: f32; @@ -119,228 +115,187 @@ fn testTex1D(coord: f32) { return; } -fn testTex1DArray(coord_2: vec2) { +fn testTex2D(coord_2: vec2) { var coord_3: vec2; - var size1DArray: vec2; + var size2D: vec2; var levels_1: i32; var c_1: vec4; coord_3 = coord_2; - let _e5 = textureDimensions(tex1DArray, 0i); - let _e6 = textureNumLayers(tex1DArray); - size1DArray = vec2(vec2(_e5, _e6)); - let _e10 = textureNumLevels(tex1DArray); - levels_1 = i32(_e10); - let _e14 = coord_3; - let _e18 = textureSample(tex1DArray, samp, _e14.x, i32(_e14.y)); - c_1 = _e18; - let _e19 = coord_3; - let _e25 = textureSampleGrad(tex1DArray, samp, _e19.x, i32(_e19.y), 4f, 4f); - c_1 = _e25; - let _e26 = coord_3; - let _e33 = textureSampleGrad(tex1DArray, samp, _e26.x, i32(_e26.y), 4f, 4f, 5i); - c_1 = _e33; - let _e34 = coord_3; - let _e39 = textureSampleLevel(tex1DArray, samp, _e34.x, i32(_e34.y), 3f); - c_1 = _e39; - let _e40 = coord_3; - let _e46 = textureSampleLevel(tex1DArray, samp, _e40.x, i32(_e40.y), 3f, 5i); - c_1 = _e46; - let _e47 = coord_3; - let _e52 = textureSample(tex1DArray, samp, _e47.x, i32(_e47.y), 5i); - c_1 = _e52; - let _e53 = coord_3; - let _e54 = vec2(_e53); - let _e58 = textureLoad(tex1DArray, _e54.x, _e54.y, 3i); - c_1 = _e58; - let _e59 = coord_3; - let _e60 = vec2(_e59); - let _e64 = textureLoad(tex1DArray, _e60.x, _e60.y, 3i); - c_1 = 
_e64; - return; -} - -fn testTex2D(coord_4: vec2) { - var coord_5: vec2; - var size2D: vec2; - var levels_2: i32; - var c_2: vec4; - - coord_5 = coord_4; let _e7 = textureDimensions(tex2D, 0i); size2D = vec2(_e7); let _e10 = textureNumLevels(tex2D); - levels_2 = i32(_e10); - let _e14 = coord_5; + levels_1 = i32(_e10); + let _e14 = coord_3; let _e15 = textureSample(tex2D, samp, _e14); - c_2 = _e15; - let _e16 = coord_5; + c_1 = _e15; + let _e16 = coord_3; let _e18 = textureSampleBias(tex2D, samp, _e16, 2f); - c_2 = _e18; - let _e19 = coord_5; + c_1 = _e18; + let _e19 = coord_3; let _e24 = textureSampleGrad(tex2D, samp, _e19, vec2(4f), vec2(4f)); - c_2 = _e24; - let _e25 = coord_5; + c_1 = _e24; + let _e25 = coord_3; let _e32 = textureSampleGrad(tex2D, samp, _e25, vec2(4f), vec2(4f), vec2(5i)); - c_2 = _e32; - let _e33 = coord_5; + c_1 = _e32; + let _e33 = coord_3; let _e35 = textureSampleLevel(tex2D, samp, _e33, 3f); - c_2 = _e35; - let _e36 = coord_5; + c_1 = _e35; + let _e36 = coord_3; let _e40 = textureSampleLevel(tex2D, samp, _e36, 3f, vec2(5i)); - c_2 = _e40; - let _e41 = coord_5; + c_1 = _e40; + let _e41 = coord_3; let _e44 = textureSample(tex2D, samp, _e41, vec2(5i)); - c_2 = _e44; - let _e45 = coord_5; + c_1 = _e44; + let _e45 = coord_3; let _e49 = textureSampleBias(tex2D, samp, _e45, 2f, vec2(5i)); - c_2 = _e49; - let _e50 = coord_5; + c_1 = _e49; + let _e50 = coord_3; let _e54 = vec3(_e50.x, _e50.y, 6f); let _e59 = textureSample(tex2D, samp, (_e54.xy / vec2(_e54.z))); - c_2 = _e59; - let _e60 = coord_5; + c_1 = _e59; + let _e60 = coord_3; let _e65 = vec4(_e60.x, _e60.y, 0f, 6f); let _e71 = textureSample(tex2D, samp, (_e65.xyz / vec3(_e65.w)).xy); - c_2 = _e71; - let _e72 = coord_5; + c_1 = _e71; + let _e72 = coord_3; let _e76 = vec3(_e72.x, _e72.y, 6f); let _e82 = textureSampleBias(tex2D, samp, (_e76.xy / vec2(_e76.z)), 2f); - c_2 = _e82; - let _e83 = coord_5; + c_1 = _e82; + let _e83 = coord_3; let _e88 = vec4(_e83.x, _e83.y, 0f, 6f); let _e95 = 
textureSampleBias(tex2D, samp, (_e88.xyz / vec3(_e88.w)).xy, 2f); - c_2 = _e95; - let _e96 = coord_5; + c_1 = _e95; + let _e96 = coord_3; let _e100 = vec3(_e96.x, _e96.y, 6f); let _e109 = textureSampleGrad(tex2D, samp, (_e100.xy / vec2(_e100.z)), vec2(4f), vec2(4f)); - c_2 = _e109; - let _e110 = coord_5; + c_1 = _e109; + let _e110 = coord_3; let _e115 = vec4(_e110.x, _e110.y, 0f, 6f); let _e125 = textureSampleGrad(tex2D, samp, (_e115.xyz / vec3(_e115.w)).xy, vec2(4f), vec2(4f)); - c_2 = _e125; - let _e126 = coord_5; + c_1 = _e125; + let _e126 = coord_3; let _e130 = vec3(_e126.x, _e126.y, 6f); let _e141 = textureSampleGrad(tex2D, samp, (_e130.xy / vec2(_e130.z)), vec2(4f), vec2(4f), vec2(5i)); - c_2 = _e141; - let _e142 = coord_5; + c_1 = _e141; + let _e142 = coord_3; let _e147 = vec4(_e142.x, _e142.y, 0f, 6f); let _e159 = textureSampleGrad(tex2D, samp, (_e147.xyz / vec3(_e147.w)).xy, vec2(4f), vec2(4f), vec2(5i)); - c_2 = _e159; - let _e160 = coord_5; + c_1 = _e159; + let _e160 = coord_3; let _e164 = vec3(_e160.x, _e160.y, 6f); let _e170 = textureSampleLevel(tex2D, samp, (_e164.xy / vec2(_e164.z)), 3f); - c_2 = _e170; - let _e171 = coord_5; + c_1 = _e170; + let _e171 = coord_3; let _e176 = vec4(_e171.x, _e171.y, 0f, 6f); let _e183 = textureSampleLevel(tex2D, samp, (_e176.xyz / vec3(_e176.w)).xy, 3f); - c_2 = _e183; - let _e184 = coord_5; + c_1 = _e183; + let _e184 = coord_3; let _e188 = vec3(_e184.x, _e184.y, 6f); let _e196 = textureSampleLevel(tex2D, samp, (_e188.xy / vec2(_e188.z)), 3f, vec2(5i)); - c_2 = _e196; - let _e197 = coord_5; + c_1 = _e196; + let _e197 = coord_3; let _e202 = vec4(_e197.x, _e197.y, 0f, 6f); let _e211 = textureSampleLevel(tex2D, samp, (_e202.xyz / vec3(_e202.w)).xy, 3f, vec2(5i)); - c_2 = _e211; - let _e212 = coord_5; + c_1 = _e211; + let _e212 = coord_3; let _e216 = vec3(_e212.x, _e212.y, 6f); let _e223 = textureSample(tex2D, samp, (_e216.xy / vec2(_e216.z)), vec2(5i)); - c_2 = _e223; - let _e224 = coord_5; + c_1 = _e223; + let _e224 = 
coord_3; let _e229 = vec4(_e224.x, _e224.y, 0f, 6f); let _e237 = textureSample(tex2D, samp, (_e229.xyz / vec3(_e229.w)).xy, vec2(5i)); - c_2 = _e237; - let _e238 = coord_5; + c_1 = _e237; + let _e238 = coord_3; let _e242 = vec3(_e238.x, _e238.y, 6f); let _e250 = textureSampleBias(tex2D, samp, (_e242.xy / vec2(_e242.z)), 2f, vec2(5i)); - c_2 = _e250; - let _e251 = coord_5; + c_1 = _e250; + let _e251 = coord_3; let _e256 = vec4(_e251.x, _e251.y, 0f, 6f); let _e265 = textureSampleBias(tex2D, samp, (_e256.xyz / vec3(_e256.w)).xy, 2f, vec2(5i)); - c_2 = _e265; - let _e266 = coord_5; + c_1 = _e265; + let _e266 = coord_3; let _e269 = textureLoad(tex2D, vec2(_e266), 3i); - c_2 = _e269; - let _e270 = coord_5; + c_1 = _e269; + let _e270 = coord_3; let _e273 = textureLoad(utex2D, vec2(_e270), 3i); - c_2 = vec4(_e273); - let _e275 = coord_5; + c_1 = vec4(_e273); + let _e275 = coord_3; let _e278 = textureLoad(itex2D, vec2(_e275), 3i); - c_2 = vec4(_e278); - let _e280 = coord_5; + c_1 = vec4(_e278); + let _e280 = coord_3; let _e283 = textureLoad(tex2D, vec2(_e280), 3i); - c_2 = _e283; - let _e284 = coord_5; + c_1 = _e283; + let _e284 = coord_3; let _e287 = textureLoad(utex2D, vec2(_e284), 3i); - c_2 = vec4(_e287); - let _e289 = coord_5; + c_1 = vec4(_e287); + let _e289 = coord_3; let _e292 = textureLoad(itex2D, vec2(_e289), 3i); - c_2 = vec4(_e292); + c_1 = vec4(_e292); return; } -fn testTex2DShadow(coord_6: vec2) { - var coord_7: vec2; +fn testTex2DShadow(coord_4: vec2) { + var coord_5: vec2; var size2DShadow: vec2; - var levels_3: i32; + var levels_2: i32; var d: f32; - coord_7 = coord_6; + coord_5 = coord_4; let _e5 = textureDimensions(tex2DShadow, 0i); size2DShadow = vec2(_e5); let _e8 = textureNumLevels(tex2DShadow); - levels_3 = i32(_e8); - let _e12 = coord_7; + levels_2 = i32(_e8); + let _e12 = coord_5; let _e16 = vec3(_e12.x, _e12.y, 1f); let _e19 = textureSampleCompare(tex2DShadow, sampShadow, _e16.xy, _e16.z); d = _e19; - let _e20 = coord_7; + let _e20 = coord_5; let 
_e24 = vec3(_e20.x, _e20.y, 1f); let _e27 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e24.xy, _e24.z); d = _e27; - let _e28 = coord_7; + let _e28 = coord_5; let _e32 = vec3(_e28.x, _e28.y, 1f); let _e37 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e32.xy, _e32.z, vec2(5i)); d = _e37; - let _e38 = coord_7; + let _e38 = coord_5; let _e42 = vec3(_e38.x, _e38.y, 1f); let _e45 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e42.xy, _e42.z); d = _e45; - let _e46 = coord_7; + let _e46 = coord_5; let _e50 = vec3(_e46.x, _e46.y, 1f); let _e55 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e50.xy, _e50.z, vec2(5i)); d = _e55; - let _e56 = coord_7; + let _e56 = coord_5; let _e60 = vec3(_e56.x, _e56.y, 1f); let _e65 = textureSampleCompare(tex2DShadow, sampShadow, _e60.xy, _e60.z, vec2(5i)); d = _e65; - let _e66 = coord_7; + let _e66 = coord_5; let _e71 = vec4(_e66.x, _e66.y, 1f, 6f); let _e75 = (_e71.xyz / vec3(_e71.w)); let _e78 = textureSampleCompare(tex2DShadow, sampShadow, _e75.xy, _e75.z); d = _e78; - let _e79 = coord_7; + let _e79 = coord_5; let _e84 = vec4(_e79.x, _e79.y, 1f, 6f); let _e88 = (_e84.xyz / vec3(_e84.w)); let _e91 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e88.xy, _e88.z); d = _e91; - let _e92 = coord_7; + let _e92 = coord_5; let _e97 = vec4(_e92.x, _e92.y, 1f, 6f); let _e103 = (_e97.xyz / vec3(_e97.w)); let _e106 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e103.xy, _e103.z, vec2(5i)); d = _e106; - let _e107 = coord_7; + let _e107 = coord_5; let _e112 = vec4(_e107.x, _e107.y, 1f, 6f); let _e116 = (_e112.xyz / vec3(_e112.w)); let _e119 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e116.xy, _e116.z); d = _e119; - let _e120 = coord_7; + let _e120 = coord_5; let _e125 = vec4(_e120.x, _e120.y, 1f, 6f); let _e131 = (_e125.xyz / vec3(_e125.w)); let _e134 = textureSampleCompareLevel(tex2DShadow, sampShadow, _e131.xy, _e131.z, vec2(5i)); d = _e134; - let _e135 = coord_7; + let _e135 = coord_5; let _e140 = 
vec4(_e135.x, _e135.y, 1f, 6f); let _e146 = (_e140.xyz / vec3(_e140.w)); let _e149 = textureSampleCompare(tex2DShadow, sampShadow, _e146.xy, _e146.z, vec2(5i)); @@ -348,286 +303,269 @@ fn testTex2DShadow(coord_6: vec2) { return; } -fn testTex2DArray(coord_8: vec3) { - var coord_9: vec3; +fn testTex2DArray(coord_6: vec3) { + var coord_7: vec3; var size2DArray: vec3; - var levels_4: i32; - var c_3: vec4; + var levels_3: i32; + var c_2: vec4; - coord_9 = coord_8; + coord_7 = coord_6; let _e5 = textureDimensions(tex2DArray, 0i); let _e8 = textureNumLayers(tex2DArray); size2DArray = vec3(vec3(_e5.x, _e5.y, _e8)); let _e12 = textureNumLevels(tex2DArray); - levels_4 = i32(_e12); - let _e16 = coord_9; + levels_3 = i32(_e12); + let _e16 = coord_7; let _e20 = textureSample(tex2DArray, samp, _e16.xy, i32(_e16.z)); - c_3 = _e20; - let _e21 = coord_9; + c_2 = _e20; + let _e21 = coord_7; let _e26 = textureSampleBias(tex2DArray, samp, _e21.xy, i32(_e21.z), 2f); - c_3 = _e26; - let _e27 = coord_9; + c_2 = _e26; + let _e27 = coord_7; let _e35 = textureSampleGrad(tex2DArray, samp, _e27.xy, i32(_e27.z), vec2(4f), vec2(4f)); - c_3 = _e35; - let _e36 = coord_9; + c_2 = _e35; + let _e36 = coord_7; let _e46 = textureSampleGrad(tex2DArray, samp, _e36.xy, i32(_e36.z), vec2(4f), vec2(4f), vec2(5i)); - c_3 = _e46; - let _e47 = coord_9; + c_2 = _e46; + let _e47 = coord_7; let _e52 = textureSampleLevel(tex2DArray, samp, _e47.xy, i32(_e47.z), 3f); - c_3 = _e52; - let _e53 = coord_9; + c_2 = _e52; + let _e53 = coord_7; let _e60 = textureSampleLevel(tex2DArray, samp, _e53.xy, i32(_e53.z), 3f, vec2(5i)); - c_3 = _e60; - let _e61 = coord_9; + c_2 = _e60; + let _e61 = coord_7; let _e67 = textureSample(tex2DArray, samp, _e61.xy, i32(_e61.z), vec2(5i)); - c_3 = _e67; - let _e68 = coord_9; + c_2 = _e67; + let _e68 = coord_7; let _e75 = textureSampleBias(tex2DArray, samp, _e68.xy, i32(_e68.z), 2f, vec2(5i)); - c_3 = _e75; - let _e76 = coord_9; + c_2 = _e75; + let _e76 = coord_7; let _e77 = vec3(_e76); 
let _e81 = textureLoad(tex2DArray, _e77.xy, _e77.z, 3i); - c_3 = _e81; - let _e82 = coord_9; + c_2 = _e81; + let _e82 = coord_7; let _e83 = vec3(_e82); let _e87 = textureLoad(tex2DArray, _e83.xy, _e83.z, 3i); - c_3 = _e87; + c_2 = _e87; return; } -fn testTex2DArrayShadow(coord_10: vec3) { - var coord_11: vec3; +fn testTex2DArrayShadow(coord_8: vec3) { + var coord_9: vec3; var size2DArrayShadow: vec3; - var levels_5: i32; + var levels_4: i32; var d_1: f32; - coord_11 = coord_10; + coord_9 = coord_8; let _e5 = textureDimensions(tex2DArrayShadow, 0i); let _e8 = textureNumLayers(tex2DArrayShadow); size2DArrayShadow = vec3(vec3(_e5.x, _e5.y, _e8)); let _e12 = textureNumLevels(tex2DArrayShadow); - levels_5 = i32(_e12); - let _e16 = coord_11; + levels_4 = i32(_e12); + let _e16 = coord_9; let _e21 = vec4(_e16.x, _e16.y, _e16.z, 1f); let _e26 = textureSampleCompare(tex2DArrayShadow, sampShadow, _e21.xy, i32(_e21.z), _e21.w); d_1 = _e26; - let _e27 = coord_11; + let _e27 = coord_9; let _e32 = vec4(_e27.x, _e27.y, _e27.z, 1f); let _e37 = textureSampleCompareLevel(tex2DArrayShadow, sampShadow, _e32.xy, i32(_e32.z), _e32.w); d_1 = _e37; - let _e38 = coord_11; + let _e38 = coord_9; let _e43 = vec4(_e38.x, _e38.y, _e38.z, 1f); let _e50 = textureSampleCompareLevel(tex2DArrayShadow, sampShadow, _e43.xy, i32(_e43.z), _e43.w, vec2(5i)); d_1 = _e50; - let _e51 = coord_11; + let _e51 = coord_9; let _e56 = vec4(_e51.x, _e51.y, _e51.z, 1f); let _e63 = textureSampleCompare(tex2DArrayShadow, sampShadow, _e56.xy, i32(_e56.z), _e56.w, vec2(5i)); d_1 = _e63; return; } -fn testTexCube(coord_12: vec3) { - var coord_13: vec3; +fn testTexCube(coord_10: vec3) { + var coord_11: vec3; var sizeCube: vec2; - var levels_6: i32; - var c_4: vec4; + var levels_5: i32; + var c_3: vec4; - coord_13 = coord_12; + coord_11 = coord_10; let _e5 = textureDimensions(texCube, 0i); sizeCube = vec2(_e5); let _e8 = textureNumLevels(texCube); - levels_6 = i32(_e8); - let _e12 = coord_13; + levels_5 = i32(_e8); + let 
_e12 = coord_11; let _e13 = textureSample(texCube, samp, _e12); - c_4 = _e13; - let _e14 = coord_13; + c_3 = _e13; + let _e14 = coord_11; let _e16 = textureSampleBias(texCube, samp, _e14, 2f); - c_4 = _e16; - let _e17 = coord_13; + c_3 = _e16; + let _e17 = coord_11; let _e22 = textureSampleGrad(texCube, samp, _e17, vec3(4f), vec3(4f)); - c_4 = _e22; - let _e23 = coord_13; + c_3 = _e22; + let _e23 = coord_11; let _e25 = textureSampleLevel(texCube, samp, _e23, 3f); - c_4 = _e25; + c_3 = _e25; return; } -fn testTexCubeShadow(coord_14: vec3) { - var coord_15: vec3; +fn testTexCubeShadow(coord_12: vec3) { + var coord_13: vec3; var sizeCubeShadow: vec2; - var levels_7: i32; + var levels_6: i32; var d_2: f32; - coord_15 = coord_14; + coord_13 = coord_12; let _e5 = textureDimensions(texCubeShadow, 0i); sizeCubeShadow = vec2(_e5); let _e8 = textureNumLevels(texCubeShadow); - levels_7 = i32(_e8); - let _e12 = coord_15; + levels_6 = i32(_e8); + let _e12 = coord_13; let _e17 = vec4(_e12.x, _e12.y, _e12.z, 1f); let _e20 = textureSampleCompare(texCubeShadow, sampShadow, _e17.xyz, _e17.w); d_2 = _e20; - let _e21 = coord_15; + let _e21 = coord_13; let _e26 = vec4(_e21.x, _e21.y, _e21.z, 1f); let _e29 = textureSampleCompareLevel(texCubeShadow, sampShadow, _e26.xyz, _e26.w); d_2 = _e29; return; } -fn testTexCubeArray(coord_16: vec4) { - var coord_17: vec4; +fn testTexCubeArray(coord_14: vec4) { + var coord_15: vec4; var sizeCubeArray: vec3; - var levels_8: i32; - var c_5: vec4; + var levels_7: i32; + var c_4: vec4; - coord_17 = coord_16; + coord_15 = coord_14; let _e5 = textureDimensions(texCubeArray, 0i); let _e8 = textureNumLayers(texCubeArray); sizeCubeArray = vec3(vec3(_e5.x, _e5.y, _e8)); let _e12 = textureNumLevels(texCubeArray); - levels_8 = i32(_e12); - let _e16 = coord_17; + levels_7 = i32(_e12); + let _e16 = coord_15; let _e20 = textureSample(texCubeArray, samp, _e16.xyz, i32(_e16.w)); - c_5 = _e20; - let _e21 = coord_17; + c_4 = _e20; + let _e21 = coord_15; let _e26 = 
textureSampleBias(texCubeArray, samp, _e21.xyz, i32(_e21.w), 2f); - c_5 = _e26; - let _e27 = coord_17; + c_4 = _e26; + let _e27 = coord_15; let _e35 = textureSampleGrad(texCubeArray, samp, _e27.xyz, i32(_e27.w), vec3(4f), vec3(4f)); - c_5 = _e35; - let _e36 = coord_17; + c_4 = _e35; + let _e36 = coord_15; let _e41 = textureSampleLevel(texCubeArray, samp, _e36.xyz, i32(_e36.w), 3f); - c_5 = _e41; + c_4 = _e41; return; } -fn testTexCubeArrayShadow(coord_18: vec4) { - var coord_19: vec4; +fn testTexCubeArrayShadow(coord_16: vec4) { + var coord_17: vec4; var sizeCubeArrayShadow: vec3; - var levels_9: i32; + var levels_8: i32; var d_3: f32; - coord_19 = coord_18; + coord_17 = coord_16; let _e5 = textureDimensions(texCubeArrayShadow, 0i); let _e8 = textureNumLayers(texCubeArrayShadow); sizeCubeArrayShadow = vec3(vec3(_e5.x, _e5.y, _e8)); let _e12 = textureNumLevels(texCubeArrayShadow); - levels_9 = i32(_e12); - let _e16 = coord_19; + levels_8 = i32(_e12); + let _e16 = coord_17; let _e21 = textureSampleCompare(texCubeArrayShadow, sampShadow, _e16.xyz, i32(_e16.w), 1f); d_3 = _e21; return; } -fn testTex3D(coord_20: vec3) { - var coord_21: vec3; +fn testTex3D(coord_18: vec3) { + var coord_19: vec3; var size3D: vec3; - var levels_10: i32; - var c_6: vec4; + var levels_9: i32; + var c_5: vec4; - coord_21 = coord_20; + coord_19 = coord_18; let _e5 = textureDimensions(tex3D, 0i); size3D = vec3(_e5); let _e8 = textureNumLevels(tex3D); - levels_10 = i32(_e8); - let _e12 = coord_21; + levels_9 = i32(_e8); + let _e12 = coord_19; let _e13 = textureSample(tex3D, samp, _e12); - c_6 = _e13; - let _e14 = coord_21; + c_5 = _e13; + let _e14 = coord_19; let _e16 = textureSampleBias(tex3D, samp, _e14, 2f); - c_6 = _e16; - let _e17 = coord_21; + c_5 = _e16; + let _e17 = coord_19; let _e22 = vec4(_e17.x, _e17.y, _e17.z, 6f); let _e27 = textureSample(tex3D, samp, (_e22.xyz / vec3(_e22.w))); - c_6 = _e27; - let _e28 = coord_21; + c_5 = _e27; + let _e28 = coord_19; let _e33 = vec4(_e28.x, 
_e28.y, _e28.z, 6f); let _e39 = textureSampleBias(tex3D, samp, (_e33.xyz / vec3(_e33.w)), 2f); - c_6 = _e39; - let _e40 = coord_21; + c_5 = _e39; + let _e40 = coord_19; let _e45 = vec4(_e40.x, _e40.y, _e40.z, 6f); let _e52 = textureSample(tex3D, samp, (_e45.xyz / vec3(_e45.w)), vec3(5i)); - c_6 = _e52; - let _e53 = coord_21; + c_5 = _e52; + let _e53 = coord_19; let _e58 = vec4(_e53.x, _e53.y, _e53.z, 6f); let _e66 = textureSampleBias(tex3D, samp, (_e58.xyz / vec3(_e58.w)), 2f, vec3(5i)); - c_6 = _e66; - let _e67 = coord_21; + c_5 = _e66; + let _e67 = coord_19; let _e72 = vec4(_e67.x, _e67.y, _e67.z, 6f); let _e78 = textureSampleLevel(tex3D, samp, (_e72.xyz / vec3(_e72.w)), 3f); - c_6 = _e78; - let _e79 = coord_21; + c_5 = _e78; + let _e79 = coord_19; let _e84 = vec4(_e79.x, _e79.y, _e79.z, 6f); let _e92 = textureSampleLevel(tex3D, samp, (_e84.xyz / vec3(_e84.w)), 3f, vec3(5i)); - c_6 = _e92; - let _e93 = coord_21; + c_5 = _e92; + let _e93 = coord_19; let _e98 = vec4(_e93.x, _e93.y, _e93.z, 6f); let _e107 = textureSampleGrad(tex3D, samp, (_e98.xyz / vec3(_e98.w)), vec3(4f), vec3(4f)); - c_6 = _e107; - let _e108 = coord_21; + c_5 = _e107; + let _e108 = coord_19; let _e113 = vec4(_e108.x, _e108.y, _e108.z, 6f); let _e124 = textureSampleGrad(tex3D, samp, (_e113.xyz / vec3(_e113.w)), vec3(4f), vec3(4f), vec3(5i)); - c_6 = _e124; - let _e125 = coord_21; + c_5 = _e124; + let _e125 = coord_19; let _e130 = textureSampleGrad(tex3D, samp, _e125, vec3(4f), vec3(4f)); - c_6 = _e130; - let _e131 = coord_21; + c_5 = _e130; + let _e131 = coord_19; let _e138 = textureSampleGrad(tex3D, samp, _e131, vec3(4f), vec3(4f), vec3(5i)); - c_6 = _e138; - let _e139 = coord_21; + c_5 = _e138; + let _e139 = coord_19; let _e141 = textureSampleLevel(tex3D, samp, _e139, 3f); - c_6 = _e141; - let _e142 = coord_21; + c_5 = _e141; + let _e142 = coord_19; let _e146 = textureSampleLevel(tex3D, samp, _e142, 3f, vec3(5i)); - c_6 = _e146; - let _e147 = coord_21; + c_5 = _e146; + let _e147 = coord_19; let 
_e150 = textureSample(tex3D, samp, _e147, vec3(5i)); - c_6 = _e150; - let _e151 = coord_21; + c_5 = _e150; + let _e151 = coord_19; let _e155 = textureSampleBias(tex3D, samp, _e151, 2f, vec3(5i)); - c_6 = _e155; - let _e156 = coord_21; + c_5 = _e155; + let _e156 = coord_19; let _e159 = textureLoad(tex3D, vec3(_e156), 3i); - c_6 = _e159; - let _e160 = coord_21; + c_5 = _e159; + let _e160 = coord_19; let _e163 = textureLoad(tex3D, vec3(_e160), 3i); - c_6 = _e163; + c_5 = _e163; return; } -fn testTex2DMS(coord_22: vec2) { - var coord_23: vec2; +fn testTex2DMS(coord_20: vec2) { + var coord_21: vec2; var size2DMS: vec2; - var c_7: vec4; + var c_6: vec4; - coord_23 = coord_22; + coord_21 = coord_20; let _e3 = textureDimensions(tex2DMS); size2DMS = vec2(_e3); - let _e7 = coord_23; + let _e7 = coord_21; let _e10 = textureLoad(tex2DMS, vec2(_e7), 3i); - c_7 = _e10; - return; -} - -fn testTex2DMSArray(coord_24: vec3) { - var coord_25: vec3; - var size2DMSArray: vec3; - var c_8: vec4; - - coord_25 = coord_24; - let _e3 = textureDimensions(tex2DMSArray); - let _e6 = textureNumLayers(tex2DMSArray); - size2DMSArray = vec3(vec3(_e3.x, _e3.y, _e6)); - let _e11 = coord_25; - let _e12 = vec3(_e11); - let _e16 = textureLoad(tex2DMSArray, _e12.xy, _e12.z, 3i); - c_8 = _e16; + c_6 = _e10; return; } fn main_1() { testTex1D(1f); - testTex1DArray(vec2(3f)); testTex2D(vec2(1f)); testTex2DShadow(vec2(1f)); testTex2DArray(vec3(1f)); @@ -638,7 +576,6 @@ fn main_1() { testTexCubeArrayShadow(vec4(1f)); testTex3D(vec3(1f)); testTex2DMS(vec2(1f)); - testTex2DMSArray(vec3(1f)); return; } From 13a9c1b308ffc369ab37a95a72138e9b0ccb4e71 Mon Sep 17 00:00:00 2001 From: teoxoy <28601907+teoxoy@users.noreply.github.com> Date: Wed, 22 Oct 2025 14:07:56 +0200 Subject: [PATCH 33/33] [wgsl-out] avoid shadowing built-ins --- naga/src/back/glsl/mod.rs | 1 + naga/src/back/hlsl/writer.rs | 1 + naga/src/back/msl/writer.rs | 1 + naga/src/back/wgsl/writer.rs | 1 + naga/src/keywords/wgsl.rs | 263 ++++++++++++++++++ 
naga/src/proc/namer.rs | 34 ++- .../wgsl/glsl-931-constant-emitting.frag.wgsl | 4 +- .../wgsl/glsl-constant-array-size.frag.wgsl | 4 +- .../wgsl/glsl-global-constant-array.frag.wgsl | 4 +- .../out/wgsl/spv-8151-barrier-reorder.wgsl | 4 +- .../out/wgsl/spv-atomic_compare_exchange.wgsl | 4 +- naga/tests/out/wgsl/spv-atomic_exchange.wgsl | 4 +- ...spv-atomic_global_struct_field_vertex.wgsl | 4 +- naga/tests/out/wgsl/spv-atomic_i_add_sub.wgsl | 4 +- .../out/wgsl/spv-atomic_i_decrement.wgsl | 4 +- .../out/wgsl/spv-atomic_i_increment.wgsl | 4 +- .../out/wgsl/spv-atomic_load_and_store.wgsl | 4 +- naga/tests/out/wgsl/spv-barrier.wgsl | 4 +- .../out/wgsl/spv-binding-arrays.dynamic.wgsl | 4 +- .../out/wgsl/spv-binding-arrays.static.wgsl | 4 +- .../tests/out/wgsl/spv-empty-global-name.wgsl | 4 +- naga/tests/out/wgsl/spv-fetch_depth.wgsl | 4 +- naga/tests/out/wgsl/spv-subgroup-barrier.wgsl | 4 +- .../out/wgsl/spv-subgroup-operations-s.wgsl | 4 +- .../out/wgsl/spv-unnamed-gl-per-vertex.wgsl | 4 +- 25 files changed, 334 insertions(+), 43 deletions(-) diff --git a/naga/src/back/glsl/mod.rs b/naga/src/back/glsl/mod.rs index 4c5a9d8cbcb..b2fd888f62d 100644 --- a/naga/src/back/glsl/mod.rs +++ b/naga/src/back/glsl/mod.rs @@ -663,6 +663,7 @@ impl<'a, W: Write> Writer<'a, W> { namer.reset( module, &keywords::RESERVED_KEYWORD_SET, + proc::KeywordSet::empty(), proc::CaseInsensitiveKeywordSet::empty(), &[ "gl_", // all GL built-in variables diff --git a/naga/src/back/hlsl/writer.rs b/naga/src/back/hlsl/writer.rs index ab95b9327f9..f22f75b49b7 100644 --- a/naga/src/back/hlsl/writer.rs +++ b/naga/src/back/hlsl/writer.rs @@ -155,6 +155,7 @@ impl<'a, W: fmt::Write> super::Writer<'a, W> { self.namer.reset( module, &super::keywords::RESERVED_SET, + proc::KeywordSet::empty(), &super::keywords::RESERVED_CASE_INSENSITIVE_SET, super::keywords::RESERVED_PREFIXES, &mut self.names, diff --git a/naga/src/back/msl/writer.rs b/naga/src/back/msl/writer.rs index 6e51f90181e..1be53af048d 100644 --- 
a/naga/src/back/msl/writer.rs +++ b/naga/src/back/msl/writer.rs @@ -4279,6 +4279,7 @@ impl Writer { self.namer.reset( module, &super::keywords::RESERVED_SET, + proc::KeywordSet::empty(), proc::CaseInsensitiveKeywordSet::empty(), &[CLAMPED_LOD_LOAD_PREFIX], &mut self.names, diff --git a/naga/src/back/wgsl/writer.rs b/naga/src/back/wgsl/writer.rs index 225a63343bf..88d629a6028 100644 --- a/naga/src/back/wgsl/writer.rs +++ b/naga/src/back/wgsl/writer.rs @@ -100,6 +100,7 @@ impl Writer { self.namer.reset( module, &crate::keywords::wgsl::RESERVED_SET, + &crate::keywords::wgsl::BUILTIN_IDENTIFIER_SET, // an identifier must not start with two underscore proc::CaseInsensitiveKeywordSet::empty(), &["__", "_naga"], diff --git a/naga/src/keywords/wgsl.rs b/naga/src/keywords/wgsl.rs index 00c3cef2c10..e8b4d9a085b 100644 --- a/naga/src/keywords/wgsl.rs +++ b/naga/src/keywords/wgsl.rs @@ -190,3 +190,266 @@ pub const RESERVED: &[&str] = &[ /// /// See for benchmarks. pub static RESERVED_SET: RacyLock = RacyLock::new(|| KeywordSet::from_iter(RESERVED)); + +/// Shadowable words that the WGSL backend should avoid using for declarations. +/// +/// Includes: +/// - [6.9. Predeclared Types and Type-Generators] +/// - [6.3.1. Predeclared enumerants] +/// - [17. Built-in Functions] +/// +/// This set must be separate from the [`RESERVED`] set above since the +/// [`Namer`](crate::proc::Namer) must ignore these identifiers if they appear +/// as struct member names. This is because this set contains `fract` and `exp` +/// which are also names used by return types of the `frexp` and `modf` built-in functions. +/// +/// [6.9. Predeclared Types and Type-Generators]: https://www.w3.org/TR/WGSL/#predeclared-types +/// [6.3.1. Predeclared enumerants]: https://www.w3.org/TR/WGSL/#predeclared-enumerants +/// [17. 
Built-in Functions]: https://www.w3.org/TR/WGSL/#builtin-functions +pub const BUILTIN_IDENTIFIERS: &[&str] = &[ + // types + "bool", + "i32", + "u32", + "f32", + "f16", + "array", + "atomic", + "vec2", + "vec3", + "vec4", + "mat2x2", + "mat2x3", + "mat2x4", + "mat3x2", + "mat3x3", + "mat3x4", + "mat4x2", + "mat4x3", + "mat4x4", + "ptr", + "sampler", + "sampler_comparison", + "texture_1d", + "texture_2d", + "texture_2d_array", + "texture_3d", + "texture_cube", + "texture_cube_array", + "texture_multisampled_2d", + "texture_depth_multisampled_2d", + "texture_external", + "texture_storage_1d", + "texture_storage_2d", + "texture_storage_2d_array", + "texture_storage_3d", + "texture_depth_2d", + "texture_depth_2d_array", + "texture_depth_cube", + "texture_depth_cube_array", + // enumerants + "read", + "write", + "read_write", + "function", + "private", + "workgroup", + "uniform", + "storage", + "rgba8unorm", + "rgba8snorm", + "rgba8uint", + "rgba8sint", + "rgba16unorm", + "rgba16snorm", + "rgba16uint", + "rgba16sint", + "rgba16float", + "rg8unorm", + "rg8snorm", + "rg8uint", + "rg8sint", + "rg16unorm", + "rg16snorm", + "rg16uint", + "rg16sint", + "rg16float", + "r32uint", + "r32sint", + "r32float", + "rg32uint", + "rg32sint", + "rg32float", + "rgba32uint", + "rgba32sint", + "rgba32float", + "bgra8unorm", + "r8unorm", + "r8snorm", + "r8uint", + "r8sint", + "r16unorm", + "r16snorm", + "r16uint", + "r16sint", + "r16float", + "rgb10a2unorm", + "rgb10a2uint", + "rg11b10ufloat", + // functions + "bitcast", + "all", + "any", + "select", + "arrayLength", + "abs", + "acos", + "acosh", + "asin", + "asinh", + "atan", + "atanh", + "atan2", + "ceil", + "clamp", + "cos", + "cosh", + "countLeadingZeros", + "countOneBits", + "countTrailingZeros", + "cross", + "degrees", + "determinant", + "distance", + "dot", + "dot4U8Packed", + "dot4I8Packed", + "exp", + "exp2", + "extractBits", + "faceForward", + "firstLeadingBit", + "firstTrailingBit", + "floor", + "fma", + "fract", + "frexp", + 
"insertBits", + "inverseSqrt", + "ldexp", + "length", + "log", + "log2", + "max", + "min", + "mix", + "modf", + "normalize", + "pow", + "quantizeToF16", + "radians", + "reflect", + "refract", + "reverseBits", + "round", + "saturate", + "sign", + "sin", + "sinh", + "smoothstep", + "sqrt", + "step", + "tan", + "tanh", + "transpose", + "trunc", + "dpdx", + "dpdxCoarse", + "dpdxFine", + "dpdy", + "dpdyCoarse", + "dpdyFine", + "fwidth", + "fwidthCoarse", + "fwidthFine", + "textureDimensions", + "textureGather", + "textureGatherCompare", + "textureLoad", + "textureNumLayers", + "textureNumLevels", + "textureNumSamples", + "textureSample", + "textureSampleBias", + "textureSampleCompare", + "textureSampleCompareLevel", + "textureSampleGrad", + "textureSampleLevel", + "textureSampleBaseClampToEdge", + "textureStore", + "atomicLoad", + "atomicStore", + "atomicAdd", + "atomicSub", + "atomicMax", + "atomicMin", + "atomicAnd", + "atomicOr", + "atomicXor", + "atomicExchange", + "atomicCompareExchangeWeak", + "pack4x8snorm", + "pack4x8unorm", + "pack4xI8", + "pack4xU8", + "pack4xI8Clamp", + "pack4xU8Clamp", + "pack2x16snorm", + "pack2x16unorm", + "pack2x16float", + "unpack4x8snorm", + "unpack4x8unorm", + "unpack4xI8", + "unpack4xU8", + "unpack2x16snorm", + "unpack2x16unorm", + "unpack2x16float", + "storageBarrier", + "textureBarrier", + "workgroupBarrier", + "workgroupUniformLoad", + "subgroupAdd", + "subgroupExclusiveAdd", + "subgroupInclusiveAdd", + "subgroupAll", + "subgroupAnd", + "subgroupAny", + "subgroupBallot", + "subgroupBroadcast", + "subgroupBroadcastFirst", + "subgroupElect", + "subgroupMax", + "subgroupMin", + "subgroupMul", + "subgroupExclusiveMul", + "subgroupInclusiveMul", + "subgroupOr", + "subgroupShuffle", + "subgroupShuffleDown", + "subgroupShuffleUp", + "subgroupShuffleXor", + "subgroupXor", + "quadBroadcast", + "quadSwapDiagonal", + "quadSwapX", + "quadSwapY", + // not in the WGSL spec + "i64", + "u64", + "f64", + "push_constant", + "r64uint", +]; + +pub 
static BUILTIN_IDENTIFIER_SET: RacyLock = + RacyLock::new(|| KeywordSet::from_iter(BUILTIN_IDENTIFIERS)); diff --git a/naga/src/proc/namer.rs b/naga/src/proc/namer.rs index 127e346f3a1..8b81445d751 100644 --- a/naga/src/proc/namer.rs +++ b/naga/src/proc/namer.rs @@ -90,6 +90,7 @@ pub struct Namer { /// The last numeric suffix used for each base name. Zero means "no suffix". unique: FastHashMap, keywords: &'static KeywordSet, + builtin_identifiers: &'static KeywordSet, keywords_case_insensitive: &'static CaseInsensitiveKeywordSet, reserved_prefixes: Vec<&'static str>, } @@ -173,6 +174,13 @@ impl Namer { /// Guarantee uniqueness by applying a numeric suffix when necessary. If `label_raw` /// itself ends with digits, separate them from the suffix with an underscore. pub fn call(&mut self, label_raw: &str) -> String { + self.call_impl(label_raw, false) + } + + /// See documentation of [`Self::call`]. Additionally this function allows ignoring + /// `Namer::builtin_identifiers` mainly for [`NameKey::StructMember`] since struct members + /// can't shadow builtin identifiers. 
+ pub fn call_impl(&mut self, label_raw: &str, ignore_builtin_identifiers: bool) -> String { use core::fmt::Write as _; // for write!-ing to Strings let base = self.sanitize(label_raw); @@ -196,6 +204,8 @@ impl Namer { if base.ends_with(char::is_numeric) || self.keywords.contains(base.as_ref()) || self.keywords_case_insensitive.contains(base.as_ref()) + || (!ignore_builtin_identifiers + && self.builtin_identifiers.contains(base.as_ref())) { suffixed.push(SEPARATOR); } @@ -209,10 +219,22 @@ impl Namer { } pub fn call_or(&mut self, label: &Option, fallback: &str) -> String { - self.call(match *label { - Some(ref name) => name, - None => fallback, - }) + self.call_or_impl(label, fallback, false) + } + + fn call_or_impl( + &mut self, + label: &Option, + fallback: &str, + ignore_builtin_identifiers: bool, + ) -> String { + self.call_impl( + match *label { + Some(ref name) => name, + None => fallback, + }, + ignore_builtin_identifiers, + ) } /// Enter a local namespace for things like structs. @@ -231,6 +253,7 @@ impl Namer { &mut self, module: &crate::Module, reserved_keywords: &'static KeywordSet, + builtin_identifiers: &'static KeywordSet, reserved_keywords_case_insensitive: &'static CaseInsensitiveKeywordSet, reserved_prefixes: &[&'static str], output: &mut FastHashMap, @@ -240,6 +263,7 @@ impl Namer { self.unique.clear(); self.keywords = reserved_keywords; + self.builtin_identifiers = builtin_identifiers; self.keywords_case_insensitive = reserved_keywords_case_insensitive; // Choose fallback names for anonymous entry point return types. 
@@ -281,7 +305,7 @@ impl Namer { // struct members have their own namespace, because access is always prefixed self.namespace(members.len(), |namer| { for (index, member) in members.iter().enumerate() { - let name = namer.call_or(&member.name, "member"); + let name = namer.call_or_impl(&member.name, "member", true); output.insert(NameKey::StructMember(ty_handle, index as u32), name); } }) diff --git a/naga/tests/out/wgsl/glsl-931-constant-emitting.frag.wgsl b/naga/tests/out/wgsl/glsl-931-constant-emitting.frag.wgsl index 1bd18e96b1b..6a9e080fdb4 100644 --- a/naga/tests/out/wgsl/glsl-931-constant-emitting.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-931-constant-emitting.frag.wgsl @@ -1,11 +1,11 @@ const constant: i32 = 10i; -fn function() -> f32 { +fn function_() -> f32 { return 0f; } fn main_1() { - let _e0 = function(); + let _e0 = function_(); return; } diff --git a/naga/tests/out/wgsl/glsl-constant-array-size.frag.wgsl b/naga/tests/out/wgsl/glsl-constant-array-size.frag.wgsl index 14441e2bd1b..cbc16448ae9 100644 --- a/naga/tests/out/wgsl/glsl-constant-array-size.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-constant-array-size.frag.wgsl @@ -7,7 +7,7 @@ const NUM_VECS: i32 = 42i; @group(1) @binding(0) var global: Data; -fn function() -> vec4 { +fn function_() -> vec4 { var sum: vec4 = vec4(0f); var i: i32 = 0i; @@ -32,7 +32,7 @@ fn function() -> vec4 { } fn main_1() { - let _e0 = function(); + let _e0 = function_(); return; } diff --git a/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl b/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl index 2519e9f9b4b..6582dfc9ece 100644 --- a/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl +++ b/naga/tests/out/wgsl/glsl-global-constant-array.frag.wgsl @@ -1,7 +1,7 @@ -const array: array = array(1f, 2f); +const array_: array = array(1f, 2f); fn main_1() { - var local: array = array; + var local: array = array_; return; } diff --git a/naga/tests/out/wgsl/spv-8151-barrier-reorder.wgsl 
b/naga/tests/out/wgsl/spv-8151-barrier-reorder.wgsl index 4ba429bb811..4a6f542f4f4 100644 --- a/naga/tests/out/wgsl/spv-8151-barrier-reorder.wgsl +++ b/naga/tests/out/wgsl/spv-8151-barrier-reorder.wgsl @@ -7,7 +7,7 @@ var global: vec3; var global_1: type_3; var global_2: u32; -fn function() { +fn function_() { let _e6 = global; let _e8 = (_e6.x == 0u); if _e8 { @@ -26,5 +26,5 @@ fn function() { @compute @workgroup_size(2, 1, 1) fn barrier_reorder_bug(@builtin(local_invocation_id) param: vec3) { global = param; - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_compare_exchange.wgsl b/naga/tests/out/wgsl/spv-atomic_compare_exchange.wgsl index 36b460d74af..b224b21ddf9 100644 --- a/naga/tests/out/wgsl/spv-atomic_compare_exchange.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_compare_exchange.wgsl @@ -16,7 +16,7 @@ var global: type_5; @group(0) @binding(1) var global_1: type_3; -fn function() { +fn function_() { var phi_28_: type_2; var phi_29_: type_2; var phi_43_: type_2; @@ -62,5 +62,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_compare_exchange() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_exchange.wgsl b/naga/tests/out/wgsl/spv-atomic_exchange.wgsl index 0440b1b92b5..089ebb5b3a2 100644 --- a/naga/tests/out/wgsl/spv-atomic_exchange.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_exchange.wgsl @@ -16,7 +16,7 @@ var global: type_5; @group(0) @binding(1) var global_1: type_3; -fn function() { +fn function_() { var phi_26_: type_2; var phi_29_: u32; var phi_43_: type_2; @@ -76,5 +76,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_exchange() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_global_struct_field_vertex.wgsl b/naga/tests/out/wgsl/spv-atomic_global_struct_field_vertex.wgsl index c3369e241b8..a40ad260378 100644 --- a/naga/tests/out/wgsl/spv-atomic_global_struct_field_vertex.wgsl +++ 
b/naga/tests/out/wgsl/spv-atomic_global_struct_field_vertex.wgsl @@ -12,7 +12,7 @@ struct type_6 { var global: type_6; var global_1: vec4 = vec4(0f, 0f, 0f, 1f); -fn function() { +fn function_() { let _e7 = global.member.member; let _e8 = atomicAdd((&global.member.member_2), _e7); let _e9 = f32(_e8); @@ -23,7 +23,7 @@ fn function() { @vertex fn global_field_vertex() -> @builtin(position) vec4 { - function(); + function_(); let _e1 = global_1; return _e1; } diff --git a/naga/tests/out/wgsl/spv-atomic_i_add_sub.wgsl b/naga/tests/out/wgsl/spv-atomic_i_add_sub.wgsl index bebab9e059e..86de7816877 100644 --- a/naga/tests/out/wgsl/spv-atomic_i_add_sub.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_i_add_sub.wgsl @@ -11,7 +11,7 @@ var global: type_4; @group(0) @binding(1) var global_1: type_2; -fn function() { +fn function_() { let _e6 = atomicAdd((&global.member), 2u); let _e7 = atomicSub((&global.member), _e6); if (_e6 < arrayLength((&global_1.member))) { @@ -22,5 +22,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_i_add_sub() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_i_decrement.wgsl b/naga/tests/out/wgsl/spv-atomic_i_decrement.wgsl index e2f3bd83b6e..9d3fa4185ca 100644 --- a/naga/tests/out/wgsl/spv-atomic_i_decrement.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_i_decrement.wgsl @@ -11,7 +11,7 @@ var global: type_5; @group(0) @binding(1) var global_1: type_3; -fn function() { +fn function_() { var phi_33_: bool; loop { @@ -33,5 +33,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_i_decrement() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_i_increment.wgsl b/naga/tests/out/wgsl/spv-atomic_i_increment.wgsl index 139372776a3..46a236b61fc 100644 --- a/naga/tests/out/wgsl/spv-atomic_i_increment.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_i_increment.wgsl @@ -11,7 +11,7 @@ var global: type_4; @group(0) @binding(1) var global_1: type_2; -fn function() { +fn 
function_() { var phi_21_: u32; var phi_22_: u32; @@ -38,5 +38,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_i_increment() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-atomic_load_and_store.wgsl b/naga/tests/out/wgsl/spv-atomic_load_and_store.wgsl index 0525acb2232..d5fefefa13b 100644 --- a/naga/tests/out/wgsl/spv-atomic_load_and_store.wgsl +++ b/naga/tests/out/wgsl/spv-atomic_load_and_store.wgsl @@ -16,7 +16,7 @@ var global: type_5; @group(0) @binding(1) var global_1: type_3; -fn function() { +fn function_() { var phi_25_: type_2; var phi_40_: type_2; var phi_41_: type_2; @@ -68,5 +68,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn stage_test_atomic_load_and_store() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-barrier.wgsl b/naga/tests/out/wgsl/spv-barrier.wgsl index dc8ad2cbff3..5d7c74af8ce 100644 --- a/naga/tests/out/wgsl/spv-barrier.wgsl +++ b/naga/tests/out/wgsl/spv-barrier.wgsl @@ -1,4 +1,4 @@ -fn function() { +fn function_() { workgroupBarrier(); workgroupBarrier(); storageBarrier(); @@ -16,5 +16,5 @@ fn function() { @compute @workgroup_size(64, 1, 1) fn main() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-binding-arrays.dynamic.wgsl b/naga/tests/out/wgsl/spv-binding-arrays.dynamic.wgsl index 4b2f0792069..bb277d351e9 100644 --- a/naga/tests/out/wgsl/spv-binding-arrays.dynamic.wgsl +++ b/naga/tests/out/wgsl/spv-binding-arrays.dynamic.wgsl @@ -4,7 +4,7 @@ var global: binding_array>; var global_1: binding_array; var global_2: vec4; -fn function() { +fn function_() { let _e8 = textureSampleLevel(global[1i], global_1[1i], vec2(0.5f, 0.5f), 0f); global_2 = _e8; return; @@ -12,7 +12,7 @@ fn function() { @fragment fn main() -> @location(0) vec4 { - function(); + function_(); let _e1 = global_2; return _e1; } diff --git a/naga/tests/out/wgsl/spv-binding-arrays.static.wgsl b/naga/tests/out/wgsl/spv-binding-arrays.static.wgsl index 
bf87a16844a..f0a89c67807 100644 --- a/naga/tests/out/wgsl/spv-binding-arrays.static.wgsl +++ b/naga/tests/out/wgsl/spv-binding-arrays.static.wgsl @@ -4,7 +4,7 @@ var global: binding_array, 256>; var global_1: binding_array; var global_2: vec4; -fn function() { +fn function_() { let _e8 = textureSampleLevel(global[1i], global_1[1i], vec2(0.5f, 0.5f), 0f); global_2 = _e8; return; @@ -12,7 +12,7 @@ fn function() { @fragment fn main() -> @location(0) vec4 { - function(); + function_(); let _e1 = global_2; return _e1; } diff --git a/naga/tests/out/wgsl/spv-empty-global-name.wgsl b/naga/tests/out/wgsl/spv-empty-global-name.wgsl index 1be54db130a..f67f2ef344b 100644 --- a/naga/tests/out/wgsl/spv-empty-global-name.wgsl +++ b/naga/tests/out/wgsl/spv-empty-global-name.wgsl @@ -5,7 +5,7 @@ struct type_1 { @group(0) @binding(0) var unnamed: type_1; -fn function() { +fn function_() { let _e3 = unnamed.member; unnamed.member = (_e3 + 1i); return; @@ -13,5 +13,5 @@ fn function() { @compute @workgroup_size(1, 1, 1) fn main() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-fetch_depth.wgsl b/naga/tests/out/wgsl/spv-fetch_depth.wgsl index 17950e9b1e2..b12b2e6a41b 100644 --- a/naga/tests/out/wgsl/spv-fetch_depth.wgsl +++ b/naga/tests/out/wgsl/spv-fetch_depth.wgsl @@ -13,7 +13,7 @@ var global_1: type_4; @group(0) @binding(2) var global_2: texture_depth_2d; -fn function() { +fn function_() { let _e6 = global_1.member; let _e7 = textureLoad(global_2, _e6, 0i); global.member = vec4(_e7).x; @@ -22,5 +22,5 @@ fn function() { @compute @workgroup_size(32, 1, 1) fn cull_fetch_depth() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-subgroup-barrier.wgsl b/naga/tests/out/wgsl/spv-subgroup-barrier.wgsl index a26a73e8c3f..6f61db9c6f1 100644 --- a/naga/tests/out/wgsl/spv-subgroup-barrier.wgsl +++ b/naga/tests/out/wgsl/spv-subgroup-barrier.wgsl @@ -1,4 +1,4 @@ -fn function() { +fn function_() { subgroupBarrier(); subgroupBarrier(); return; @@ -6,5 +6,5 @@ 
fn function() { @compute @workgroup_size(64, 1, 1) fn main() { - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-subgroup-operations-s.wgsl b/naga/tests/out/wgsl/spv-subgroup-operations-s.wgsl index 4fe2aa6f2ca..1baf259e261 100644 --- a/naga/tests/out/wgsl/spv-subgroup-operations-s.wgsl +++ b/naga/tests/out/wgsl/spv-subgroup-operations-s.wgsl @@ -3,7 +3,7 @@ var global_1: u32; var global_2: u32; var global_3: u32; -fn function() { +fn function_() { let _e5 = global_2; let _e6 = global_3; let _e9 = subgroupBallot(((_e6 & 1u) == 1u)); @@ -36,5 +36,5 @@ fn main(@builtin(num_subgroups) param: u32, @builtin(subgroup_id) param_1: u32, global_1 = param_1; global_2 = param_2; global_3 = param_3; - function(); + function_(); } diff --git a/naga/tests/out/wgsl/spv-unnamed-gl-per-vertex.wgsl b/naga/tests/out/wgsl/spv-unnamed-gl-per-vertex.wgsl index 9f1b9ee6079..7cd34bcd197 100644 --- a/naga/tests/out/wgsl/spv-unnamed-gl-per-vertex.wgsl +++ b/naga/tests/out/wgsl/spv-unnamed-gl-per-vertex.wgsl @@ -8,7 +8,7 @@ struct type_4 { var global: type_4 = type_4(vec4(0f, 0f, 0f, 1f), 1f, array(), array()); var global_1: i32; -fn function() { +fn function_() { let _e9 = global_1; global.member = vec4(select(1f, -4f, (_e9 == 0i)), select(-1f, 4f, (_e9 == 2i)), 0f, 1f); return; @@ -17,7 +17,7 @@ fn function() { @vertex fn main(@builtin(vertex_index) param: u32) -> @builtin(position) vec4 { global_1 = i32(param); - function(); + function_(); let _e6 = global.member.y; global.member.y = -(_e6); let _e8 = global.member;