Fix invalid syntax errors to allow `let` as a variable name (#3743)
raskad authored Mar 18, 2024
1 parent f3dfd58 commit 06b1c33
Showing 4 changed files with 56 additions and 26 deletions.
17 changes: 16 additions & 1 deletion core/parser/src/parser/statement/declaration/lexical.rs
@@ -8,7 +8,7 @@
//! [spec]: https://tc39.es/ecma262/#sec-let-and-const-declarations

use crate::{
- lexer::{Error as LexError, TokenKind},
+ lexer::{Error as LexError, Token, TokenKind},
parser::{
cursor::{Cursor, SemicolonResult},
expression::Initializer,
@@ -123,6 +123,21 @@ where
}
}

+ /// Check if the given token is valid after the `let` keyword of a lexical declaration.
+ pub(crate) fn allowed_token_after_let(token: Option<&Token>) -> bool {
+     matches!(
+         token.map(Token::kind),
+         Some(
+             TokenKind::IdentifierName(_)
+                 | TokenKind::Keyword((
+                     Keyword::Await | Keyword::Yield | Keyword::Let | Keyword::Async,
+                     _
+                 ))
+                 | TokenKind::Punctuator(Punctuator::OpenBlock | Punctuator::OpenBracket),
+         )
+     )
+ }

/// Parses a binding list.
///
/// It will return an error if a `const` declaration is being parsed and there is no
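The new `allowed_token_after_let` helper encodes the spec's lookahead rule: `let` only begins a lexical declaration when the next token could start a binding, i.e. an identifier (including `await`, `yield`, `async`, and `let`, which are identifiers in some contexts), `{`, or `[` for a destructuring pattern. Below is a minimal standalone sketch of that rule, using hypothetical simplified token types rather than Boa's lexer:

#[allow(dead_code)] // sketch only; the payloads exist for readability
enum Tok {
    Ident(&'static str), // `x`, but also `await`, `yield`, `async`, `let`
    Punct(char),         // `{` or `[` open a destructuring pattern
    Other(&'static str), // anything else: `=`, `in`, `.`, `(`, ...
}

// `let` starts a lexical declaration only when the next token could begin a binding.
fn starts_lexical_declaration(after_let: Option<&Tok>) -> bool {
    matches!(after_let, Some(Tok::Ident(_) | Tok::Punct('{' | '[')))
}

fn main() {
    // `let x = 1;`   -> lexical declaration
    assert!(starts_lexical_declaration(Some(&Tok::Ident("x"))));
    // `let [a] = b;` -> lexical declaration (array destructuring pattern)
    assert!(starts_lexical_declaration(Some(&Tok::Punct('['))));
    // `let = 5;`     -> sloppy-mode assignment to the identifier `let`
    assert!(!starts_lexical_declaration(Some(&Tok::Other("="))));
    // `let` at end of input -> expression statement
    assert!(!starts_lexical_declaration(None));
}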
2 changes: 1 addition & 1 deletion core/parser/src/parser/statement/declaration/mod.rs
@@ -20,7 +20,7 @@ pub(in crate::parser) use self::{
class_decl::ClassTail, ClassDeclaration, FunctionDeclaration, HoistableDeclaration,
},
import::ImportDeclaration,
- lexical::LexicalDeclaration,
+ lexical::{allowed_token_after_let, LexicalDeclaration},
};
use crate::{
lexer::TokenKind,
42 changes: 25 additions & 17 deletions core/parser/src/parser/statement/iteration/for_statement.rs
@@ -11,15 +11,19 @@ use crate::{
lexer::{Error as LexError, TokenKind},
parser::{
expression::{AssignmentExpression, Expression},
- statement::declaration::LexicalDeclaration,
- statement::{variable::VariableDeclarationList, Statement},
+ statement::{
+     declaration::{allowed_token_after_let, LexicalDeclaration},
+     variable::VariableDeclarationList,
+     Statement,
+ },
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
},
source::ReadChar,
Error,
};
use ast::{
declaration::Binding,
+ expression::Identifier,
operations::{bound_names, var_declared_names},
};
use boa_ast::{
@@ -107,7 +111,7 @@ where
}
};

- let init = match cursor.peek(0, interner).or_abrupt()?.kind() {
+ let init = match cursor.peek(0, interner).or_abrupt()?.kind().clone() {
TokenKind::Keyword((Keyword::Var, _)) => {
cursor.advance(interner);
Some(
@@ -116,20 +120,15 @@
.into(),
)
}
- TokenKind::Keyword((Keyword::Let, _)) => Some('exit: {
-     if !cursor.strict() {
-         if let Some(token) = cursor.peek(1, interner)? {
-             if token.kind() == &TokenKind::Keyword((Keyword::In, false)) {
-                 cursor.advance(interner);
-                 break 'exit boa_ast::Expression::Identifier(Sym::LET.into()).into();
-             }
-         }
-     }
-
-     LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
-         .parse(cursor, interner)?
-         .into()
- }),
+ TokenKind::Keyword((Keyword::Let, false))
+     if allowed_token_after_let(cursor.peek(1, interner)?) =>
+ {
+     Some(
+         LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
+             .parse(cursor, interner)?
+             .into(),
+     )
+ }
TokenKind::Keyword((Keyword::Const, _)) => Some(
LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
.parse(cursor, interner)?
@@ -174,6 +173,15 @@ where
));
}
(Some(init), TokenKind::Keyword((kw @ (Keyword::In | Keyword::Of), false))) => {
+ if kw == &Keyword::Of
+     && init
+         == ForLoopInitializer::Expression(ast::Expression::Identifier(
+             Identifier::new(Sym::LET),
+         ))
+ {
+     return Err(Error::general("unexpected token", position));
+ }

let in_loop = kw == &Keyword::In;
let init = initializer_to_iterable_loop_initializer(
init,
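The rewritten for-statement head uses the same lookahead, and the new check in the `In`/`Of` arm rejects `for (let of ...)`, which the spec forbids via a lookahead restriction. A quick behavioural sketch, assuming `boa_engine`'s public `Context::eval` and `Source::from_bytes` API (exact names may differ between versions):

use boa_engine::{Context, Source};

fn main() {
    let mut ctx = Context::default();

    // Sloppy mode: `let` followed by `in` is a plain identifier, so this is a
    // for-in loop whose target is the binding named `let`.
    assert!(ctx.eval(Source::from_bytes("for (let in { a: 1 }) {}")).is_ok());

    // `for (let of ...)` is a SyntaxError per the spec's lookahead restriction,
    // which the new explicit check in the In/Of arm enforces.
    assert!(ctx.eval(Source::from_bytes("for (let of []) {}")).is_err());

    // Followed by an identifier, `let` still starts a lexical declaration.
    assert!(ctx.eval(Source::from_bytes("for (let x of []) {}")).is_ok());
}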
21 changes: 14 additions & 7 deletions core/parser/src/parser/statement/mod.rs
@@ -26,7 +26,7 @@ use self::{
block::BlockStatement,
break_stm::BreakStatement,
continue_stm::ContinueStatement,
- declaration::{Declaration, ExportDeclaration, ImportDeclaration},
+ declaration::{allowed_token_after_let, Declaration, ExportDeclaration, ImportDeclaration},
expression::ExpressionStatement,
if_stm::IfStatement,
iteration::{DoWhileStatement, ForStatement, WhileStatement},
@@ -412,12 +412,19 @@ where
let _timer = Profiler::global().start_event("StatementListItem", "Parsing");
let tok = cursor.peek(0, interner).or_abrupt()?;

- match *tok.kind() {
-     TokenKind::Keyword(
-         (Keyword::Function | Keyword::Class | Keyword::Const, _) | (Keyword::Let, false),
-     ) => Declaration::new(self.allow_yield, self.allow_await)
-         .parse(cursor, interner)
-         .map(ast::StatementListItem::from),
+ match tok.kind().clone() {
+     TokenKind::Keyword((Keyword::Function | Keyword::Class | Keyword::Const, _)) => {
+         Declaration::new(self.allow_yield, self.allow_await)
+             .parse(cursor, interner)
+             .map(ast::StatementListItem::from)
+     }
+     TokenKind::Keyword((Keyword::Let, false))
+         if allowed_token_after_let(cursor.peek(1, interner)?) =>
+     {
+         Declaration::new(self.allow_yield, self.allow_await)
+             .parse(cursor, interner)
+             .map(ast::StatementListItem::from)
+     }
TokenKind::Keyword((Keyword::Async, false)) => {
let skip_n = if cursor.peek_is_line_terminator(0, interner).or_abrupt()? {
2
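At the statement-list level the dispatch is now the same: `let` only routes to `Declaration` when the following token can begin a binding, otherwise it falls through to the regular statement path and is parsed as an identifier in sloppy mode. A sketch of the intended behaviour, again assuming `boa_engine`'s `Context::eval` / `Source::from_bytes`:

use boa_engine::{Context, Source};

fn main() {
    let mut ctx = Context::default();

    // `let` followed by `=` or `.` is not in the allowed set, so these parse
    // as sloppy-mode expression statements using the identifier `let`.
    assert!(ctx.eval(Source::from_bytes("let = 42;")).is_ok());
    assert!(ctx.eval(Source::from_bytes("var let = 1; let.toString();")).is_ok());

    // Followed by an identifier, `let` still begins a lexical declaration,
    // and `let` itself is still rejected as a lexical binding name.
    assert!(ctx.eval(Source::from_bytes("let x = 1;")).is_ok());
    assert!(ctx.eval(Source::from_bytes("let let = 1;")).is_err());
}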
