Skip to content

Commit

Permalink
test: add list folding lexer and parser tests
Browse files Browse the repository at this point in the history
  • Loading branch information
tohrnii committed Jan 31, 2023
1 parent 08763f9 commit 0d3b94e
Show file tree
Hide file tree
Showing 4 changed files with 592 additions and 3 deletions.
1 change: 0 additions & 1 deletion air-script-core/src/expression.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ pub enum Expression {

/// The reduction applied to a list comprehension when it is folded into a
/// single value. Each variant wraps the comprehension being folded.
///
/// NOTE(review): the commit header reports "0 additions & 1 deletion" for this
/// file and the scrape has lost the diff's +/- markers — presumably the `Expr`
/// variant below is the line being deleted by this commit; confirm against the
/// rendered diff before relying on this listing.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ListFoldingType {
Expr(ListComprehension),
// Fold by summation over the comprehension's elements.
Sum(ListComprehension),
// Fold by product over the comprehension's elements.
Prod(ListComprehension),
}
2 changes: 1 addition & 1 deletion ir/src/constraints/graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,7 @@ impl AlgebraicGraph {
lhs.domain(),
))
}
Expression::ListFolding(_) => todo!(),
}
}

Expand Down Expand Up @@ -228,7 +229,6 @@ impl AlgebraicGraph {
let lhs_base = self.accumulate_degree(cycles, lhs);
lhs_base * rhs
}
Expression::ListFolding(_) => todo!(),
}
}

Expand Down
76 changes: 76 additions & 0 deletions parser/src/lexer/tests/list_comprehension.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,3 +72,79 @@ fn multiple_iterables_comprehension() {
];
expect_valid_tokenization(source, tokens);
}

#[test]
fn one_iterable_folding() {
    // A `sum` list folding over a comprehension with a single iterable
    // should lex into the expected token stream.
    let source = "let y = sum([x for x in x])";
    // Shorthand for building identifier tokens.
    let ident = |name: &str| Token::Ident(name.to_string());
    let expected = vec![
        Token::Let,
        ident("y"),
        Token::Equal,
        Token::Sum,
        Token::Lparen,
        Token::Lsqb,
        ident("x"),
        Token::For,
        ident("x"),
        Token::In,
        ident("x"),
        Token::Rsqb,
        Token::Rparen,
    ];
    expect_valid_tokenization(source, expected);
}

#[test]
fn multiple_iterables_list_folding() {
    // A `sum` list folding whose comprehension binds a tuple of four
    // iterables (a range, two plain identifiers, and two sliced vectors)
    // should lex into the expected token stream.
    let source = "let a = sum([w + x - y - z for (w, x, y, z) in (0..3, x, y[0..3], z[0..3])])";
    // Shorthands for building identifier and numeric tokens.
    let ident = |name: &str| Token::Ident(name.to_string());
    let num = |digits: &str| Token::Num(digits.to_string());
    let expected = vec![
        Token::Let,
        ident("a"),
        Token::Equal,
        Token::Sum,
        Token::Lparen,
        Token::Lsqb,
        // folded expression: w + x - y - z
        ident("w"),
        Token::Plus,
        ident("x"),
        Token::Minus,
        ident("y"),
        Token::Minus,
        ident("z"),
        Token::For,
        // binding tuple: (w, x, y, z)
        Token::Lparen,
        ident("w"),
        Token::Comma,
        ident("x"),
        Token::Comma,
        ident("y"),
        Token::Comma,
        ident("z"),
        Token::Rparen,
        Token::In,
        // iterables: (0..3, x, y[0..3], z[0..3])
        Token::Lparen,
        num("0"),
        Token::Range,
        num("3"),
        Token::Comma,
        ident("x"),
        Token::Comma,
        ident("y"),
        Token::Lsqb,
        num("0"),
        Token::Range,
        num("3"),
        Token::Rsqb,
        Token::Comma,
        ident("z"),
        Token::Lsqb,
        num("0"),
        Token::Range,
        num("3"),
        Token::Rsqb,
        Token::Rparen,
        Token::Rsqb,
        Token::Rparen,
    ];
    expect_valid_tokenization(source, expected);
}
Loading

0 comments on commit 0d3b94e

Please sign in to comment.