Skip to content

Commit

Permalink
fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Skgland committed Feb 25, 2021
1 parent be3c206 commit 5e93766
Showing 1 changed file with 69 additions and 12 deletions.
81 changes: 69 additions & 12 deletions crates/prolog_parser/tests/parse_tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,47 @@ use prolog_parser::ast::*;
use prolog_parser::lexer::{Lexer, Token};
use prolog_parser::tabled_rc::TabledData;

use std::fmt::{Debug, Formatter};
use std::rc::Rc;

/// Structural equality for two `Token`s, used because the parser's `Token`
/// type is compared through this helper rather than a derived `PartialEq`.
/// Payload-carrying variants compare their payloads; unit variants are equal
/// exactly when they are the same variant.
fn token_eq(l: &Token, r: &Token) -> bool {
    match (l, r) {
        // Payload variants: both sides must carry equal payloads.
        (Token::Constant(a), Token::Constant(b)) => a == b,
        (Token::Var(a), Token::Var(b)) => a == b,
        // Everything else is a unit variant (Open, OpenCT, Close, OpenList,
        // CloseList, OpenCurly, CloseCurly, HeadTailSeparator, Comma, End):
        // equal iff the discriminants match. Mismatched payload variants are
        // already rejected above, and mixed variants have distinct
        // discriminants, so this is exhaustive and exact.
        _ => std::mem::discriminant(l) == std::mem::discriminant(r),
    }
}

/// Element-wise equality of two token slices via [`token_eq`].
///
/// BUG FIX: the original only zipped the two slices, so `zip` stopped at the
/// shorter one and a slice compared equal to ANY longer slice it is a prefix
/// of (e.g. `[Open]` vs `[Open, Close]`). Slices are equal only when they
/// have the same length AND all corresponding elements match.
fn token_slice_eq(l: &[Token], r: &[Token]) -> bool {
    l.len() == r.len() && l.iter().zip(r.iter()).all(|(a, b)| token_eq(a, b))
}

/// Newtype ("Token Slice Wrapper") over a borrowed token slice so the tests
/// can use `assert_eq!` on token sequences: it supplies `Debug`, `PartialEq`
/// and `Eq` backed by `token_eq` — presumably because `Token` itself does not
/// derive those traits (TODO confirm against `prolog_parser::lexer`).
struct TSW<'a>(&'a [Token]);

impl<'a> Debug for TSW<'a> {
    /// Forward formatting to the wrapped slice so assertion failures print
    /// the underlying tokens, not the wrapper.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        Debug::fmt(&self.0, f)
    }
}

impl<'a> PartialEq for TSW<'a> {
fn eq(&self, other: &Self) -> bool {
token_slice_eq(self.0, other.0)
}
}

// Marker impl: `assert_eq!` only needs `PartialEq`, but `Eq` documents that
// the comparison is a full equivalence relation.
// NOTE(review): this assumes payload equality for `Constant`/`Var` is total
// (no float-like partial equality inside `Constant`) — confirm in the ast.
impl<'a> Eq for TSW<'a> {}

fn read_all_tokens(text: &str) -> Result<Vec<Token>, ParserError> {
let atom_tbl = TabledData::new(Rc::new("my_module".to_string()));
let flags = MachineFlags::default();
Expand All @@ -21,35 +60,44 @@ fn read_all_tokens(text: &str) -> Result<Vec<Token>, ParserError> {
#[test]
fn empty_multiline_comment() -> Result<(), ParserError> {
    // An empty `/**/` block comment must be skipped entirely,
    // leaving only the integer token `4`.
    let tokens = read_all_tokens("/**/ 4\n")?;
    // Diff-artifact fix: the stale pre-commit `assert_eq!(tokens, [...])`
    // line (which needs `Token: PartialEq`) is removed; only the
    // `TSW`-wrapped comparison remains.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Fixnum(4))])
    );
    Ok(())
}

#[test]
fn any_char_multiline_comment() -> Result<(), ParserError> {
    // A block comment may contain arbitrary (non-ASCII) characters;
    // the lexer must still skip it and tokenize only the `4`.
    let tokens = read_all_tokens("/* █╗╚═══╝ © */ 4\n")?;
    // Diff-artifact fix: stale pre-commit assert removed; keep the
    // `TSW`-wrapped comparison only.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Fixnum(4))])
    );
    Ok(())
}

#[test]
fn simple_char() -> Result<(), ParserError> {
    // A single-quoted character yields one `Char` constant token.
    let tokens = read_all_tokens("'a'\n")?;
    // Diff-artifact fix: stale pre-commit assert removed; keep the
    // `TSW`-wrapped comparison only.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Char('a'))])
    );
    Ok(())
}

#[test]
fn char_with_meta_seq() -> Result<(), ParserError> {
    // Meta escape sequences: backslash, quote, double quote, backtick.
    let tokens = read_all_tokens(r#"'\\' '\'' '\"' '\`' "#)?; // use literal string so \ are escaped
    // Diff-artifact fix: the pre-commit assert's `tokens,` / `[` / `]` lines
    // were interleaved with the new `TSW(...)` lines; reconstructed as the
    // single post-commit comparison.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[
            Token::Constant(Constant::Char('\\')),
            Token::Constant(Constant::Char('\'')),
            Token::Constant(Constant::Char('"')),
            Token::Constant(Constant::Char('`'))
        ])
    );
    Ok(())
}
Expand All @@ -58,38 +106,47 @@ fn char_with_meta_seq() -> Result<(), ParserError> {
fn char_with_control_seq() -> Result<(), ParserError> {
let tokens = read_all_tokens(r"'\a' '\b' '\r' '\f' '\t' '\n' '\v' ")?;
assert_eq!(
tokens,
[
TSW(tokens.as_slice()),
TSW(&[
Token::Constant(Constant::Char('\u{07}')),
Token::Constant(Constant::Char('\u{08}')),
Token::Constant(Constant::Char('\r')),
Token::Constant(Constant::Char('\u{0c}')),
Token::Constant(Constant::Char('\t')),
Token::Constant(Constant::Char('\n')),
Token::Constant(Constant::Char('\u{0b}')),
]
])
);
Ok(())
}

#[test]
fn char_with_octseq() -> Result<(), ParserError> {
    // Octal escape `\60433\` = 0o60433 = U+611B ('愛').
    let tokens = read_all_tokens(r"'\60433\' ")?;
    // Diff-artifact fix: stale pre-commit assert removed; keep the
    // `TSW`-wrapped comparison only.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Char('愛'))])
    ); // Japanese character
    Ok(())
}

#[test]
fn char_with_octseq_0() -> Result<(), ParserError> {
    // Boundary case: octal escape `\0\` yields the NUL character.
    let tokens = read_all_tokens(r"'\0\' ")?;
    // Diff-artifact fix: stale pre-commit assert removed; keep the
    // `TSW`-wrapped comparison only.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Char('\u{0000}'))])
    );
    Ok(())
}

#[test]
fn char_with_hexseq() -> Result<(), ParserError> {
    // Hex escape `\x2124\` = U+2124 ('ℤ').
    let tokens = read_all_tokens(r"'\x2124\' ")?;
    // Diff-artifact fix: stale pre-commit assert removed; keep the
    // `TSW`-wrapped comparison only.
    assert_eq!(
        TSW(tokens.as_slice()),
        TSW(&[Token::Constant(Constant::Char('ℤ'))])
    ); // Z math symbol
    Ok(())
}

Expand Down

0 comments on commit 5e93766

Please sign in to comment.