From e78cebe6760eb6f9c99b0923344305d485319eb0 Mon Sep 17 00:00:00 2001
From: Eric MORAND
Date: Tue, 30 Jul 2019 11:24:18 +0200
Subject: [PATCH] Fix issue #12

---
 docs/assets/js/search.js          |   2 +-
 docs/classes/lexer.html           |  21 +-
 docs/classes/syntaxerror.html     |  17 +-
 docs/classes/token.html           |  99 +++--
 docs/classes/tokenstream.html     | 571 ++++++++++++++++++++++++++++++
 docs/enums/tokentype.html         |  47 +--
 docs/globals.html                 |  24 +-
 docs/index.html                   |   3 +
 src/index.ts                      |   1 +
 src/lib/Lexer.ts                  |  10 +-
 src/lib/Token.ts                  |  42 +--
 src/lib/TokenStream.ts            | 200 +++++++++++
 src/lib/TokenType.ts              |  78 ++--
 test/unit/lib/Lexer/test.ts       |   8 +-
 test/unit/lib/TokenStream/test.ts | 189 ++++++++++
 15 files changed, 1150 insertions(+), 162 deletions(-)
 create mode 100644 docs/classes/tokenstream.html
 create mode 100644 src/lib/TokenStream.ts
 create mode 100644 test/unit/lib/TokenStream/test.ts
tsd-parent-kind-enum","parent":"TokenType"},{"id":15,"kind":16,"name":"TAG_START","url":"enums/tokentype.html#tag_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":16,"kind":16,"name":"TEST_OPERATOR","url":"enums/tokentype.html#test_operator","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":17,"kind":16,"name":"TEXT","url":"enums/tokentype.html#text","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":18,"kind":16,"name":"TRIMMING_MODIFIER","url":"enums/tokentype.html#trimming_modifier","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":19,"kind":16,"name":"VARIABLE_END","url":"enums/tokentype.html#variable_end","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":20,"kind":16,"name":"VARIABLE_START","url":"enums/tokentype.html#variable_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":21,"kind":16,"name":"WHITESPACE","url":"enums/tokentype.html#whitespace","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":22,"kind":16,"name":"ARROW","url":"enums/tokentype.html#arrow","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":23,"kind":64,"name":"typeToString","url":"globals.html#typetostring","classes":"tsd-kind-function"},{"id":24,"kind":128,"name":"Token","url":"classes/token.html","classes":"tsd-kind-class"},{"id":25,"kind":512,"name":"constructor","url":"classes/token.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"Token"},{"id":26,"kind":2048,"name":"test","url":"classes/token.html#test","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":27,"kind":2048,"name":"getLine","url":"classes/token.html#getline","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":28,"kind":2048,"name":"getColumn","url":"classes/token.html#getcolumn","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":29,"kind":2048,"name":"getType","url":"classes/token.html#gettype","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":30,"kind":2048,"name":"getValue","url":"classes/token.html#getvalue","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":31,"kind":2048,"name":"toString","url":"classes/token.html#tostring","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":32,"kind":2048,"name":"serialize","url":"classes/token.html#serialize","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":33,"kind":128,"name":"SyntaxError","url":"classes/syntaxerror.html","classes":"tsd-kind-class"},{"id":34,"kind":1024,"name":"line","url":"classes/syntaxerror.html#line","classes":"tsd-kind-property tsd-parent-kind-class","parent":"SyntaxError"},{"id":35,"kind":1024,"name":"column","url":"classes/syntaxerror.html#column","classes":"tsd-kind-property tsd-parent-kind-class","parent":"SyntaxError"},{"id":36,"kind":512,"name":"constructor","url":"classes/syntaxerror.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"SyntaxError"},{"id":37,"kind":1024,"name":"name","url":"classes/syntaxerror.html#name","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-inherited","parent":"SyntaxError"},{"id":38,"kind":1024,"name":"message","url":"classes/syntaxerror.html#message","classes":"tsd-kind-property tsd-parent-kind-class 
tsd-is-inherited","parent":"SyntaxError"},{"id":39,"kind":1024,"name":"stack","url":"classes/syntaxerror.html#stack","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-overwrite tsd-is-inherited","parent":"SyntaxError"},{"id":40,"kind":1024,"name":"Error","url":"classes/syntaxerror.html#error","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-static","parent":"SyntaxError"},{"id":41,"kind":128,"name":"Lexer","url":"classes/lexer.html","classes":"tsd-kind-class"},{"id":42,"kind":1024,"name":"testOperators","url":"classes/lexer.html#testoperators","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":43,"kind":1024,"name":"arrowOperator","url":"classes/lexer.html#arrowoperator","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":44,"kind":1024,"name":"operators","url":"classes/lexer.html#operators","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":45,"kind":1024,"name":"tagPair","url":"classes/lexer.html#tagpair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":46,"kind":1024,"name":"commentPair","url":"classes/lexer.html#commentpair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":47,"kind":1024,"name":"interpolationPair","url":"classes/lexer.html#interpolationpair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":48,"kind":1024,"name":"variablePair","url":"classes/lexer.html#variablepair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":49,"kind":512,"name":"constructor","url":"classes/lexer.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"Lexer"},{"id":50,"kind":2048,"name":"tokenize","url":"classes/lexer.html#tokenize","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Lexer"},{"id":51,"kind":32,"name":"bracketPairs","url":"globals.html#bracketpairs","classes":"tsd-kind-variable"},{"id":52,"kind":32,"name":"doubleQuotedStringContentPattern","url":"globals.html#doublequotedstringcontentpattern","classes":"tsd-kind-variable"},{"id":53,"kind":32,"name":"doubleQuotedStringDelimiterPattern","url":"globals.html#doublequotedstringdelimiterpattern","classes":"tsd-kind-variable"},{"id":54,"kind":32,"name":"lineSeparators","url":"globals.html#lineseparators","classes":"tsd-kind-variable"},{"id":55,"kind":32,"name":"namePattern","url":"globals.html#namepattern","classes":"tsd-kind-variable"},{"id":56,"kind":32,"name":"numberPattern","url":"globals.html#numberpattern","classes":"tsd-kind-variable"},{"id":57,"kind":32,"name":"punctuationPattern","url":"globals.html#punctuationpattern","classes":"tsd-kind-variable"},{"id":58,"kind":32,"name":"stringPattern","url":"globals.html#stringpattern","classes":"tsd-kind-variable"},{"id":59,"kind":32,"name":"whitespacePattern","url":"globals.html#whitespacepattern","classes":"tsd-kind-variable"}]}; \ No newline at end of file + typedoc.search.data = {"kinds":{"4":"Enumeration","16":"Enumeration member","32":"Variable","64":"Function","128":"Class","512":"Constructor","1024":"Property","2048":"Method","262144":"Accessor"},"rows":[{"id":0,"kind":4,"name":"TokenType","url":"enums/tokentype.html","classes":"tsd-kind-enum"},{"id":1,"kind":16,"name":"CLOSING_QUOTE","url":"enums/tokentype.html#closing_quote","classes":"tsd-kind-enum-member 
tsd-parent-kind-enum","parent":"TokenType"},{"id":2,"kind":16,"name":"COMMENT_END","url":"enums/tokentype.html#comment_end","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":3,"kind":16,"name":"COMMENT_START","url":"enums/tokentype.html#comment_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":4,"kind":16,"name":"EOF","url":"enums/tokentype.html#eof","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":5,"kind":16,"name":"INTERPOLATION_START","url":"enums/tokentype.html#interpolation_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":6,"kind":16,"name":"INTERPOLATION_END","url":"enums/tokentype.html#interpolation_end","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":7,"kind":16,"name":"LINE_TRIMMING_MODIFIER","url":"enums/tokentype.html#line_trimming_modifier","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":8,"kind":16,"name":"NAME","url":"enums/tokentype.html#name","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":9,"kind":16,"name":"NUMBER","url":"enums/tokentype.html#number","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":10,"kind":16,"name":"OPENING_QUOTE","url":"enums/tokentype.html#opening_quote","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":11,"kind":16,"name":"OPERATOR","url":"enums/tokentype.html#operator","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":12,"kind":16,"name":"PUNCTUATION","url":"enums/tokentype.html#punctuation","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":13,"kind":16,"name":"STRING","url":"enums/tokentype.html#string","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":14,"kind":16,"name":"TAG_END","url":"enums/tokentype.html#tag_end","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":15,"kind":16,"name":"TAG_START","url":"enums/tokentype.html#tag_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":16,"kind":16,"name":"TEST_OPERATOR","url":"enums/tokentype.html#test_operator","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":17,"kind":16,"name":"TEXT","url":"enums/tokentype.html#text","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":18,"kind":16,"name":"TRIMMING_MODIFIER","url":"enums/tokentype.html#trimming_modifier","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":19,"kind":16,"name":"VARIABLE_END","url":"enums/tokentype.html#variable_end","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":20,"kind":16,"name":"VARIABLE_START","url":"enums/tokentype.html#variable_start","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":21,"kind":16,"name":"WHITESPACE","url":"enums/tokentype.html#whitespace","classes":"tsd-kind-enum-member tsd-parent-kind-enum","parent":"TokenType"},{"id":22,"kind":16,"name":"ARROW","url":"enums/tokentype.html#arrow","classes":"tsd-kind-enum-member 
tsd-parent-kind-enum","parent":"TokenType"},{"id":23,"kind":64,"name":"typeToString","url":"globals.html#typetostring","classes":"tsd-kind-function"},{"id":24,"kind":128,"name":"Token","url":"classes/token.html","classes":"tsd-kind-class"},{"id":25,"kind":512,"name":"constructor","url":"classes/token.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"Token"},{"id":26,"kind":2048,"name":"test","url":"classes/token.html#test","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":27,"kind":262144,"name":"line","url":"classes/token.html#line","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"Token"},{"id":28,"kind":262144,"name":"column","url":"classes/token.html#column","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"Token"},{"id":29,"kind":262144,"name":"type","url":"classes/token.html#type","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"Token"},{"id":30,"kind":262144,"name":"value","url":"classes/token.html#value","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"Token"},{"id":31,"kind":2048,"name":"toString","url":"classes/token.html#tostring","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":32,"kind":2048,"name":"serialize","url":"classes/token.html#serialize","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Token"},{"id":33,"kind":128,"name":"SyntaxError","url":"classes/syntaxerror.html","classes":"tsd-kind-class"},{"id":34,"kind":1024,"name":"line","url":"classes/syntaxerror.html#line","classes":"tsd-kind-property tsd-parent-kind-class","parent":"SyntaxError"},{"id":35,"kind":1024,"name":"column","url":"classes/syntaxerror.html#column","classes":"tsd-kind-property tsd-parent-kind-class","parent":"SyntaxError"},{"id":36,"kind":512,"name":"constructor","url":"classes/syntaxerror.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"SyntaxError"},{"id":37,"kind":1024,"name":"name","url":"classes/syntaxerror.html#name","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-inherited","parent":"SyntaxError"},{"id":38,"kind":1024,"name":"message","url":"classes/syntaxerror.html#message","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-inherited","parent":"SyntaxError"},{"id":39,"kind":1024,"name":"stack","url":"classes/syntaxerror.html#stack","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-overwrite tsd-is-inherited","parent":"SyntaxError"},{"id":40,"kind":1024,"name":"Error","url":"classes/syntaxerror.html#error","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-static","parent":"SyntaxError"},{"id":41,"kind":128,"name":"Lexer","url":"classes/lexer.html","classes":"tsd-kind-class"},{"id":42,"kind":1024,"name":"testOperators","url":"classes/lexer.html#testoperators","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":43,"kind":1024,"name":"arrowOperator","url":"classes/lexer.html#arrowoperator","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":44,"kind":1024,"name":"operators","url":"classes/lexer.html#operators","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":45,"kind":1024,"name":"tagPair","url":"classes/lexer.html#tagpair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":46,"kind":1024,"name":"commentPair","url":"classes/lexer.html#commentpair","classes":"tsd-kind-property tsd-parent-kind-class 
tsd-is-protected","parent":"Lexer"},{"id":47,"kind":1024,"name":"interpolationPair","url":"classes/lexer.html#interpolationpair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":48,"kind":1024,"name":"variablePair","url":"classes/lexer.html#variablepair","classes":"tsd-kind-property tsd-parent-kind-class tsd-is-protected","parent":"Lexer"},{"id":49,"kind":512,"name":"constructor","url":"classes/lexer.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"Lexer"},{"id":50,"kind":2048,"name":"tokenize","url":"classes/lexer.html#tokenize","classes":"tsd-kind-method tsd-parent-kind-class","parent":"Lexer"},{"id":51,"kind":32,"name":"bracketPairs","url":"globals.html#bracketpairs","classes":"tsd-kind-variable"},{"id":52,"kind":32,"name":"doubleQuotedStringContentPattern","url":"globals.html#doublequotedstringcontentpattern","classes":"tsd-kind-variable"},{"id":53,"kind":32,"name":"doubleQuotedStringDelimiterPattern","url":"globals.html#doublequotedstringdelimiterpattern","classes":"tsd-kind-variable"},{"id":54,"kind":32,"name":"lineSeparators","url":"globals.html#lineseparators","classes":"tsd-kind-variable"},{"id":55,"kind":32,"name":"namePattern","url":"globals.html#namepattern","classes":"tsd-kind-variable"},{"id":56,"kind":32,"name":"numberPattern","url":"globals.html#numberpattern","classes":"tsd-kind-variable"},{"id":57,"kind":32,"name":"punctuationPattern","url":"globals.html#punctuationpattern","classes":"tsd-kind-variable"},{"id":58,"kind":32,"name":"stringPattern","url":"globals.html#stringpattern","classes":"tsd-kind-variable"},{"id":59,"kind":32,"name":"whitespacePattern","url":"globals.html#whitespacepattern","classes":"tsd-kind-variable"},{"id":60,"kind":128,"name":"TokenStream","url":"classes/tokenstream.html","classes":"tsd-kind-class"},{"id":61,"kind":512,"name":"constructor","url":"classes/tokenstream.html#constructor","classes":"tsd-kind-constructor tsd-parent-kind-class","parent":"TokenStream"},{"id":62,"kind":262144,"name":"current","url":"classes/tokenstream.html#current","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"TokenStream"},{"id":63,"kind":262144,"name":"tokens","url":"classes/tokenstream.html#tokens","classes":"tsd-kind-get-signature tsd-parent-kind-class","parent":"TokenStream"},{"id":64,"kind":2048,"name":"toString","url":"classes/tokenstream.html#tostring","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":65,"kind":2048,"name":"serialize","url":"classes/tokenstream.html#serialize","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":66,"kind":2048,"name":"toAst","url":"classes/tokenstream.html#toast","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":67,"kind":2048,"name":"injectTokens","url":"classes/tokenstream.html#injecttokens","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":68,"kind":2048,"name":"rewind","url":"classes/tokenstream.html#rewind","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":69,"kind":2048,"name":"next","url":"classes/tokenstream.html#next","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":70,"kind":2048,"name":"nextIf","url":"classes/tokenstream.html#nextif","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"},{"id":71,"kind":2048,"name":"look","url":"classes/tokenstream.html#look","classes":"tsd-kind-method 
tsd-parent-kind-class","parent":"TokenStream"},{"id":72,"kind":2048,"name":"test","url":"classes/tokenstream.html#test","classes":"tsd-kind-method tsd-parent-kind-class","parent":"TokenStream"}]}; \ No newline at end of file diff --git a/docs/classes/lexer.html b/docs/classes/lexer.html index 1725413..b558476 100644 --- a/docs/classes/lexer.html +++ b/docs/classes/lexer.html @@ -115,7 +115,7 @@

constructor

  • @@ -137,7 +137,7 @@

    Protected arrowOperator<
    arrowOperator: [string]
    @@ -152,7 +152,7 @@

    Protected commentPaircommentPair: [string, string]

    @@ -167,7 +167,7 @@

    Protected interpolationP
    interpolationPair: [string, string]
    @@ -182,7 +182,7 @@

    Protected operators

    operators: string[]
    @@ -197,7 +197,7 @@

    Protected tagPair

    tagPair: [string, string]
    @@ -212,7 +212,7 @@

    Protected testOperators<
    testOperators: [string, string]
    @@ -227,7 +227,7 @@

    Protected variablePairvariablePair: [string, string]

    @@ -249,7 +249,7 @@

    tokenize

  • @@ -326,6 +326,9 @@

    Returns Token

  • +
  • + TokenStream +
  • bracketPairs
  • diff --git a/docs/classes/syntaxerror.html b/docs/classes/syntaxerror.html index a7b7109..629f92a 100644 --- a/docs/classes/syntaxerror.html +++ b/docs/classes/syntaxerror.html @@ -121,7 +121,7 @@

    constructor

  • @@ -170,7 +170,7 @@

    column

    column: number
    @@ -185,7 +185,7 @@

    line

    line: number
    @@ -201,7 +201,7 @@

    message

    @@ -212,7 +212,7 @@

    name

    @@ -224,7 +224,7 @@

    Optional stack

    Inherited from Error.stack

    Overrides Error.stack

      -
    • Defined in /home/ericmorand/Projects/twig-lexer/node_modules/typedoc/node_modules/typescript/lib/lib.es5.d.ts:965
    • +
    • Defined in /home/ericmorand-private/Projects/twig-lexer/node_modules/typedoc/node_modules/typescript/lib/lib.es5.d.ts:965
    @@ -234,7 +234,7 @@

    Static Error

    Error: ErrorConstructor
    @@ -289,6 +289,9 @@

    Static Error

  • Token
  • +
  • + TokenStream +
  • bracketPairs
  • diff --git a/docs/classes/token.html b/docs/classes/token.html index ff13893..8d43256 100644 --- a/docs/classes/token.html +++ b/docs/classes/token.html @@ -82,13 +82,18 @@

    Constructors

  • constructor
  • +
    +

    Accessors

    + +

    Methods

    @@ -146,7 +146,7 @@

    EOF

    EOF: = "EOF"
    @@ -156,7 +156,7 @@

    INTERPOLATION_END

    INTERPOLATION_END: = "INTERPOLATION_END"
    @@ -166,7 +166,7 @@

    INTERPOLATION_START

    INTERPOLATION_START: = "INTERPOLATION_START"
    @@ -176,7 +176,7 @@

    LINE_TRIMMING_MODIFIER

    LINE_TRIMMING_MODIFIER: = "LINE_TRIMMING_MODIFIER"
    @@ -186,7 +186,7 @@

    NAME

    NAME: = "NAME"
    @@ -196,7 +196,7 @@

    NUMBER

    NUMBER: = "NUMBER"
    @@ -206,7 +206,7 @@

    OPENING_QUOTE

    OPENING_QUOTE: = "OPENING_QUOTE"
    @@ -216,7 +216,7 @@

    OPERATOR

    OPERATOR: = "OPERATOR"
    @@ -226,7 +226,7 @@

    PUNCTUATION

    PUNCTUATION: = "PUNCTUATION"
    @@ -236,7 +236,7 @@

    STRING

    STRING: = "STRING"
    @@ -246,7 +246,7 @@

    TAG_END

    TAG_END: = "TAG_END"
    @@ -256,7 +256,7 @@

    TAG_START

    TAG_START: = "TAG_START"
    @@ -266,7 +266,7 @@

    TEST_OPERATOR

    TEST_OPERATOR: = "TEST_OPERATOR"
    @@ -276,7 +276,7 @@

    TEXT

    TEXT: = "TEXT"
    @@ -286,7 +286,7 @@

    TRIMMING_MODIFIER

    TRIMMING_MODIFIER: = "TRIMMING_MODIFIER"
    @@ -296,7 +296,7 @@

    VARIABLE_END

    VARIABLE_END: = "VARIABLE_END"
    @@ -306,7 +306,7 @@

    VARIABLE_START

    VARIABLE_START: = "VARIABLE_START"
    @@ -316,7 +316,7 @@

    WHITESPACE

    WHITESPACE: = "WHITESPACE"
    @@ -416,6 +416,9 @@

    WHITESPACE

  • Token
  • +
  • + TokenStream +
  • bracketPairs
  • diff --git a/docs/globals.html b/docs/globals.html index 5880527..08847e6 100644 --- a/docs/globals.html +++ b/docs/globals.html @@ -77,6 +77,7 @@

    Classes

  • Lexer
  • SyntaxError
  • Token
  • +
  • TokenStream
  • @@ -110,7 +111,7 @@

    Const bracketPairs

    bracketPairs: [string, string][] = [['(', ')'], ['{', '}'], ['[', ']']]
    @@ -125,7 +126,7 @@

    Const doubleQuotedStrin
    doubleQuotedStringContentPattern: string = "[^#"\\]*(?:(?:\\\\.|#(?!{))[^#"\\]*)*"
    @@ -140,7 +141,7 @@

    Const doubleQuotedStrin
    doubleQuotedStringDelimiterPattern: string = """
    @@ -155,7 +156,7 @@

    Const lineSeparators

    lineSeparators: string[] = ['\\r\\n', '\\r', '\\n']
    @@ -170,7 +171,7 @@

    Const namePattern

    namePattern: string = "[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*"
    @@ -185,7 +186,7 @@

    Const numberPattern

    numberPattern: string = "[0-9]+(?:\.[0-9]+)?"
    @@ -200,7 +201,7 @@

    Const punctuationPattern

    punctuationPattern: string = "[?:.,|]"
    @@ -215,7 +216,7 @@

    Const stringPattern

    stringPattern: string = "(")([^#"\\]*(?:\\.[^#"\\]*)*)(")|^(')([^'\\]*(?:\\.[^'\\]*)*)(')"
    @@ -230,7 +231,7 @@

    Const whitespacePattern

    whitespacePattern: string = "[ \r\n\t\f\v]+"
    @@ -252,7 +253,7 @@

    typeToString

  • @@ -304,6 +305,9 @@

    Returns string Token

  • +
  • + TokenStream +
  • bracketPairs
  • diff --git a/docs/index.html b/docs/index.html index b713d97..398b070 100644 --- a/docs/index.html +++ b/docs/index.html @@ -122,6 +122,9 @@

    License

  • Token
  • +
  • + TokenStream +
  • bracketPairs
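The source changes follow. They introduce a TokenStream class that wraps an array of Token instances and provides traversal, lookahead and whitespace-control handling. A minimal usage sketch, pieced together from the new unit tests at the end of this patch; importing from the twig-lexer package entry point is an assumption, inside the repository the same symbols are exported from src/index.ts:

    import {Token, TokenStream, TokenType} from "twig-lexer";

    // Build a stream from hand-made tokens, exactly as the new unit tests do.
    let stream = new TokenStream([
        new Token(TokenType.TAG_START, '{%', 1, 1),
        new Token(TokenType.NAME, 'if', 1, 4),
        new Token(TokenType.TAG_END, '%}', 1, 7),
        new Token(TokenType.EOF, null, 1, 9)
    ]);

    stream.test(TokenType.TAG_START);               // test the current token: true
    stream.next();                                  // advance; returns the token that was current
    let name = stream.nextIf(TokenType.NAME, 'if'); // the NAME token, or null if the test fails
    let ahead = stream.look();                      // peek at the next token without advancing
    stream.rewind();                                // back to the first token

    let ast: Token[] = stream.toAst();              // only the tokens relevant for rendering, EOF included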
diff --git a/src/index.ts b/src/index.ts
index 670bee7..be606ca 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,4 +1,5 @@
 export * from './lib/Lexer';
 export * from './lib/SyntaxError';
 export * from './lib/Token';
+export * from './lib/TokenStream';
 export * from './lib/TokenType';
diff --git a/src/lib/Lexer.ts b/src/lib/Lexer.ts
index af6bca3..bd1d53c 100644
--- a/src/lib/Lexer.ts
+++ b/src/lib/Lexer.ts
@@ -1,6 +1,6 @@
 import {Token} from "./Token";
-import {SyntaxError} from "./SyntaxError";
 import {TokenType} from "./TokenType";
+import {SyntaxError} from "./SyntaxError";
 
 enum LexerState {
     COMMENT = 'COMMENT',
@@ -119,12 +119,12 @@ export class Lexer {
      * The test operators.
      */
     protected testOperators: [string, string];
-    
+
     /**
      * The arrow operator.
      */
-    protected arrowOperator: [string];
-    
+    protected arrowOperator: [string];
+
     /**
      * The supported operators.
      */
@@ -646,4 +646,4 @@ export class Lexer {
     private popScope() {
         this.scope = this.scopes.pop();
     }
-}
+}
\ No newline at end of file
diff --git a/src/lib/Token.ts b/src/lib/Token.ts
index 6045998..7ee2a90 100644
--- a/src/lib/Token.ts
+++ b/src/lib/Token.ts
@@ -1,10 +1,10 @@
 import {TokenType, typeToString} from "./TokenType";
 
 export class Token {
-    private type: TokenType;
-    private value: any;
-    private line: number;
-    private column: number;
+    private readonly _type: TokenType;
+    private readonly _value: any;
+    private readonly _line: number;
+    private readonly _column: number;
 
     /**
      * @constructor
@@ -14,21 +14,21 @@
      * @param {number} column The column where the token is located in the source
      */
    constructor(type: TokenType, value: any, line: number, column: number) {
-        this.type = type;
-        this.value = value;
-        this.line = line;
-        this.column = column;
+        this._type = type;
+        this._value = value;
+        this._line = line;
+        this._column = column;
    }
 
     /**
-     * Tests the current token for a type and/or a content.
+     * Test the token for a type and/or a content.
      *
      * @param {TokenType} type
-     * @param {string|string[]|number} values
+     * @param {string|string[]|number} value
      * @returns {boolean}
      */
-    public test(type: TokenType, values: string | string[] | number = null) {
-        return (this.type === type) && (values === null || (Array.isArray(values) && values.includes(this.value)) || this.value == values);
+    public test(type: TokenType, value: string | string[] | number = null) {
+        return (this._type === type) && (value === null || (Array.isArray(value) && value.includes(this._value)) || this._value == value);
    }
 
     /**
@@ -36,8 +36,8 @@
      *
      * @return {number}
      */
-    public getLine(): number {
-        return this.line;
+    get line(): number {
+        return this._line;
    }
 
     /**
@@ -45,8 +45,8 @@
      *
      * @return {number}
      */
-    public getColumn(): number {
-        return this.column;
+    get column(): number {
+        return this._column;
    }
 
     /**
@@ -54,8 +54,8 @@
      *
      * @return {TokenType}
      */
-    public getType(): TokenType {
-        return this.type;
+    get type(): TokenType {
+        return this._type;
    }
 
     /**
@@ -63,8 +63,8 @@
      *
      * @return {*}
      */
-    public getValue(): any {
-        return this.value;
+    get value(): any {
+        return this._value;
    }
 
     /**
@@ -84,4 +84,4 @@
    public serialize(): string {
        return this.value;
    }
-}
+}
\ No newline at end of file
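For consumers of Token, the change above replaces the getter methods with read-only accessors. A small migration sketch; describeToken is a hypothetical helper, not part of the patch:

    import {Token} from "./Token";

    // Before this patch: token.getType(), token.getValue(), token.getLine(), token.getColumn()
    // After this patch: read-only accessors
    const describeToken = (token: Token): string => {
        return `${token.type}(${token.value}) at line ${token.line}, column ${token.column}`;
    };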
diff --git a/src/lib/TokenStream.ts b/src/lib/TokenStream.ts
new file mode 100644
index 0000000..661306e
--- /dev/null
+++ b/src/lib/TokenStream.ts
@@ -0,0 +1,200 @@
+import {TokenType} from "./TokenType";
+import {Token} from "./Token";
+
+const safeCChars: Array<string> = ['b', 'f', 'n', 'r', 't', 'v', '0', '\'', '"', '\\'];
+
+const stripcslashes = function (string: string) {
+    return string.replace(/\\(.)/g, function (match, char) {
+        if (safeCChars.includes(char)) {
+            return new Function('return "' + match + '"')();
+        } else {
+            return char;
+        }
+    });
+};
+
+export class TokenStream {
+    private _tokens: Array<Token>;
+    private _current: number = 0;
+
+    constructor(tokens: Array<Token>) {
+        this._tokens = tokens;
+    }
+
+    /**
+     * @return {Token}
+     */
+    get current(): Token {
+        return this._tokens[this._current];
+    }
+
+    /**
+     * @return {Token[]}
+     */
+    get tokens(): Token[] {
+        return this._tokens;
+    }
+
+    toString() {
+        return this.tokens.map(function (token: Token) {
+            return token.toString();
+        }).join('\n');
+    }
+
+    /**
+     * Serialize the stream to a Twig string.
+     *
+     * @return {string}
+     */
+    serialize() {
+        return this.tokens.map(function (token: Token) {
+            return token.serialize();
+        }).join('');
+    }
+
+    /**
+     * Construct and return a list of tokens relevant to render a Twig template.
+     *
+     * @return {Token[]}
+     */
+    toAst(): Token[] {
+        let tokens: Token[] = [];
+
+        while (!this.test(TokenType.EOF)) {
+            let current: Token = this.current;
+
+            if (!this.test(TokenType.WHITESPACE) &&
+                !this.test(TokenType.TRIMMING_MODIFIER) &&
+                !this.test(TokenType.LINE_TRIMMING_MODIFIER)) {
+                let tokenValue: string = current.value;
+
+                if (this.test(TokenType.TEXT) || this.test(TokenType.STRING)) {
+                    // strip C slashes
+                    tokenValue = stripcslashes(tokenValue);
+                    // streamline line separators
+                    tokenValue = tokenValue.replace(/\r\n|\r/g, '\n');
+                } else if (this.test(TokenType.OPERATOR)) {
+                    // remove unnecessary operator spaces
+                    tokenValue = tokenValue.replace(/\s+/, ' ');
+                }
+
+                // handle whitespace control modifiers
+                let wstCandidate: Token;
+
+                wstCandidate = this.look(2);
+
+                if (wstCandidate) {
+                    if (wstCandidate.type === TokenType.TRIMMING_MODIFIER) {
+                        tokenValue = tokenValue.replace(/\s*$/, '');
+                    }
+
+                    if (wstCandidate.type === TokenType.LINE_TRIMMING_MODIFIER) {
+                        tokenValue = tokenValue.replace(/[ \t\0\x0B]*$/, '');
+                    }
+                }
+
+                wstCandidate = this.look(-2);
+
+                if (wstCandidate) {
+                    if (wstCandidate.type === TokenType.TRIMMING_MODIFIER) {
+                        tokenValue = tokenValue.replace(/^\s*/, '');
+                    }
+
+                    if (wstCandidate.type === TokenType.LINE_TRIMMING_MODIFIER) {
+                        tokenValue = tokenValue.replace(/^[ \t\0\x0B]*/, '');
+                    }
+                }
+
+                // don't push empty TEXT tokens
+                if (!this.test(TokenType.TEXT) || (tokenValue.length > 0)) {
+                    tokens.push(new Token(current.type, tokenValue, current.line, current.column));
+                }
+            }
+
+            this.next();
+        }
+
+        // EOF
+        let current: Token = this.current;
+
+        tokens.push(new Token(
+            current.type,
+            current.value,
+            current.line,
+            current.column
+        ));
+
+        return tokens;
+    }
+
+    /**
+     * Inject tokens after the current one.
+     *
+     * @param tokens
+     */
+    injectTokens(tokens: Token[]) {
+        this._tokens.splice(this._current, 0, ...tokens);
+    }
+
+    rewind() {
+        this._current = 0;
+    }
+
+    /**
+     * Set the pointer to the next token and return the previous one.
+     *
+     * @return {Token}
+     */
+    next() {
+        let token = this.current;
+
+        this._current++;
+
+        if (this._current >= this.tokens.length) {
+            return null;
+        }
+
+        return token;
+    }
+
+    /**
+     * Test the current token, then, if the test is successful, set the pointer to the next one and return the tested token.
+     *
+     * @return {Token} The next token if the condition is true, null otherwise
+     */
+    nextIf(primary: TokenType, secondary: Array<string> | string = null): Token {
+        if (this.current.test(primary, secondary)) {
+            return this.next();
+        }
+
+        return null;
+    }
+
+    /**
+     * Look at the next token.
+     *
+     * @param {number} number
+     *
+     * @return {Token}
+     */
+    look(number: number = 1) {
+        let index = this._current + number;
+
+        if ((index >= this.tokens.length) || (index < 0)) {
+            return null;
+        }
+
+        return this.tokens[index];
+    }
+
+    /**
+     * Test the current token.
+     *
+     * @param {TokenType} type
+     * @param {string|string[]|number} value
+     * @returns {boolean}
+     */
+    test(type: TokenType, value: string | string[] | number = null) {
+        return this.current.test(type, value);
+    }
+}
\ No newline at end of file
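toAst is where the whitespace-control modifiers are resolved and non-relevant tokens are dropped. A sketch of its effect, mirroring one of the test cases added at the end of this patch:

    import {Token} from "./Token";
    import {TokenStream} from "./TokenStream";
    import {TokenType} from "./TokenType";

    // "foo\n {%-": the trimming modifier strips the whitespace that precedes the tag.
    let stream = new TokenStream([
        new Token(TokenType.TEXT, 'foo\n ', 1, 1),
        new Token(TokenType.TAG_START, '{%', 2, 1),
        new Token(TokenType.TRIMMING_MODIFIER, '-', 2, 3),
        new Token(TokenType.EOF, null, 2, 4)
    ]);

    let ast = stream.toAst();
    // ast[0] is TEXT "foo": the trailing whitespace is removed and the modifier
    // token is filtered out; ast[1] is TAG_START "{%" and ast[2] is EOF.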
diff --git a/src/lib/TokenType.ts b/src/lib/TokenType.ts
index 74dfc05..235a880 100644
--- a/src/lib/TokenType.ts
+++ b/src/lib/TokenType.ts
@@ -1,40 +1,40 @@
-export enum TokenType {
-    CLOSING_QUOTE = 'CLOSING_QUOTE',
-    COMMENT_END = 'COMMENT_END',
-    COMMENT_START = 'COMMENT_START',
-    EOF = 'EOF',
-    INTERPOLATION_START = 'INTERPOLATION_START',
-    INTERPOLATION_END = 'INTERPOLATION_END',
-    LINE_TRIMMING_MODIFIER = 'LINE_TRIMMING_MODIFIER',
-    NAME = 'NAME',
-    NUMBER = 'NUMBER',
-    OPENING_QUOTE = 'OPENING_QUOTE',
-    OPERATOR = 'OPERATOR',
-    PUNCTUATION = 'PUNCTUATION',
-    STRING = 'STRING',
-    TAG_END = 'TAG_END',
-    TAG_START = 'TAG_START',
-    TEST_OPERATOR = 'TEST_OPERATOR',
-    TEXT = 'TEXT',
-    TRIMMING_MODIFIER = 'TRIMMING_MODIFIER',
-    VARIABLE_END = 'VARIABLE_END',
-    VARIABLE_START = 'VARIABLE_START',
-    WHITESPACE = 'WHITESPACE',
-    ARROW = 'ARROW'
-}
-
-/**
- * Returns the human representation of a token type.
- *
- * @param {TokenType} type The token type
- * @param {boolean} short Whether to return a short representation or not
- *
- * @returns {string} The string representation
- */
-export function typeToString(type: TokenType, short: boolean = false): string {
-    if (type in TokenType) {
-        return short ? type : 'TokenType.' + type;
-    } else {
-        throw new Error(`Token type "${type}" does not exist.`);
+ export enum TokenType {
+    CLOSING_QUOTE = 'CLOSING_QUOTE',
+    COMMENT_END = 'COMMENT_END',
+    COMMENT_START = 'COMMENT_START',
+    EOF = 'EOF',
+    INTERPOLATION_START = 'INTERPOLATION_START',
+    INTERPOLATION_END = 'INTERPOLATION_END',
+    LINE_TRIMMING_MODIFIER = 'LINE_TRIMMING_MODIFIER',
+    NAME = 'NAME',
+    NUMBER = 'NUMBER',
+    OPENING_QUOTE = 'OPENING_QUOTE',
+    OPERATOR = 'OPERATOR',
+    PUNCTUATION = 'PUNCTUATION',
+    STRING = 'STRING',
+    TAG_END = 'TAG_END',
+    TAG_START = 'TAG_START',
+    TEST_OPERATOR = 'TEST_OPERATOR',
+    TEXT = 'TEXT',
+    TRIMMING_MODIFIER = 'TRIMMING_MODIFIER',
+    VARIABLE_END = 'VARIABLE_END',
+    VARIABLE_START = 'VARIABLE_START',
+    WHITESPACE = 'WHITESPACE',
+    ARROW = 'ARROW'
 }
-}
+
+ /**
+  * Returns the human representation of a token type.
+  *
+  * @param {TokenType} type The token type
+  * @param {boolean} short Whether to return a short representation or not
+  *
+  * @returns {string} The string representation
+  */
+ export function typeToString(type: TokenType, short: boolean = false): string {
+    if (type in TokenType) {
+        return short ? type : 'TokenType.' + type;
+    } else {
+        throw new Error(`Token type "${type}" does not exist.`);
+    }
+ }
\ No newline at end of file
diff --git a/test/unit/lib/Lexer/test.ts b/test/unit/lib/Lexer/test.ts
index fbdb94f..f059837 100644
--- a/test/unit/lib/Lexer/test.ts
+++ b/test/unit/lib/Lexer/test.ts
@@ -29,10 +29,10 @@ let testTokens = (test: tape.Test, tokens: Token[], data: [TokenType, any, numbe
         let line = data[index][2];
         let column = data[index][3];
 
-        test.same(token.getType(), type, 'type should be "' + typeToString(type) + '"');
-        test.looseEqual(token.getValue(), value, token.getType() + ' value should be "' + ((value && value.length > 80) ? value.substr(0, 77) + '...' : value) + '"');
-        test.same(token.getLine(), line, 'line should be ' + line);
-        test.same(token.getColumn(), column, 'column should be ' + column);
+        test.same(token.type, type, 'type should be "' + typeToString(type) + '"');
+        test.looseEqual(token.value, value, token.type + ' value should be "' + ((value && value.length > 80) ? value.substr(0, 77) + '...' : value) + '"');
+        test.same(token.line, line, 'line should be ' + line);
+        test.same(token.column, column, 'column should be ' + column);
 
         index++;
     }
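The TokenType.ts rewrite above is whitespace-only and the Lexer test update simply follows the Token accessor rename. typeToString keeps its behaviour, illustrated by this small sketch:

    import {TokenType, typeToString} from "./TokenType";

    const longForm = typeToString(TokenType.NAME);        // "TokenType.NAME"
    const shortForm = typeToString(TokenType.NAME, true); // "NAME"
    // A value that is not a TokenType member throws: Token type "..." does not exist.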
: value) + '"'); - test.same(token.getLine(), line, 'line should be ' + line); - test.same(token.getColumn(), column, 'column should be ' + column); + test.same(token.type, type, 'type should be "' + typeToString(type) + '"'); + test.looseEqual(token.value, value, token.type + ' value should be "' + ((value && value.length > 80) ? value.substr(0, 77) + '...' : value) + '"'); + test.same(token.line, line, 'line should be ' + line); + test.same(token.column, column, 'column should be ' + column); index++; } diff --git a/test/unit/lib/TokenStream/test.ts b/test/unit/lib/TokenStream/test.ts new file mode 100644 index 0000000..d25b2b6 --- /dev/null +++ b/test/unit/lib/TokenStream/test.ts @@ -0,0 +1,189 @@ +import * as tape from 'tape'; +import {TokenStream} from '../../../../src/lib/TokenStream'; +import {Token} from "../../../../src/lib/Token"; +import {TokenType} from "../../../../src/lib/TokenType"; + +tape('TokenStream', (test) => { + test.test('traversal', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + test.true(stream.current.test(TokenType.NAME, 'foo')); + test.true(stream.next().test(TokenType.NAME, 'foo'), 'next returns the current token'); + test.true(stream.current.test(TokenType.TEXT, 'foo'), 'next increments the pointer'); + stream.next(); + test.true(stream.current.test(TokenType.STRING, 'foo')); + stream.next(); + + stream.rewind(); + test.true(stream.current.test(TokenType.NAME, 'foo'), 'rewind actually rewinds the stream'); + + test.true(stream.nextIf(TokenType.NAME, 'foo').test(TokenType.NAME, 'foo'), 'nextIf returns the tested token when the test is successful'); + test.true(stream.current.test(TokenType.TEXT, 'foo'), 'nextIf increments the pointer when the test is successful'); + test.false(stream.nextIf(TokenType.NAME, 'foo')); + test.true(stream.nextIf(TokenType.TEXT), 'nextIf support a single parameter'); + + test.end(); + }); + + test.test('lookup', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + test.same(stream.look(-1), null); + test.true(stream.look(0).test(TokenType.NAME, 'foo')); + test.true(stream.look().test(TokenType.TEXT, 'foo')); + test.true(stream.look(1).test(TokenType.TEXT, 'foo')); + test.true(stream.look(2).test(TokenType.STRING, 'foo')); + test.same(stream.look(3), null); + stream.next(); + test.true(stream.look(-1).test(TokenType.NAME, 'foo')); + + test.end(); + }); + + test.test('test', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + test.true(stream.test(TokenType.NAME, 'foo')); + test.false(stream.test(TokenType.TEXT, 'foo')); + test.true(stream.test(TokenType.NAME)); + stream.next(); + test.true(stream.test(TokenType.TEXT, 'foo')); + + test.end(); + }); + + test.test('injection', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + stream.injectTokens([ + new Token(TokenType.NAME, 'bar', 1, 1) + ]); + + test.true(stream.test(TokenType.NAME, 'bar')); + + stream.injectTokens([ + new Token(TokenType.TEXT, 'bar', 1, 1), + new Token(TokenType.STRING, 'bar', 1, 1) + ]); + + test.true(stream.test(TokenType.TEXT, 'bar')); + 
test.true(stream.look().test(TokenType.STRING, 'bar')); + test.true(stream.look(2).test(TokenType.NAME, 'bar')); + + test.end(); + }); + + test.test('toString', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + test.same(stream.toString(), `NAME(foo) +TEXT(foo) +STRING(foo)`); + + test.end(); + }); + + test.test('serialize', (test) => { + let stream = new TokenStream([ + new Token(TokenType.NAME, 'foo', 1, 1), + new Token(TokenType.TEXT, 'foo', 1, 1), + new Token(TokenType.STRING, 'foo', 1, 1) + ]); + + test.same(stream.serialize(), `foofoofoo`); + + test.end(); + }); + + test.test('toAst', (test) => { + let stream = new TokenStream([ + new Token(TokenType.TRIMMING_MODIFIER, '-', 1, 1), + new Token(TokenType.WHITESPACE, ' ', 1, 2), + new Token(TokenType.LINE_TRIMMING_MODIFIER, '~', 1, 3), + new Token(TokenType.EOF, null, 1, 4), + ]); + + test.true(stream.toAst()[0].test(TokenType.EOF), 'filters non-relevant tokens'); + + stream = new TokenStream([ + new Token(TokenType.TEXT, 'foo\n ', 1, 1), + new Token(TokenType.TAG_START, '{%', 2, 1), + new Token(TokenType.TRIMMING_MODIFIER, '-', 2, 3), + new Token(TokenType.EOF, null, 2, 4), + ]); + + test.true(stream.toAst()[0].test(TokenType.TEXT, 'foo'), 'handles trimming modifier on left side'); + + stream = new TokenStream([ + new Token(TokenType.TRIMMING_MODIFIER, '-', 1, 1), + new Token(TokenType.TAG_END, '%}', 1, 2), + new Token(TokenType.TEXT, ' \nfoo', 1, 4), + new Token(TokenType.EOF, null, 2, 1), + ]); + + test.true(stream.toAst()[1].test(TokenType.TEXT, 'foo'), 'handles trimming modifier on right side'); + + stream = new TokenStream([ + new Token(TokenType.TEXT, 'foo\n ', 1, 1), + new Token(TokenType.TAG_START, '{%', 2, 1), + new Token(TokenType.LINE_TRIMMING_MODIFIER, '~', 2, 3), + new Token(TokenType.EOF, null, 2, 4), + ]); + + test.true(stream.toAst()[0].test(TokenType.TEXT, 'foo\n'), 'handles line trimming modifier on left side'); + + stream = new TokenStream([ + new Token(TokenType.LINE_TRIMMING_MODIFIER, '~', 1, 1), + new Token(TokenType.TAG_END, '%}', 1, 2), + new Token(TokenType.TEXT, ' \nfoo', 1, 4), + new Token(TokenType.EOF, null, 2, 1), + ]); + + test.true(stream.toAst()[1].test(TokenType.TEXT, '\nfoo'), 'handles line trimming modifier on right side'); + + stream = new TokenStream([ + new Token(TokenType.OPERATOR, 'foo bar', 1, 1), + new Token(TokenType.EOF, null, 1, 16), + ]); + + test.true(stream.toAst()[0].test(TokenType.OPERATOR, 'foo bar'), 'removes unnecessary operator spaces'); + + stream = new TokenStream([ + new Token(TokenType.TEXT, '', 1, 1), + new Token(TokenType.EOF, null, 1, 6), + ]); + + test.true(stream.toAst()[0].test(TokenType.EOF), 'filters empty TEXT tokens out'); + + stream = new TokenStream([ + new Token(TokenType.STRING, '\\z\\t', 1, 1), + new Token(TokenType.EOF, null, 1, 6), + ]); + + test.true(stream.toAst()[0].test(TokenType.STRING, 'z\t'), 'converts C-style escape sequences'); + + test.end(); + }); + + test.end(); +}); \ No newline at end of file
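The last toAst test above exercises the C-style escape handling that TokenStream applies to STRING and TEXT tokens. A standalone sketch of that behaviour; stripcslashes itself is module-private, so the effect is shown through toAst:

    import {Token} from "../../../../src/lib/Token";
    import {TokenStream} from "../../../../src/lib/TokenStream";
    import {TokenType} from "../../../../src/lib/TokenType";

    let stream = new TokenStream([
        new Token(TokenType.STRING, '\\z\\t', 1, 1),
        new Token(TokenType.EOF, null, 1, 6)
    ]);

    // "\z" is not a recognized escape sequence, so only the "z" is kept;
    // "\t" is recognized, so it is decoded to an actual tab character.
    let value = stream.toAst()[0].value; // "z\t"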