forked from alecthomas/chroma
-
Notifications
You must be signed in to change notification settings - Fork 0
/
mutators_test.go
57 lines (53 loc) · 1.25 KB
/
mutators_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
package chroma
import (
"testing"
assert "github.com/alecthomas/assert/v2"
)
// TestInclude verifies that applying an Include rule's LexerMutator
// expands the referenced state's rules in place of the include marker.
func TestInclude(t *testing.T) {
	inc := Include("other")
	got := CompiledRules{
		"root": {{Rule: inc}},
		"other": {
			{Rule: Rule{Pattern: "//.+", Type: Comment}},
			{Rule: Rule{Pattern: `"[^"]*"`, Type: String}},
		},
	}
	lexer := &RegexLexer{rules: got}
	// Mutate "root" at index 0, where the include marker sits.
	assert.NoError(t, inc.Mutator.(LexerMutator).MutateLexer(lexer.rules, "root", 0))
	// After mutation, "root" should contain a copy of "other"'s rules.
	want := CompiledRules{
		"root": {
			{Rule: Rule{Pattern: "//.+", Type: Comment}},
			{Rule: Rule{Pattern: `"[^"]*"`, Type: String}},
		},
		"other": {
			{Rule: Rule{Pattern: "//.+", Type: Comment}},
			{Rule: Rule{Pattern: `"[^"]*"`, Type: String}},
		},
	}
	assert.Equal(t, want, got)
}
// TestCombine verifies that a Combined mutator merges several states so
// that, after the triggering rule matches, rules from any of the combined
// states can produce tokens.
func TestCombine(t *testing.T) {
	lexer := mustNewLexer(t, nil, Rules{ // nolint: forbidigo
		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
		"world": {{`world`, Name, nil}},
		"bye":   {{`bye`, Name, nil}},
		"space": {{`\s+`, Whitespace, nil}},
	})
	it, err := lexer.Tokenise(nil, "hello world")
	assert.NoError(t, err)
	want := []Token{
		{String, `hello`},
		{Whitespace, ` `},
		{Name, `world`},
	}
	assert.Equal(t, want, it.Tokens())
}