tree.go

package parse

import (
	"errors"

	"github.com/zalgonoise/lex"
)

const (
	maxBackup = 5
)

var (
	// ErrInvalidID is a preset error for invalid node IDs
	ErrInvalidID = errors.New("invalid node ID")
	// ErrNotFound is a preset error for non-existing nodes
	ErrNotFound = errors.New("node was not found")
	// ErrCyclicalEdge is a preset error for cyclical edges in the graph
	ErrCyclicalEdge = errors.New("cyclical edges are not allowed")
)

// BackupSlot is a lightweight enum type to create defined containers
// for node IDs, so they can be reused or referenced. The slots are backed
// by distinct values so that each one addresses its own entry in the
// Tree's backup map.
type BackupSlot uint8

const (
	Slot0 BackupSlot = iota
	Slot1
	Slot2
	Slot3
	Slot4
)
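
// Within this package, a slot keys the Tree's internal backup map; a rough
// sketch of the intended use (illustrative only; the methods that manage
// these slots are not in this file):
//
//	t.backup[Slot0] = t.node // remember the current node
//	t.node = t.backup[Slot0] // later, jump back to it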

// Tree is a generic tree data structure to represent a lexical tree
//
// The Tree buffers lex Items carrying values of type T, identified by comparable
// tokens of type C. The parser runs in tandem with a lexer -- as the parser
// advances to the next token, it is actually consuming a token from the lexer
// by calling its `lexer.NextItem()` method.
//
// A Tree exposes methods for creating and moving around Nodes, and for consuming,
// buffering and backing up lex Items as it converts them into nodes in the Tree.
//
// A Tree stores every node it contains nested within the Root node. To navigate
// through the nodes in a Tree, the Tree stores (and exports) a Root element,
// pointing to this Root node.
//
// The Root node contains all top-level items in lexical order, which may or may
// not have edges themselves. It is the responsibility of the caller to ensure
// that the Tree is navigated in full when processing it.
type Tree[C comparable, T any] struct {
	Root    *Node[C, T]                // exported pointer to the root node
	node    *Node[C, T]                // node the parser is currently positioned on
	items   []lex.Item[C, T]           // buffer of consumed and backed-up lex Items
	lex     lex.Emitter[C, T]          // lexer that emits Items into the parser
	peek    int                        // count of buffered Items ahead of the stream
	backup  map[BackupSlot]*Node[C, T] // node references saved to backup slots
	parseFn ParseFn[C, T]              // next parse state to run
}

// New creates a parse.Tree with the input lex.Emitter `l` and ParseFn `initParse`,
// initialized with a root node of token type C `typ` and values T `values`, at position `-1`.
func New[C comparable, T any](
	l lex.Emitter[C, T],
	initParse ParseFn[C, T],
	typ C,
	values ...T,
) *Tree[C, T] {
	t := &Tree[C, T]{
		items:   make([]lex.Item[C, T], maxBackup),
		lex:     l,
		peek:    0,
		backup:  map[BackupSlot]*Node[C, T]{},
		parseFn: initParse,
	}
	t.Root = t.Node(lex.NewItem(-1, typ, values...))
	t.node = t.Root
	return t
}
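
// A minimal construction sketch, assuming the caller already has a
// lex.Emitter[TokenType, byte] value `l` and has defined a starting
// ParseFn `parseDocument` and a token type `RootToken` (these names are
// illustrative, not part of this package):
//
//	tree := parse.New[TokenType, byte](l, parseDocument, RootToken)
//	tree.Parse()
//	// tree.Root now points to the parsed node hierarchy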

// Next consumes and returns the next Item from the lexer
func (t *Tree[C, T]) Next() lex.Item[C, T] {
	if t.peek > 0 {
		// serve a previously peeked or backed-up Item from the buffer
		t.peek--
	} else {
		// nothing buffered; pull a fresh Item from the lexer
		t.items[0] = t.lex.NextItem()
	}
	return t.items[t.peek]
}
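
// Next is usually called from within a ParseFn to walk the lexer's output one
// Item at a time; a rough sketch (the function name and the end-of-input
// handling are illustrative only):
//
//	func parseAny[C comparable, T any](t *parse.Tree[C, T]) parse.ParseFn[C, T] {
//		item := t.Next()
//		_ = item // convert the Item into a Node here
//		return nil // returning nil stops Tree.Parse
//	}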

// Peek returns but does not consume the next Item from the lexer
func (t *Tree[C, T]) Peek() lex.Item[C, T] {
	if t.peek > 0 {
		// an Item is already buffered; report it without advancing
		return t.items[t.peek-1]
	}
	// buffer one Item from the lexer so the following Next call returns it
	t.peek = 1
	t.items[0] = t.lex.NextItem()
	return t.items[0]
}
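
// Peek lets a ParseFn look at the upcoming Item before deciding whether to
// consume it, for example:
//
//	upcoming := t.Peek()
//	same := t.Next() // returns the Item that Peek just reported
//	_, _ = upcoming, same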

// Backup backs the stream up by the given Items
//
// The zeroth Item is already there. Order must be most recent -> oldest
func (t *Tree[C, T]) Backup(items ...lex.Item[C, T]) {
	for idx, item := range items {
		// slot 0 is reserved for the Item already in the buffer, and the
		// buffer never grows past maxBackup entries
		if idx+1 >= maxBackup {
			break
		}
		t.items[idx+1] = item
	}
	t.peek = len(items)
}
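
// A call-shape sketch only: Items previously obtained from Next are handed
// back most recent first (here `second` and `first` are Items the caller has
// already consumed):
//
//	t.Backup(second, first)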

// Parse iterates through the incoming lex Items by calling its `ParseFn`s, until
// all tokens are consumed and parsed into the Tree
func (t *Tree[C, T]) Parse() {
	// each ParseFn returns the next ParseFn to run; a nil return ends parsing
	for t.parseFn != nil {
		t.parseFn = t.parseFn(t)
	}
}
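
// Parse drives a small state machine: each ParseFn processes part of the token
// stream and returns the state that should run next. A sketch, with illustrative
// state names (`parseHeader` and `parseBody` are not part of this package):
//
//	func parseHeader[C comparable, T any](t *parse.Tree[C, T]) parse.ParseFn[C, T] {
//		// ...consume and convert header Items...
//		return parseBody[C, T] // hand off to the next state
//	}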