-
Notifications
You must be signed in to change notification settings - Fork 1
/
parser.odin
139 lines (116 loc) · 2.91 KB
/
parser.odin
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
package ini
import "core:strings"
import "core:unicode"
import "core:unicode/utf8"
// INI is a map from section to a map from key to values.
// Pairs defined before the first section are put into the "" key: INI[""].
// Section names and keys are stored lowercased (see to_lower); all keys
// and values are cloned onto the heap and owned by the map — free the
// whole structure with ini_delete.
INI :: map[string]map[string]string
// Frees every allocation owned by the INI: each pair's key and value
// strings, each inner section map, each section-name key, and finally
// the outer map itself.
ini_delete :: proc(i: ^INI) {
	for k, v in i {
		for kk, vv in v {
			delete(kk) // pair key string
			delete(vv) // pair value string
		}
		delete(k) // section-name string
		delete(v) // inner section map
	}
	delete(i^) // the outer map
}
// ParseErr enumerates parse outcomes. EOF doubles as the success value:
// a complete parse terminates with .EOF rather than a dedicated Ok.
ParseErr :: enum {
	EOF, // Probably not an error (returned when ok).
	IllegalToken,     // Lexer produced an .Illegal token.
	KeyWithoutEquals, // A key token not followed by '='.
	ValueWithoutKey,  // A value token with no preceding key.
	UnexpectedEquals, // An '=' token in an unexpected position.
}
// ParseResult pairs the terminating error (.EOF on success) with the
// input position of the token that ended the parse.
ParseResult :: struct {
	err: ParseErr,
	pos: Pos, // Position of the terminating token.
}
// Parser parses the tokens from the lexer into the ini map.
Parser :: struct {
	lexer: ^Lexer, // Token source.
	ini: ^INI, // Destination map (not owned by the parser).
	curr_section: ^map[string]string, // Section that receives new pairs.
}
// Builds a Parser that reads from `l` and writes into `ini`.
// Guarantees the global ("") section exists and starts there, so pairs
// that appear before any section header have a home.
make_parser :: proc(l: ^Lexer, ini: ^INI) -> Parser {
	if "" not_in ini {
		ini[""] = map[string]string{}
	}
	return Parser{
		lexer        = l,
		ini          = ini,
		curr_section = &ini[""],
	}
}
// Parses `data` into an existing INI map, merging with whatever the map
// already contains. Returns the terminating ParseResult (.EOF on success).
parse_into :: proc(data: []byte, ini: ^INI) -> ParseResult {
	lex := make_lexer(data)
	parser := make_parser(&lex, ini)
	return parser_parse(&parser)
}
// Parses `data` into a fresh INI map. On failure the partially-built
// map is freed, so callers only own allocations when err == .EOF.
parse :: proc(data: []byte) -> (INI, ParseResult) {
	result: INI
	status := parse_into(data, &result)
	if status.err != .EOF {
		ini_delete(&result)
	}
	return result, status
}
// Pumps tokens from the lexer through parser_parse_token until one of
// them yields a terminal ParseResult (.EOF on success, or an error).
parser_parse :: proc(using p: ^Parser) -> ParseResult {
	for {
		tok := lexer_next(lexer)
		if res, done := parser_parse_token(p, tok).?; done {
			return res
		}
	}
}
// Handles a single token from the lexer. Returns nil to continue
// parsing, or a ParseResult that terminates the parse (.EOF on
// success, anything else an error at that token's position).
@(private = "file")
parser_parse_token :: proc(using p: ^Parser, t: Token) -> Maybe(ParseResult) {
	switch t.type {
	case .Illegal:
		return ParseResult{.IllegalToken, t.pos}
	case .Key:
		// A key must be immediately followed by '='.
		assignment := lexer_next(lexer)
		if assignment.type != .Assign {
			return ParseResult{.KeyWithoutEquals, t.pos}
		}
		// Keys are case-insensitive: store them lowercased, cloned so
		// they outlive the lexer's buffer.
		key := strings.clone(string(to_lower(t.value)))
		value := lexer_next(lexer)
		if value.type != .Value {
			// No value, value is empty string. The token we just
			// consumed was not a value, so re-dispatch it.
			curr_section[key] = ""
			return parser_parse_token(p, value)
		}
		curr_section[key] = strings.clone(string(value.value))
	case .Section:
		// Strip the surrounding '[' and ']'.
		#no_bounds_check no_brackets := t.value[1:len(t.value) - 1]
		key := string(to_lower(no_brackets))
		// BUG FIX: existence must be checked against the top-level ini
		// map, not against the current section's pairs. The old check
		// (`key in curr_section`) meant a duplicate section header
		// re-inserted an empty map, discarding and leaking the pairs
		// already parsed for that section.
		if !(key in ini) {
			ini[strings.clone(key)] = map[string]string{}
		}
		curr_section = &ini[key]
	case .Value:
		return ParseResult{.ValueWithoutKey, t.pos}
	case .Assign:
		return ParseResult{.UnexpectedEquals, t.pos}
	// Ignoring comments.
	case .Comment:
	case .EOF:
		return ParseResult{.EOF, t.pos}
	}
	return nil
}
// Lowercases the UTF-8 text in `s` in place, rune by rune, and returns
// the same slice. A rune whose lowercase form encodes to a different
// byte width is left unchanged — it cannot be replaced in place
// without shifting the rest of the buffer.
@(private = "file")
to_lower :: proc(s: []byte) -> []byte {
	i, n := 0, len(s)
	for i < n {
		ch, ch_size := utf8.decode_rune(s[i:])
		lower_ch := unicode.to_lower(ch)
		if ch != lower_ch {
			// BUG FIX: the old code transmuted the rune (raw,
			// endian-dependent code-point bytes, not UTF-8) and wrote
			// it at s[i-1:] after already advancing i by ch_size —
			// corrupting every multi-byte rune. Encode the lowercase
			// rune properly and write it at the rune's start offset.
			lower_bytes, lower_size := utf8.encode_rune(lower_ch)
			if lower_size == ch_size {
				copy(s[i:], lower_bytes[:ch_size])
			}
		}
		i += ch_size
	}
	return s
}