Skip to content

Commit bb0d1ab

Browse files
authored
Merge pull request #95 from ornj/ext-lua-sh
Refactor idea to simplify interfaces
2 parents 44ab750 + afb2858 commit bb0d1ab

File tree

7 files changed

+114
-89
lines changed

7 files changed

+114
-89
lines changed

analyze_test.go

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2110,9 +2110,7 @@ func TestAnalyze_lua(t *testing.T) {
21102110
err := analyze("nginx.conf", tc.stmt, ";", tc.ctx, &ParseOptions{
21112111
MatchFuncs: []MatchFunc{MatchLua},
21122112
LexOptions: LexOptions{
2113-
ExternalLexers: []Lexer{
2114-
&Lua{},
2115-
},
2113+
Lexers: []RegisterLexer{lua.RegisterLexer()},
21162114
},
21172115
})
21182116

build.go

Lines changed: 45 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -18,23 +18,45 @@ import (
1818
)
1919

2020
type BuildOptions struct {
21-
Indent int
22-
Tabs bool
23-
Header bool
24-
ExternalBuilds []Builder // handle specific directives
21+
Indent int
22+
Tabs bool
23+
Header bool
24+
Builders []RegisterBuilder // handle specific directives
25+
extBuilders map[string]Builder
26+
}
27+
28+
// RegisterBuilder is an option that can be used to add a builder to build NGINX configuration for custom directives.
29+
type RegisterBuilder interface {
30+
applyBuildOptions(options *BuildOptions)
31+
}
32+
33+
type registerBuilder struct {
34+
b Builder
35+
directives []string
36+
}
37+
38+
func (rb registerBuilder) applyBuildOptions(o *BuildOptions) {
39+
if o.extBuilders == nil {
40+
o.extBuilders = make(map[string]Builder)
41+
}
42+
43+
for _, s := range rb.directives {
44+
o.extBuilders[s] = rb.b
45+
}
46+
}
47+
48+
// BuildWithBuilder registers a builder to build the NGINX configuration for the given directives.
49+
func BuildWithBuilder(b Builder, directives ...string) RegisterBuilder {
50+
return registerBuilder{b: b, directives: directives}
2551
}
2652

2753
// Builder is the interface implemented by types that can render a Directive
2854
// as it appears in NGINX configuration files.
2955
//
30-
// RegisterBuilder returns the names of the directives for which the builder can
31-
// build NGINX configuration.
32-
//
3356
// Build writes the strings that represent the Directive and its Block to the
3457
// io.StringWriter returning any error encountered that caused the write to stop
3558
// early. Build must not modify the Directive.
3659
type Builder interface {
37-
RegisterBuilder() []string
3860
Build(stmt *Directive) string
3961
}
4062

@@ -63,6 +85,10 @@ func BuildFiles(payload Payload, dir string, options *BuildOptions) error {
6385
dir = cwd
6486
}
6587

88+
for _, o := range options.Builders {
89+
o.applyBuildOptions(options)
90+
}
91+
6692
for _, config := range payload.Config {
6793
path := config.File
6894
if !filepath.IsAbs(path) {
@@ -111,6 +137,12 @@ func Build(w io.Writer, config Config, options *BuildOptions) error {
111137
}
112138
}
113139

140+
if options.extBuilders == nil { // might be set if using BuildFiles
141+
for _, o := range options.Builders {
142+
o.applyBuildOptions(options)
143+
}
144+
}
145+
114146
body := strings.Builder{}
115147
buildBlock(&body, nil, config.Parsed, 0, 0, options)
116148

@@ -123,7 +155,7 @@ func Build(w io.Writer, config Config, options *BuildOptions) error {
123155
return err
124156
}
125157

126-
//nolint:funlen,gocognit
158+
//nolint:gocognit
127159
func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth int, lastLine int, options *BuildOptions) {
128160
for i, stmt := range block {
129161
// if this statement is a comment on the same line as the previous one, do not emit EOL for this stmt
@@ -147,18 +179,10 @@ func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth i
147179
directive := Enquote(stmt.Directive)
148180
_, _ = sb.WriteString(directive)
149181

150-
if options.ExternalBuilds != nil {
151-
extDirectivesMap := make(map[string]Builder)
152-
for _, ext := range options.ExternalBuilds {
153-
directives := ext.RegisterBuilder()
154-
for _, d := range directives {
155-
extDirectivesMap[d] = ext
156-
}
157-
158-
if ext, ok := extDirectivesMap[directive]; ok {
159-
_, _ = sb.WriteString(" ") // space between directives and arguments
160-
_, _ = sb.WriteString(ext.Build(stmt))
161-
}
182+
if options.extBuilders != nil {
183+
if ext, ok := options.extBuilders[directive]; ok {
184+
_, _ = sb.WriteString(" ") // space between directives and arguments
185+
_, _ = sb.WriteString(ext.Build(stmt))
162186
}
163187
} else {
164188
// special handling for if statements

build_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -260,7 +260,7 @@ var buildFixtures = []buildFixture{
260260
},
261261
{
262262
name: "lua block",
263-
options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}},
263+
options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}},
264264
parsed: Directives{
265265
{
266266
Directive: "content_by_lua_block",
@@ -273,7 +273,7 @@ var buildFixtures = []buildFixture{
273273
},
274274
{
275275
name: "set_by_lua_block",
276-
options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}},
276+
options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}},
277277
parsed: Directives{
278278
{
279279
Directive: "set_by_lua_block",

lex.go

Lines changed: 36 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -46,27 +46,53 @@ func SetTokenChanCap(size int) {
4646

4747
// Lexer is an interface for implementing lexers that handle external NGINX tokens during the lexical analysis phase.
4848
type Lexer interface {
49-
// RegisterLexer registers an external lexer with a given SubScanner.
50-
// This method integrates the external lexer into the lexical analysis process,
51-
// enabling it to handle external token scanning. It returns a slice of strings
52-
// representing the tokens that the external lexer can recognize.
53-
RegisterLexer(scanner *SubScanner) []string
5449
// Lex processes a matched token and returns a channel of NgxToken objects.
5550
// This method performs lexical analysis on the matched token and produces a stream of tokens for the parser to consume.
5651
// The external lexer should close the channel once it has completed lexing the input to signal the end of tokens.
5752
// Failure to close the channel will cause the receiver to wait indefinitely.
58-
Lex(matchedToken string) <-chan NgxToken
53+
Lex(s *SubScanner, matchedToken string) <-chan NgxToken
5954
}
6055

6156
// LexOptions allows customization of the lexing process by specifying external lexers
6257
// that can handle specific directives. By registering interest in particular directives,
6358
// external lexers can ensure that these directives are processed separately
6459
// from the general lexical analysis logic.
6560
type LexOptions struct {
66-
ExternalLexers []Lexer
61+
Lexers []RegisterLexer
62+
extLexers map[string]Lexer
63+
}
64+
65+
// RegisterLexer is an option that can be used to add a lexer to tokenize external NGINX tokens.
66+
type RegisterLexer interface {
67+
applyLexOptions(options *LexOptions)
68+
}
69+
70+
type registerLexer struct {
71+
l Lexer
72+
stringTokens []string
73+
}
74+
75+
func (rl registerLexer) applyLexOptions(o *LexOptions) {
76+
if o.extLexers == nil {
77+
o.extLexers = make(map[string]Lexer)
78+
}
79+
80+
for _, s := range rl.stringTokens {
81+
o.extLexers[s] = rl.l
82+
}
83+
}
84+
85+
// LexWithLexer registers a Lexer that implements tokenization of an NGINX configuration after one of the given
86+
// stringTokens is encountered by Lex.
87+
func LexWithLexer(l Lexer, stringTokens ...string) RegisterLexer {
88+
return registerLexer{l: l, stringTokens: stringTokens}
6789
}
6890

6991
func LexWithOptions(r io.Reader, options LexOptions) chan NgxToken {
92+
for _, o := range options.Lexers {
93+
o.applyLexOptions(&options)
94+
}
95+
7096
tc := make(chan NgxToken, tokChanCap)
7197
go tokenize(r, tc, options)
7298
return tc
@@ -119,22 +145,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
119145
lexState = skipSpace
120146
}
121147

122-
var externalLexers map[string]Lexer
123-
var externalScanner *SubScanner
124-
for _, ext := range options.ExternalLexers {
125-
if externalLexers == nil {
126-
externalLexers = make(map[string]Lexer)
127-
}
128-
129-
if externalScanner == nil {
130-
externalScanner = &SubScanner{scanner: scanner, tokenLine: tokenLine}
131-
}
132-
133-
for _, d := range ext.RegisterLexer(externalScanner) {
134-
externalLexers[d] = ext
135-
}
136-
}
137-
138148
for {
139149
if readNext {
140150
if !scanner.Scan() {
@@ -167,13 +177,13 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
167177
if token.Len() > 0 {
168178
tokenStr := token.String()
169179
if nextTokenIsDirective {
170-
if ext, ok := externalLexers[tokenStr]; ok {
180+
if ext, ok := options.extLexers[tokenStr]; ok {
171181
// saving lex state before emitting tokenStr to know if we encountered start quote
172182
lastLexState := lexState
173183
emit(tokenStartLine, lexState == inQuote, nil)
174184

175-
externalScanner.tokenLine = tokenLine
176-
extTokenCh := ext.Lex(tokenStr)
185+
externalScanner := &SubScanner{scanner: scanner, tokenLine: tokenLine}
186+
extTokenCh := ext.Lex(externalScanner, tokenStr)
177187
for tok := range extTokenCh {
178188
tokenCh <- tok
179189
}

lex_test.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -429,10 +429,10 @@ func TestLex(t *testing.T) {
429429
t.Fatal(err)
430430
}
431431
defer file.Close()
432+
433+
lua := &Lua{}
432434
options := LexOptions{
433-
ExternalLexers: []Lexer{
434-
&Lua{},
435-
},
435+
Lexers: []RegisterLexer{lua.RegisterLexer()},
436436
}
437437
i := 0
438438

lua.go

Lines changed: 22 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,7 @@ import (
55
"strings"
66
)
77

8-
type Lua struct {
9-
s *SubScanner
10-
}
8+
type Lua struct{}
119

1210
func (l *Lua) directiveNames() []string {
1311
return []string{
@@ -30,13 +28,12 @@ func (l *Lua) directiveNames() []string {
3028
}
3129
}
3230

33-
func (l *Lua) RegisterLexer(s *SubScanner) []string {
34-
l.s = s
35-
return l.directiveNames()
31+
func (l *Lua) RegisterLexer() RegisterLexer {
32+
return LexWithLexer(l, l.directiveNames()...)
3633
}
3734

3835
//nolint:funlen,gocognit,gocyclo,nosec
39-
func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
36+
func (l *Lua) Lex(s *SubScanner, matchedToken string) <-chan NgxToken {
4037
tokenCh := make(chan NgxToken)
4138

4239
tokenDepth := 0
@@ -51,21 +48,21 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
5148
if matchedToken == "set_by_lua_block" /* #nosec G101 */ {
5249
arg := ""
5350
for {
54-
if !l.s.Scan() {
51+
if !s.Scan() {
5552
return
5653
}
57-
next := l.s.Text()
54+
next := s.Text()
5855
if isSpace(next) {
5956
if arg != "" {
60-
tokenCh <- NgxToken{Value: arg, Line: l.s.Line(), IsQuoted: false}
57+
tokenCh <- NgxToken{Value: arg, Line: s.Line(), IsQuoted: false}
6158
break
6259
}
6360

6461
for isSpace(next) {
65-
if !l.s.Scan() {
62+
if !s.Scan() {
6663
return
6764
}
68-
next = l.s.Text()
65+
next = s.Text()
6966
}
7067
}
7168
arg += next
@@ -74,14 +71,14 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
7471

7572
// check that Lua block starts correctly
7673
for {
77-
if !l.s.Scan() {
74+
if !s.Scan() {
7875
return
7976
}
80-
next := l.s.Text()
77+
next := s.Text()
8178

8279
if !isSpace(next) {
8380
if next != "{" {
84-
lineno := l.s.Line()
81+
lineno := s.Line()
8582
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `expected "{" to start lua block`, Line: &lineno}}
8683
return
8784
}
@@ -92,13 +89,13 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
9289

9390
// Grab everything in Lua block as a single token and watch for curly brace '{' in strings
9491
for {
95-
if !l.s.Scan() {
92+
if !s.Scan() {
9693
return
9794
}
9895

99-
next := l.s.Text()
100-
if err := l.s.Err(); err != nil {
101-
lineno := l.s.Line()
96+
next := s.Text()
97+
if err := s.Err(); err != nil {
98+
lineno := s.Line()
10299
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: err.Error(), Line: &lineno}}
103100
}
104101

@@ -112,7 +109,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
112109
case next == "}" && !inQuotes:
113110
tokenDepth--
114111
if tokenDepth < 0 {
115-
lineno := l.s.Line()
112+
lineno := s.Line()
116113
tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}}
117114
return
118115
}
@@ -122,8 +119,8 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
122119
}
123120

124121
if tokenDepth == 0 {
125-
tokenCh <- NgxToken{Value: tok.String(), Line: l.s.Line(), IsQuoted: true}
126-
tokenCh <- NgxToken{Value: ";", Line: l.s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx bahavior
122+
tokenCh <- NgxToken{Value: tok.String(), Line: s.Line(), IsQuoted: true}
123+
tokenCh <- NgxToken{Value: ";", Line: s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx behavior
127124
// See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41
128125
return
129126
}
@@ -142,7 +139,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
142139

143140
// strictly check that the first non-space character is {
144141
if tokenDepth == 0 {
145-
tokenCh <- NgxToken{Value: next, Line: l.s.Line(), IsQuoted: false}
142+
tokenCh <- NgxToken{Value: next, Line: s.Line(), IsQuoted: false}
146143
return
147144
}
148145
tok.WriteString(next)
@@ -153,8 +150,8 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken {
153150
return tokenCh
154151
}
155152

156-
func (l *Lua) RegisterBuilder() []string {
157-
return l.directiveNames()
153+
func (l *Lua) RegisterBuilder() RegisterBuilder {
154+
return BuildWithBuilder(l, l.directiveNames()...)
158155
}
159156

160157
func (l *Lua) Build(stmt *Directive) string {

0 commit comments

Comments
 (0)