From 8e5edf62cb928b460e823ee7ee7007dcdee426f0 Mon Sep 17 00:00:00 2001 From: Stephen Hurwitz Date: Tue, 11 Jun 2024 10:41:22 -0700 Subject: [PATCH 1/5] Refactor idea to simplify interfaces Moves the "register" responsibility to the options types, not 100% on the API, but it's an idea. The interfaces are now single-method which is pretty much as small as it gets. You could implement these interfaces with a single function that calls itself. A benefit to this is most obvious with the external Lexer I think as now it is not responsible for storing a value for later use that carries state and is not concurrent-safe. --- analyze_test.go | 4 +-- build.go | 65 ++++++++++++++++++++++++++++++++++--------------- build_test.go | 4 +-- lex.go | 55 +++++++++++++++++++++-------------------- lex_test.go | 6 ++--- lua.go | 45 +++++++++++++++------------------- parse_test.go | 14 ++++------- 7 files changed, 105 insertions(+), 88 deletions(-) diff --git a/analyze_test.go b/analyze_test.go index fff8e571..a5027801 100644 --- a/analyze_test.go +++ b/analyze_test.go @@ -2110,9 +2110,7 @@ func TestAnalyze_lua(t *testing.T) { err := analyze("nginx.conf", tc.stmt, ";", tc.ctx, &ParseOptions{ MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - ExternalLexers: []Lexer{ - &Lua{}, - }, + Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, }, }) diff --git a/build.go b/build.go index 51d5fa54..d9eead98 100644 --- a/build.go +++ b/build.go @@ -18,23 +18,34 @@ import ( ) type BuildOptions struct { - Indent int - Tabs bool - Header bool - ExternalBuilds []Builder // handle specific directives + Indent int + Tabs bool + Header bool + Builders []RegisterBuilder // handle specific directives + extBuilders map[string]Builder +} + +type RegisterBuilder func(*BuildOptions) + +func BuildWithBuilder(b Builder, stringTokens ...string) RegisterBuilder { + return func(o *BuildOptions) { + if o.extBuilders == nil { + o.extBuilders = make(map[string]Builder) + } + + for _, s := 
range stringTokens { + o.extBuilders[s] = b + } + } } // Builder is the interface implemented by types that can render a Directive // as it appears in NGINX configuration files. // -// RegisterBuilder returns the names of the directives for which the builder can -// build NGINX configuration. -// // Build writes the strings that represent the Directive and it's Block to the // io.StringWriter returning any error encountered that caused the write to stop // early. Build must not modify the Directive. type Builder interface { - RegisterBuilder() []string Build(stmt *Directive) string } @@ -63,6 +74,10 @@ func BuildFiles(payload Payload, dir string, options *BuildOptions) error { dir = cwd } + for _, o := range options.Builders { + o(options) + } + for _, config := range payload.Config { path := config.File if !filepath.IsAbs(path) { @@ -111,6 +126,12 @@ func Build(w io.Writer, config Config, options *BuildOptions) error { } } + if options.extBuilders == nil { // might be set if using BuildFiles + for _, o := range options.Builders { + o(options) + } + } + body := strings.Builder{} buildBlock(&body, nil, config.Parsed, 0, 0, options) @@ -147,18 +168,22 @@ func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth i directive := Enquote(stmt.Directive) _, _ = sb.WriteString(directive) - if options.ExternalBuilds != nil { - extDirectivesMap := make(map[string]Builder) - for _, ext := range options.ExternalBuilds { - directives := ext.RegisterBuilder() - for _, d := range directives { - extDirectivesMap[d] = ext - } - - if ext, ok := extDirectivesMap[directive]; ok { - _, _ = sb.WriteString(" ") // space between directives and arguments - _, _ = sb.WriteString(ext.Build(stmt)) - } + if options.extBuilders != nil { + // extDirectivesMap := make(map[string]Builder) + // for _, ext := range options.Builders { + // directives := ext.RegisterBuilder() + // for _, d := range directives { + // extDirectivesMap[d] = ext + // } + + // if ext, ok := 
extDirectivesMap[directive]; ok { + // _, _ = sb.WriteString(" ") // space between directives and arguments + // _, _ = sb.WriteString(ext.Build(stmt)) + // } + // } + if ext, ok := options.extBuilders[directive]; ok { + _, _ = sb.WriteString(" ") // space between directives and arguments + _, _ = sb.WriteString(ext.Build(stmt)) } } else { // special handling for if statements diff --git a/build_test.go b/build_test.go index bc2bc617..35318433 100644 --- a/build_test.go +++ b/build_test.go @@ -260,7 +260,7 @@ var buildFixtures = []buildFixture{ }, { name: "lua block", - options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}}, + options: BuildOptions{Builders: []RegisterBuilder{BuildWithBuilder(lua, lua.DirectiveNames()...)}}, parsed: Directives{ { Directive: "content_by_lua_block", @@ -273,7 +273,7 @@ var buildFixtures = []buildFixture{ }, { name: "set_by_lua_block", - options: BuildOptions{ExternalBuilds: []Builder{&Lua{}}}, + options: BuildOptions{Builders: []RegisterBuilder{BuildWithBuilder(lua, lua.DirectiveNames()...)}}, parsed: Directives{ { Directive: "set_by_lua_block", diff --git a/lex.go b/lex.go index 0c213e65..cf762d3d 100644 --- a/lex.go +++ b/lex.go @@ -46,16 +46,11 @@ func SetTokenChanCap(size int) { // Lexer is an interface for implementing lexers that handle external NGINX tokens during the lexical analysis phase. type Lexer interface { - // RegisterLexer registers an external lexer with a given SubScanner. - // This method integrates the external lexer into the lexical analysis process, - // enabling it to handle external token scanning. It returns a slice of strings - // representing the tokens that the external lexer can recognize. - RegisterLexer(scanner *SubScanner) []string // Lex processes a matched token and returns a channel of NgxToken objects. // This method performs lexical analysis on the matched token and produces a stream of tokens for the parser to consume. 
// The external lexer should close the channel once it has completed lexing the input to signal the end of tokens. // Failure to close the channel will cause the receiver to wait indefinitely. - Lex(matchedToken string) <-chan NgxToken + Lex(s *SubScanner, matchedToken string) <-chan NgxToken } // LexOptions allows customization of the lexing process by specifying external lexers @@ -63,10 +58,31 @@ type Lexer interface { // external lexers can ensure that these directives are processed separately // from the general lexical analysis logic. type LexOptions struct { - ExternalLexers []Lexer + Lexers []RegisterLexer + extLexers map[string]Lexer +} + +type RegisterLexer func(*LexOptions) + +// LexWithLexer registers a Lexer that implements tokenization of an NGINX configuration after one of the given +// stringTokens is encountered by Lex. +func LexWithLexer(l Lexer, stringTokens ...string) RegisterLexer { + return func(o *LexOptions) { + if o.extLexers == nil { + o.extLexers = make(map[string]Lexer) + } + + for _, s := range stringTokens { + o.extLexers[s] = l + } + } } func LexWithOptions(r io.Reader, options LexOptions) chan NgxToken { + for _, o := range options.Lexers { + o(&options) + } + tc := make(chan NgxToken, tokChanCap) go tokenize(r, tc, options) return tc @@ -119,22 +135,6 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) { lexState = skipSpace } - var externalLexers map[string]Lexer - var externalScanner *SubScanner - for _, ext := range options.ExternalLexers { - if externalLexers == nil { - externalLexers = make(map[string]Lexer) - } - - if externalScanner == nil { - externalScanner = &SubScanner{scanner: scanner, tokenLine: tokenLine} - } - - for _, d := range ext.RegisterLexer(externalScanner) { - externalLexers[d] = ext - } - } - for { if readNext { if !scanner.Scan() { @@ -167,13 +167,16 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) { if token.Len() > 0 { tokenStr := token.String() if 
nextTokenIsDirective { - if ext, ok := externalLexers[tokenStr]; ok { + // if ext, ok := externalLexers[tokenStr]; ok { + if ext, ok := options.extLexers[tokenStr]; ok { // saving lex state before emitting tokenStr to know if we encountered start quote lastLexState := lexState emit(tokenStartLine, lexState == inQuote, nil) - externalScanner.tokenLine = tokenLine - extTokenCh := ext.Lex(tokenStr) + externalScanner := &SubScanner{scanner: scanner, tokenLine: tokenLine} + + // externalScanner.tokenLine = tokenLine + extTokenCh := ext.Lex(externalScanner, tokenStr) for tok := range extTokenCh { tokenCh <- tok } diff --git a/lex_test.go b/lex_test.go index 3ba80e47..2848d87f 100644 --- a/lex_test.go +++ b/lex_test.go @@ -429,10 +429,10 @@ func TestLex(t *testing.T) { t.Fatal(err) } defer file.Close() + + lua := &Lua{} options := LexOptions{ - ExternalLexers: []Lexer{ - &Lua{}, - }, + Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, } i := 0 diff --git a/lua.go b/lua.go index b63d0a20..1725aab4 100644 --- a/lua.go +++ b/lua.go @@ -6,10 +6,10 @@ import ( ) type Lua struct { - s *SubScanner + // s *SubScanner } -func (l *Lua) directiveNames() []string { +func (l *Lua) DirectiveNames() []string { return []string{ "init_by_lua_block", "init_worker_by_lua_block", @@ -30,13 +30,8 @@ func (l *Lua) directiveNames() []string { } } -func (l *Lua) RegisterLexer(s *SubScanner) []string { - l.s = s - return l.directiveNames() -} - //nolint:funlen,gocognit,gocyclo,nosec -func (l *Lua) Lex(matchedToken string) <-chan NgxToken { +func (l *Lua) Lex(s *SubScanner, matchedToken string) <-chan NgxToken { tokenCh := make(chan NgxToken) tokenDepth := 0 @@ -51,21 +46,21 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { if matchedToken == "set_by_lua_block" /* #nosec G101 */ { arg := "" for { - if !l.s.Scan() { + if !s.Scan() { return } - next := l.s.Text() + next := s.Text() if isSpace(next) { if arg != "" { - tokenCh <- NgxToken{Value: arg, Line: l.s.Line(), 
IsQuoted: false} + tokenCh <- NgxToken{Value: arg, Line: s.Line(), IsQuoted: false} break } for isSpace(next) { - if !l.s.Scan() { + if !s.Scan() { return } - next = l.s.Text() + next = s.Text() } } arg += next @@ -74,14 +69,14 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { // check that Lua block starts correctly for { - if !l.s.Scan() { + if !s.Scan() { return } - next := l.s.Text() + next := s.Text() if !isSpace(next) { if next != "{" { - lineno := l.s.Line() + lineno := s.Line() tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `expected "{" to start lua block`, Line: &lineno}} return } @@ -92,13 +87,13 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { // Grab everything in Lua block as a single token and watch for curly brace '{' in strings for { - if !l.s.Scan() { + if !s.Scan() { return } - next := l.s.Text() - if err := l.s.Err(); err != nil { - lineno := l.s.Line() + next := s.Text() + if err := s.Err(); err != nil { + lineno := s.Line() tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: err.Error(), Line: &lineno}} } @@ -112,7 +107,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { case next == "}" && !inQuotes: tokenDepth-- if tokenDepth < 0 { - lineno := l.s.Line() + lineno := s.Line() tokenCh <- NgxToken{Error: &ParseError{File: &lexerFile, What: `unexpected "}"`, Line: &lineno}} return } @@ -122,8 +117,8 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { } if tokenDepth == 0 { - tokenCh <- NgxToken{Value: tok.String(), Line: l.s.Line(), IsQuoted: true} - tokenCh <- NgxToken{Value: ";", Line: l.s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx bahavior + tokenCh <- NgxToken{Value: tok.String(), Line: s.Line(), IsQuoted: true} + tokenCh <- NgxToken{Value: ";", Line: s.Line(), IsQuoted: false} // For an end to the Lua string based on the nginx bahavior // See: https://github.com/nginxinc/crossplane/blob/master/crossplane/ext/lua.py#L122C25-L122C41 return } @@ 
-142,7 +137,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { // stricly check that first non space character is { if tokenDepth == 0 { - tokenCh <- NgxToken{Value: next, Line: l.s.Line(), IsQuoted: false} + tokenCh <- NgxToken{Value: next, Line: s.Line(), IsQuoted: false} return } tok.WriteString(next) @@ -154,7 +149,7 @@ func (l *Lua) Lex(matchedToken string) <-chan NgxToken { } func (l *Lua) RegisterBuilder() []string { - return l.directiveNames() + return l.DirectiveNames() } func (l *Lua) Build(stmt *Directive) string { diff --git a/parse_test.go b/parse_test.go index 79268ed5..8c11d562 100644 --- a/parse_test.go +++ b/parse_test.go @@ -43,6 +43,8 @@ func getTestConfigPath(parts ...string) string { return filepath.Join("testdata", "configs", filepath.Join(parts...)) } +var lua = &Lua{} + //nolint:gochecknoglobals,exhaustruct var parseFixtures = []parseFixture{ {"includes-regular", "", ParseOptions{}, Payload{ @@ -1707,9 +1709,7 @@ var parseFixtures = []parseFixture{ ErrorOnUnknownDirectives: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - ExternalLexers: []Lexer{ - &Lua{}, - }, + Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, }, }, Payload{ Status: "ok", @@ -1838,9 +1838,7 @@ var parseFixtures = []parseFixture{ ErrorOnUnknownDirectives: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - ExternalLexers: []Lexer{ - &Lua{}, - }, + Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, }, }, Payload{ Status: "ok", @@ -1932,9 +1930,7 @@ var parseFixtures = []parseFixture{ ParseComments: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - ExternalLexers: []Lexer{ - &Lua{}, - }, + Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, }, }, Payload{ Status: "ok", From 3041f977f3fede2141e16c462700dcb8c38c476b Mon Sep 17 00:00:00 2001 From: Stephen Hurwitz Date: Tue, 11 Jun 2024 14:23:39 -0700 Subject: [PATCH 2/5] Refactor functional options for lex and build 
Refactored the options so that callers cannot accidentally on purpose modify other parts of the config struct. --- build.go | 47 +++++++++++++++++++++++------------------------ lex.go | 31 ++++++++++++++++++++----------- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/build.go b/build.go index d9eead98..f6359078 100644 --- a/build.go +++ b/build.go @@ -25,18 +25,29 @@ type BuildOptions struct { extBuilders map[string]Builder } -type RegisterBuilder func(*BuildOptions) +// RegisterBuilder is an option that can be used to add a builder to build NGINX configuration for custom directives. +type RegisterBuilder interface { + applyBuildOptions(options *BuildOptions) +} -func BuildWithBuilder(b Builder, stringTokens ...string) RegisterBuilder { - return func(o *BuildOptions) { - if o.extBuilders == nil { - o.extBuilders = make(map[string]Builder) - } +type registerBuilder struct { + b Builder + directives []string +} - for _, s := range stringTokens { - o.extBuilders[s] = b - } +func (rb registerBuilder) applyBuildOptions(o *BuildOptions) { + if o.extBuilders == nil { + o.extBuilders = make(map[string]Builder) } + + for _, s := range rb.directives { + o.extBuilders[s] = rb.b + } +} + +// BuildWithBuilder registers a builder to build the NGINX configuration for the given directives. 
+func BuildWithBuilder(b Builder, directives ...string) RegisterBuilder { + return registerBuilder{b: b, directives: directives} } // Builder is the interface implemented by types that can render a Directive @@ -75,7 +86,7 @@ func BuildFiles(payload Payload, dir string, options *BuildOptions) error { } for _, o := range options.Builders { - o(options) + o.applyBuildOptions(options) } for _, config := range payload.Config { @@ -128,7 +139,7 @@ func Build(w io.Writer, config Config, options *BuildOptions) error { if options.extBuilders == nil { // might be set if using BuildFiles for _, o := range options.Builders { - o(options) + o.applyBuildOptions(options) } } @@ -144,7 +155,7 @@ func Build(w io.Writer, config Config, options *BuildOptions) error { return err } -//nolint:funlen,gocognit +//nolint:gocognit func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth int, lastLine int, options *BuildOptions) { for i, stmt := range block { // if the this statement is a comment on the same line as the preview, do not emit EOL for this stmt @@ -169,18 +180,6 @@ func buildBlock(sb io.StringWriter, parent *Directive, block Directives, depth i _, _ = sb.WriteString(directive) if options.extBuilders != nil { - // extDirectivesMap := make(map[string]Builder) - // for _, ext := range options.Builders { - // directives := ext.RegisterBuilder() - // for _, d := range directives { - // extDirectivesMap[d] = ext - // } - - // if ext, ok := extDirectivesMap[directive]; ok { - // _, _ = sb.WriteString(" ") // space between directives and arguments - // _, _ = sb.WriteString(ext.Build(stmt)) - // } - // } if ext, ok := options.extBuilders[directive]; ok { _, _ = sb.WriteString(" ") // space between directives and arguments _, _ = sb.WriteString(ext.Build(stmt)) diff --git a/lex.go b/lex.go index cf762d3d..9fc5c334 100644 --- a/lex.go +++ b/lex.go @@ -62,25 +62,34 @@ type LexOptions struct { extLexers map[string]Lexer } -type RegisterLexer func(*LexOptions) +type 
RegisterLexer interface { applyLexOptions(options *LexOptions) } + +type registerLexer struct { l Lexer stringTokens []string } + +func (rl registerLexer) applyLexOptions(o *LexOptions) { if o.extLexers == nil { o.extLexers = make(map[string]Lexer) } + + for _, s := range rl.stringTokens { o.extLexers[s] = rl.l } +} // LexWithLexer registers a Lexer that implements tokenization of an NGINX configuration after one of the given // stringTokens is encountered by Lex. func LexWithLexer(l Lexer, stringTokens ...string) RegisterLexer { - return func(o *LexOptions) { - if o.extLexers == nil { - o.extLexers = make(map[string]Lexer) - } - - for _, s := range stringTokens { - o.extLexers[s] = l - } - } + return registerLexer{l: l, stringTokens: stringTokens} } func LexWithOptions(r io.Reader, options LexOptions) chan NgxToken { for _, o := range options.Lexers { - o(&options) + o.applyLexOptions(&options) } tc := make(chan NgxToken, tokChanCap) From 8eacf94240c5ed04d5c8332c5879ff396327ad32 Mon Sep 17 00:00:00 2001 From: Stephen Hurwitz Date: Tue, 11 Jun 2024 14:26:12 -0700 Subject: [PATCH 3/5] Adds missing docstring to lex function option Also removed some commented out code. --- lex.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lex.go b/lex.go index 9fc5c334..8ec40bb0 100644 --- a/lex.go +++ b/lex.go @@ -62,6 +62,7 @@ type LexOptions struct { extLexers map[string]Lexer } +// RegisterLexer is an option that can be used to add a lexer to tokenize external NGINX tokens. 
type RegisterLexer interface { applyLexOptions(options *LexOptions) } @@ -176,15 +177,12 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) { if token.Len() > 0 { tokenStr := token.String() if nextTokenIsDirective { - // if ext, ok := externalLexers[tokenStr]; ok { if ext, ok := options.extLexers[tokenStr]; ok { // saving lex state before emitting tokenStr to know if we encountered start quote lastLexState := lexState emit(tokenStartLine, lexState == inQuote, nil) externalScanner := &SubScanner{scanner: scanner, tokenLine: tokenLine} - - // externalScanner.tokenLine = tokenLine extTokenCh := ext.Lex(externalScanner, tokenStr) for tok := range extTokenCh { tokenCh <- tok From fc21994eb5a4a58442cdbf1c72fdcc3ebe529595 Mon Sep 17 00:00:00 2001 From: Stephen Hurwitz Date: Tue, 11 Jun 2024 14:28:12 -0700 Subject: [PATCH 4/5] removes commented out field on Lua --- lua.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lua.go b/lua.go index 1725aab4..cb3df343 100644 --- a/lua.go +++ b/lua.go @@ -5,9 +5,7 @@ import ( "strings" ) -type Lua struct { - // s *SubScanner -} +type Lua struct{} func (l *Lua) DirectiveNames() []string { return []string{ From afb28588c7e10ba4db15effe52f764afe0f8a892 Mon Sep 17 00:00:00 2001 From: Stephen Hurwitz Date: Tue, 11 Jun 2024 14:33:52 -0700 Subject: [PATCH 5/5] Adds helper methods to Lua for registering --- analyze_test.go | 2 +- build_test.go | 4 ++-- lex_test.go | 2 +- lua.go | 10 +++++++--- parse_test.go | 6 +++--- 5 files changed, 14 insertions(+), 10 deletions(-) diff --git a/analyze_test.go b/analyze_test.go index a5027801..5010d867 100644 --- a/analyze_test.go +++ b/analyze_test.go @@ -2110,7 +2110,7 @@ func TestAnalyze_lua(t *testing.T) { err := analyze("nginx.conf", tc.stmt, ";", tc.ctx, &ParseOptions{ MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, + Lexers: []RegisterLexer{lua.RegisterLexer()}, }, }) 
diff --git a/build_test.go b/build_test.go index 35318433..4490db76 100644 --- a/build_test.go +++ b/build_test.go @@ -260,7 +260,7 @@ var buildFixtures = []buildFixture{ }, { name: "lua block", - options: BuildOptions{Builders: []RegisterBuilder{BuildWithBuilder(lua, lua.DirectiveNames()...)}}, + options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}}, parsed: Directives{ { Directive: "content_by_lua_block", @@ -273,7 +273,7 @@ var buildFixtures = []buildFixture{ }, { name: "set_by_lua_block", - options: BuildOptions{Builders: []RegisterBuilder{BuildWithBuilder(lua, lua.DirectiveNames()...)}}, + options: BuildOptions{Builders: []RegisterBuilder{lua.RegisterBuilder()}}, parsed: Directives{ { Directive: "set_by_lua_block", diff --git a/lex_test.go b/lex_test.go index 2848d87f..31a3375b 100644 --- a/lex_test.go +++ b/lex_test.go @@ -432,7 +432,7 @@ func TestLex(t *testing.T) { lua := &Lua{} options := LexOptions{ - Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, + Lexers: []RegisterLexer{lua.RegisterLexer()}, } i := 0 diff --git a/lua.go b/lua.go index cb3df343..8a77880d 100644 --- a/lua.go +++ b/lua.go @@ -7,7 +7,7 @@ import ( type Lua struct{} -func (l *Lua) DirectiveNames() []string { +func (l *Lua) directiveNames() []string { return []string{ "init_by_lua_block", "init_worker_by_lua_block", @@ -28,6 +28,10 @@ func (l *Lua) DirectiveNames() []string { } } +func (l *Lua) RegisterLexer() RegisterLexer { + return LexWithLexer(l, l.directiveNames()...) +} + //nolint:funlen,gocognit,gocyclo,nosec func (l *Lua) Lex(s *SubScanner, matchedToken string) <-chan NgxToken { tokenCh := make(chan NgxToken) @@ -146,8 +150,8 @@ func (l *Lua) Lex(s *SubScanner, matchedToken string) <-chan NgxToken { return tokenCh } -func (l *Lua) RegisterBuilder() []string { - return l.DirectiveNames() +func (l *Lua) RegisterBuilder() RegisterBuilder { + return BuildWithBuilder(l, l.directiveNames()...) 
} func (l *Lua) Build(stmt *Directive) string { diff --git a/parse_test.go b/parse_test.go index 8c11d562..65a44ca9 100644 --- a/parse_test.go +++ b/parse_test.go @@ -1709,7 +1709,7 @@ var parseFixtures = []parseFixture{ ErrorOnUnknownDirectives: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, + Lexers: []RegisterLexer{lua.RegisterLexer()}, }, }, Payload{ Status: "ok", @@ -1838,7 +1838,7 @@ var parseFixtures = []parseFixture{ ErrorOnUnknownDirectives: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, + Lexers: []RegisterLexer{lua.RegisterLexer()}, }, }, Payload{ Status: "ok", @@ -1930,7 +1930,7 @@ var parseFixtures = []parseFixture{ ParseComments: true, MatchFuncs: []MatchFunc{MatchLua}, LexOptions: LexOptions{ - Lexers: []RegisterLexer{LexWithLexer(lua, lua.DirectiveNames()...)}, + Lexers: []RegisterLexer{lua.RegisterLexer()}, }, }, Payload{ Status: "ok",