@@ -87,10 +87,6 @@ func (e *extScanner) Err() error   { return e.scanner.Err() }
 func (e *extScanner) Text() string { return e.scanner.Text() }
 func (e *extScanner) Line() int    { return e.tokenLine }
 
-type tokenInfo struct {
-	LineNumberExists bool
-}
-
 //nolint:gocyclo,funlen,gocognit,maintidx
 func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
 	token := strings.Builder{}
@@ -129,7 +125,7 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
 		for _, d := range ext.Register(externalScanner) {
 			if _, ok := externalLexers[d]; ok {
 				// Handle the duplicate token name, emitting an error token and exit
-				tokenCh <- NgxToken{Value: "Duplicate token name", Line: tokenLine, IsQuoted: false, Error: errors.New("Duplicate token name handled")}
+				tokenCh <- NgxToken{Value: "Duplicate token name", Line: tokenLine, IsQuoted: false, Error: errors.New("duplicate token name handled")}
 				close(tokenCh)
 				return
 			}
@@ -169,7 +165,7 @@ func tokenize(reader io.Reader, tokenCh chan NgxToken, options LexOptions) {
 		if token.Len() > 0 {
 			tokenStr := token.String()
 			if ext, ok := externalLexers[tokenStr]; ok {
-				if nextTokenIsDirective == true {
+				if nextTokenIsDirective {
 					// saving lex state before emitting tokenStr to know if we encountered start quote
 					lastLexState := lexState
 					if lexState == inQuote {
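
For context, a minimal sketch of how the tokenize function declared in the hunks above could be driven from within the same package. This is not part of the commit: the package name, the sample nginx config string, the zero-value LexOptions{}, and the test wrapper are assumptions for illustration, and the loop relies on tokenize closing tokenCh when it finishes, as it does on the duplicate-name error path shown above.

```go
// sketch_test.go (hypothetical): assumes it sits alongside lex.go, since
// tokenize, NgxToken, and LexOptions are not exported.
package crossplane

import (
	"fmt"
	"strings"
	"testing"
)

func TestTokenizeSketch(t *testing.T) {
	tokenCh := make(chan NgxToken)
	// LexOptions{} zero value is an assumption; no external lexers registered.
	go tokenize(strings.NewReader("events { worker_connections 1024; }"), tokenCh, LexOptions{})
	for tok := range tokenCh {
		// Error tokens carry the failure, e.g. "duplicate token name handled".
		if tok.Error != nil {
			t.Fatalf("lex error at line %d: %v", tok.Line, tok.Error)
		}
		fmt.Printf("line %d: %q (quoted=%t)\n", tok.Line, tok.Value, tok.IsQuoted)
	}
}
```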