Skip to content

Commit

Permalink
ignore writerune result
Browse files Browse the repository at this point in the history
  • Loading branch information
tufanbarisyildirim committed Mar 25, 2023
1 parent 138c741 commit 473ea30
Showing 1 changed file with 16 additions and 17 deletions.
33 changes: 16 additions & 17 deletions parser/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import (
"github.com/tufanbarisyildirim/gonginx/parser/token"
)

//lexer is the main tokenizer
// lexer is the main tokenizer
type lexer struct {
reader *bufio.Reader
file string
Expand All @@ -19,26 +19,26 @@ type lexer struct {
Latest token.Token
}

//lex initializes a lexer from string conetnt
// lex initializes a lexer from string content
// lex builds a lexer that tokenizes the given string content.
func lex(content string) *lexer {
	buffer := bytes.NewBufferString(content)
	return newLexer(buffer)
}

//newLexer initilizes a lexer from a reader
// newLexer initializes a lexer from a reader
// newLexer constructs a lexer over the given reader, starting at line 1.
func newLexer(r io.Reader) *lexer {
	l := &lexer{}
	l.line = 1
	l.reader = bufio.NewReader(r)
	return l
}

//Scan gives you next token
// Scan gives you next token
// scan produces the next token from the input and remembers it in Latest.
func (s *lexer) scan() token.Token {
	tok := s.getNextToken()
	s.Latest = tok
	return tok
}

//All scans all token and returns them as a slice
// All scans all token and returns them as a slice
func (s *lexer) all() token.Tokens {
tokens := make([]token.Token, 0)
for {
Expand Down Expand Up @@ -84,7 +84,7 @@ reToken:
}
}

//Peek returns nexr rune without consuming it
// Peek returns next rune without consuming it
func (s *lexer) peek() rune {
r, _, _ := s.reader.ReadRune()
_ = s.reader.UnreadRune()
Expand All @@ -110,7 +110,7 @@ func (s *lexer) readUntil(until runeCheck) string {
return buf.String()
}

//NewToken creates a new Token with its line and column
// NewToken creates a new Token with its line and column
func (s *lexer) NewToken(tokenType token.Type) token.Token {
return token.Token{
Type: tokenType,
Expand Down Expand Up @@ -155,8 +155,6 @@ func (s *lexer) scanLuaCode() token.Token {
if ch == rune(token.EOF) {
panic("unexpected end of file while scanning a string, maybe an unclosed lua code?")
}
tmp := string(ch)
strings.TrimSpace(tmp)
if inComment {
if ch == '\n' {
inComment = false
Expand All @@ -181,7 +179,8 @@ func (s *lexer) scanLuaCode() token.Token {
}
}

/**
/*
*
\” – To escape “ within double quoted string.
\\ – To escape the backslash.
\n – To add line breaks between string.
Expand All @@ -191,7 +190,7 @@ func (s *lexer) scanLuaCode() token.Token {
func (s *lexer) scanQuotedString(delimiter rune) token.Token {
var buf bytes.Buffer
tok := s.NewToken(token.QuotedString)
buf.WriteRune(s.read()) //consume delimiter
_, _ = buf.WriteRune(s.read()) //consume delimiter
for {
ch := s.read()

Expand All @@ -203,20 +202,20 @@ func (s *lexer) scanQuotedString(delimiter rune) token.Token {
if needsEscape(s.peek(), delimiter) {
switch s.read() {
case 'n':
buf.WriteRune('\n')
_, _ = buf.WriteRune('\n')
case 'r':
buf.WriteRune('\r')
_, _ = buf.WriteRune('\r')
case 't':
buf.WriteRune('\t')
_, _ = buf.WriteRune('\t')
case '\\':
buf.WriteRune('\\')
_, _ = buf.WriteRune('\\')
case delimiter:
buf.WriteRune(delimiter)
_, _ = buf.WriteRune(delimiter)
}
continue
}
}
buf.WriteRune(ch)
_, _ = buf.WriteRune(ch)
if ch == delimiter {
break
}
Expand Down

0 comments on commit 473ea30

Please sign in to comment.