Token limit fix CVE-2023-49559 (#291)
* Add directive limit to prevent overloading

* Added token limit to parser for query and schema parsing, removed my previous directive limit

* Update parser/parser.go

* Update parser/parser.go

* Update parser/query.go

* Fix lint

Signed-off-by: Steve Coffman <steve@khanacademy.org>

---------

Signed-off-by: Steve Coffman <steve@khanacademy.org>
Co-authored-by: Yuval Moravchick <yuval.moravchick@catonetworks.com>
Co-authored-by: Steve Coffman <StevenACoffman@users.noreply.github.com>
Co-authored-by: Steve Coffman <steve@khanacademy.org>
4 people authored Jun 11, 2024
1 parent 6db1bd3 commit d457fc0
Showing 4 changed files with 22 additions and 4 deletions.
14 changes: 14 additions & 0 deletions parser/parser.go
@@ -1,6 +1,7 @@
package parser

import (
+	"fmt"
	"strconv"

	"github.com/vektah/gqlparser/v2/ast"
@@ -20,6 +21,13 @@ type parser struct {

	comment          *ast.CommentGroup
	commentConsuming bool
+
+	tokenCount    int
+	maxTokenLimit int
}
+
+func (p *parser) SetMaxTokenLimit(maxToken int) {
+	p.maxTokenLimit = maxToken
+}

func (p *parser) consumeComment() (*ast.Comment, bool) {
@@ -95,6 +103,12 @@ func (p *parser) next() lexer.Token {
	if p.err != nil {
		return p.prev
	}
+	// Increment the token count before reading the next token
+	p.tokenCount++
+	if p.maxTokenLimit != 0 && p.tokenCount > p.maxTokenLimit {
+		p.err = fmt.Errorf("exceeded token limit of %d", p.maxTokenLimit)
+		return p.prev
+	}
	if p.peeked {
		p.peeked = false
		p.comment = nil
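The guard added to next() above is the core of the fix: every token read increments a counter, and once a non-zero cap is exceeded the parser records an error and stops consuming input. Below is a minimal, self-contained sketch of that pattern; the tokenLimitedScanner type and its token slice are illustrative stand-ins, not gqlparser's lexer API.

package main

import (
	"fmt"
	"strings"
)

// tokenLimitedScanner mirrors the pattern the hunk above adds to parser.next():
// count every token as it is handed out and fail once a cap is exceeded.
// It is a stand-in for the real lexer, not gqlparser's API.
type tokenLimitedScanner struct {
	tokens        []string
	pos           int
	tokenCount    int
	maxTokenLimit int // 0 means "no limit", matching the diff's convention
	err           error
}

func (s *tokenLimitedScanner) next() (string, bool) {
	if s.err != nil || s.pos >= len(s.tokens) {
		return "", false
	}
	// Increment the count before handing out the next token.
	s.tokenCount++
	if s.maxTokenLimit != 0 && s.tokenCount > s.maxTokenLimit {
		s.err = fmt.Errorf("exceeded token limit of %d", s.maxTokenLimit)
		return "", false
	}
	tok := s.tokens[s.pos]
	s.pos++
	return tok, true
}

func main() {
	// A pathological input with far more tokens than the cap allows.
	s := &tokenLimitedScanner{
		tokens:        strings.Fields(strings.Repeat("{ field } ", 10)),
		maxTokenLimit: 5,
	}
	for {
		if _, ok := s.next(); !ok {
			break
		}
	}
	fmt.Println(s.err) // exceeded token limit of 5
}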
5 changes: 4 additions & 1 deletion parser/parser_test.go
@@ -166,5 +166,8 @@ func TestParserUtils(t *testing.T) {
}

func newParser(input string) parser {
-	return parser{lexer: lexer.New(&ast.Source{Input: input, Name: "input.graphql"})}
+	return parser{
+		lexer:         lexer.New(&ast.Source{Input: input, Name: "input.graphql"}),
+		maxTokenLimit: 15000, // 15000 is the default value
+	}
}
4 changes: 2 additions & 2 deletions parser/query.go
@@ -2,14 +2,14 @@ package parser

import (
	"github.com/vektah/gqlparser/v2/lexer"

	//nolint:revive
	. "github.com/vektah/gqlparser/v2/ast"
)

func ParseQuery(source *Source) (*QueryDocument, error) {
	p := parser{
-		lexer: lexer.New(source),
+		lexer:         lexer.New(source),
+		maxTokenLimit: 0, // 0 is the default value
	}
	return p.parseQueryDocument(), p.err
}
3 changes: 2 additions & 1 deletion parser/schema.go
@@ -20,7 +20,8 @@ func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) {

func ParseSchema(source *Source) (*SchemaDocument, error) {
	p := parser{
-		lexer: lexer.New(source),
+		lexer:         lexer.New(source),
+		maxTokenLimit: 15000, // default value
	}
	sd, err := p.parseSchemaDocument(), p.err
	if err != nil {
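Taken together, the construction sites above leave query parsing unlimited (maxTokenLimit: 0 preserves existing behaviour for arbitrary callers) while schema parsing is capped at 15000 tokens. A rough end-to-end sketch of the schema-side effect follows, assuming the import paths shown in the hunks above; the threshold and the error text come from this diff, so treat the expected output as indicative rather than guaranteed.

package main

import (
	"fmt"
	"strings"

	"github.com/vektah/gqlparser/v2/ast"
	"github.com/vektah/gqlparser/v2/parser"
)

func main() {
	// Build a schema source with well over 15000 tokens.
	var b strings.Builder
	b.WriteString("type Query {\n")
	for i := 0; i < 20000; i++ {
		fmt.Fprintf(&b, "  field%d: Int\n", i)
	}
	b.WriteString("}\n")

	_, err := parser.ParseSchema(&ast.Source{Name: "big.graphql", Input: b.String()})
	// With this commit applied, err should be non-nil:
	// "exceeded token limit of 15000"
	fmt.Println(err)
}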
