Always return parser errors gracefully #3578

Merged · 1 commit · Sep 23, 2024
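In short, this change makes `lexer.Lex` return an error alongside the token stream, and has the parsers convert recovered panics (memory-limit, token-limit, and other internal errors) into returned errors rather than letting them escape to the caller. A minimal sketch of the resulting calling convention follows; it is an illustrative snippet, not code from this PR, and assumes the `github.com/onflow/cadence/runtime/parser` import path at this revision:

```go
package main

import (
	"fmt"

	"github.com/onflow/cadence/runtime/parser"
)

func main() {
	// After this change, lexer failures (e.g. exceeding the token limit or a
	// memory-gauge limit) come back from ParseProgram as an ordinary error
	// instead of a panic the caller has to recover from.
	program, err := parser.ParseProgram(nil, []byte("fun main() {}"), parser.Config{})
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	fmt.Println("parsed", len(program.Declarations()), "declaration(s)")
}
```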
12 changes: 9 additions & 3 deletions runtime/environment.go
@@ -522,9 +522,15 @@ func (e *interpreterEnvironment) parseAndCheckProgram(
err error,
) {
wrapParsingCheckingError := func(err error) error {
return &ParsingCheckingError{
Err: err,
Location: location,
switch err.(type) {
// Wrap only parsing and checking errors.
case *sema.CheckerError, parser.Error:
return &ParsingCheckingError{
Err: err,
Location: location,
}
default:
return err
}
}

1 change: 1 addition & 0 deletions runtime/error_test.go
@@ -59,6 +59,7 @@ func TestRuntimeError(t *testing.T) {
Location: location,
},
)

require.EqualError(
t,
err,
13 changes: 11 additions & 2 deletions runtime/old_parser/parser.go
@@ -91,8 +91,13 @@ func Parse[T any](
config Config,
) (result T, errors []error) {
// create a lexer, which turns the input string into tokens
tokens := lexer.Lex(input, memoryGauge)
tokens, err := lexer.Lex(input, memoryGauge)
if err != nil {
errors = append(errors, err)
return
}
defer tokens.Reclaim()

return ParseTokenStream(
memoryGauge,
tokens,
@@ -637,8 +642,12 @@ func ParseArgumentList(
}

func ParseProgram(memoryGauge common.MemoryGauge, code []byte, config Config) (program *ast.Program, err error) {
tokens := lexer.Lex(code, memoryGauge)
tokens, err := lexer.Lex(code, memoryGauge)
if err != nil {
return
}
defer tokens.Reclaim()

return ParseProgramFromTokenStream(memoryGauge, tokens, config)
}

15 changes: 4 additions & 11 deletions runtime/old_parser/parser_test.go
@@ -732,18 +732,11 @@ func TestParseArgumentList(t *testing.T) {
gauge := makeLimitingMemoryGauge()
gauge.Limit(common.MemoryKindTypeToken, 0)

var panicMsg any
(func() {
defer func() {
panicMsg = recover()
}()
_, err := ParseArgumentList(gauge, []byte(`(1, b: true)`), Config{})
require.Len(t, err, 1)
require.IsType(t, errors.MemoryError{}, err[0])

ParseArgumentList(gauge, []byte(`(1, b: true)`), Config{})
})()

require.IsType(t, errors.MemoryError{}, panicMsg)

fatalError, _ := panicMsg.(errors.MemoryError)
fatalError, _ := err[0].(errors.MemoryError)
var expectedError limitingMemoryGaugeError
assert.ErrorAs(t, fatalError, &expectedError)
})
29 changes: 8 additions & 21 deletions runtime/parser/expression_test.go
@@ -324,17 +324,11 @@ func TestParseAdvancedExpression(t *testing.T) {
gauge.debug = true
gauge.Limit(common.MemoryKindPosition, 11)

var panicMsg any
(func() {
defer func() {
panicMsg = recover()
}()
ParseExpression(gauge, []byte("1 < 2"), Config{})
})()
_, errs := ParseExpression(gauge, []byte("1 < 2"), Config{})
require.Len(t, errs, 1)
require.IsType(t, errors.MemoryError{}, errs[0])

require.IsType(t, errors.MemoryError{}, panicMsg)

fatalError, _ := panicMsg.(errors.MemoryError)
fatalError, _ := errs[0].(errors.MemoryError)
var expectedError limitingMemoryGaugeError
assert.ErrorAs(t, fatalError, &expectedError)
})
@@ -346,18 +340,11 @@ func TestParseAdvancedExpression(t *testing.T) {
gauge := makeLimitingMemoryGauge()
gauge.Limit(common.MemoryKindIntegerExpression, 1)

var panicMsg any
(func() {
defer func() {
panicMsg = recover()
}()

ParseExpression(gauge, []byte("1 < 2 > 3"), Config{})
})()

require.IsType(t, errors.MemoryError{}, panicMsg)
_, errs := ParseExpression(gauge, []byte("1 < 2 > 3"), Config{})
require.Len(t, errs, 1)
require.IsType(t, errors.MemoryError{}, errs[0])

fatalError, _ := panicMsg.(errors.MemoryError)
fatalError, _ := errs[0].(errors.MemoryError)
var expectedError limitingMemoryGaugeError
assert.ErrorAs(t, fatalError, &expectedError)
})
21 changes: 9 additions & 12 deletions runtime/parser/lexer/lexer.go
@@ -144,13 +144,13 @@ var pool = sync.Pool{
},
}

func Lex(input []byte, memoryGauge common.MemoryGauge) TokenStream {
func Lex(input []byte, memoryGauge common.MemoryGauge) (TokenStream, error) {
l := pool.Get().(*lexer)
l.clear()
l.memoryGauge = memoryGauge
l.input = input
l.run(rootState)
return l
err := l.run(rootState)
return l, err
}

// run executes the stateFn, which will scan the runes in the input
@@ -162,32 +162,29 @@ func Lex(input []byte, memoryGauge common.MemoryGauge) TokenStream {
// stateFn is returned, which for example happens when reaching the end of the file.
//
// When all stateFn have been executed, an EOF token is emitted.
func (l *lexer) run(state stateFn) {
func (l *lexer) run(state stateFn) (err error) {

// catch panic exceptions, emit it to the tokens channel before
// closing it
defer func() {
if r := recover(); r != nil {
var err error
switch r := r.(type) {
case errors.MemoryError, errors.InternalError:
// fatal errors and internal errors percolates up.
// Note: not all fatal errors are internal errors.
// e.g: memory limit exceeding is a fatal error, but also a user error.
panic(r)
// fatal errors and internal errors percolates up.
// Note: not all fatal errors are internal errors.
// e.g: memory limit exceeding is a fatal error, but also a user error.
case error:
err = r
default:
err = fmt.Errorf("lexer: %v", r)
}

l.emitError(err)
}
}()

for state != nil {
state = state(l)
}

return
}

// next decodes the next rune (UTF8 character) from the input string.
22 changes: 12 additions & 10 deletions runtime/parser/lexer/lexer_test.go
@@ -63,7 +63,10 @@ func testLex(t *testing.T, input string, expected []token) {

bytes := []byte(input)

withTokens(Lex(bytes, nil), func(actualTokens []Token) {
tokenStream, err := Lex(bytes, nil)
require.NoError(t, err)

withTokens(tokenStream, func(actualTokens []Token) {
utils.AssertEqualWithDiff(t, expectedTokens, actualTokens)

require.Len(t, actualTokens, len(expectedTokens))
@@ -2385,7 +2388,8 @@ func TestRevert(t *testing.T) {

t.Parallel()

tokenStream := Lex([]byte("1 2 3"), nil)
tokenStream, err := Lex([]byte("1 2 3"), nil)
require.NoError(t, err)

// Assert all tokens

@@ -2550,7 +2554,8 @@ func TestEOFsAfterError(t *testing.T) {

t.Parallel()

tokenStream := Lex([]byte(`1 ''`), nil)
tokenStream, err := Lex([]byte(`1 ''`), nil)
require.NoError(t, err)

// Assert all tokens

@@ -2613,7 +2618,8 @@ func TestEOFsAfterEmptyInput(t *testing.T) {

t.Parallel()

tokenStream := Lex(nil, nil)
tokenStream, err := Lex(nil, nil)
require.NoError(t, err)

// Assert EOFs keep on being returned for Next()
// at the end of the stream
@@ -2644,10 +2650,6 @@ func TestLimit(t *testing.T) {

code := b.String()

assert.PanicsWithValue(t,
TokenLimitReachedError{},
func() {
_ = Lex([]byte(code), nil)
},
)
_, err := Lex([]byte(code), nil)
require.ErrorAs(t, err, &TokenLimitReachedError{})
}
32 changes: 23 additions & 9 deletions runtime/parser/parser.go
@@ -100,8 +100,13 @@
config Config,
) (result T, errors []error) {
// create a lexer, which turns the input string into tokens
tokens := lexer.Lex(input, memoryGauge)
tokens, err := lexer.Lex(input, memoryGauge)
if err != nil {
errors = append(errors, err)
return
}
defer tokens.Reclaim()

return ParseTokenStream(
memoryGauge,
tokens,
@@ -127,29 +132,34 @@

defer func() {
if r := recover(); r != nil {
var err error
switch r := r.(type) {
case ParseError:
// Report parser errors.
p.report(r)

// Do not treat non-parser errors as syntax errors.
case errors.InternalError, errors.UserError:
// Also do not wrap non-parser errors, that are already
// known cadence errors. i.e: internal errors / user errors.
// e.g: `errors.MemoryError`
panic(r)
// Also do not wrap non-parser errors, that are already
// known cadence errors. i.e: internal errors / user errors.
// e.g: `errors.MemoryError`
case errors.UserError:
err = r
case errors.InternalError:
err = r

case error:
// Any other error/panic is an internal error.
// Thus, wrap with an UnexpectedError to mark it as an internal error
// and propagate up the call stack.
panic(errors.NewUnexpectedErrorFromCause(r))
err = errors.NewUnexpectedErrorFromCause(r)

default:
panic(errors.NewUnexpectedError("parser: %v", r))
err = errors.NewUnexpectedError("parser: %v", r)

}

var zero T
result = zero
errs = p.errors

errs = append(errs, err)
}

for _, bufferedErrors := range p.bufferedErrorsStack {
@@ -677,8 +687,12 @@
}

func ParseProgram(memoryGauge common.MemoryGauge, code []byte, config Config) (program *ast.Program, err error) {
tokens := lexer.Lex(code, memoryGauge)
tokens, err := lexer.Lex(code, memoryGauge)
if err != nil {
return
}
defer tokens.Reclaim()

return ParseProgramFromTokenStream(memoryGauge, tokens, config)
}

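For the lower-level entry points, errors recovered inside `ParseTokenStream` now land in the returned error slice instead of being re-panicked. A small usage sketch under the same import-path assumption as above; the input string and printed messages are illustrative:

```go
package main

import (
	"fmt"

	"github.com/onflow/cadence/runtime/parser"
)

func main() {
	// ParseExpression returns the parsed expression plus a slice of errors.
	// With this change, that slice also carries failures that were previously
	// panics, such as memory-limit errors raised while lexing or parsing.
	expr, errs := parser.ParseExpression(nil, []byte("1 +"), parser.Config{})
	if len(errs) > 0 {
		for _, err := range errs {
			fmt.Println("error:", err)
		}
		return
	}
	fmt.Printf("parsed expression: %v\n", expr)
}
```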
14 changes: 4 additions & 10 deletions runtime/parser/parser_test.go
@@ -734,18 +734,12 @@ func TestParseArgumentList(t *testing.T) {
gauge := makeLimitingMemoryGauge()
gauge.Limit(common.MemoryKindTypeToken, 0)

var panicMsg any
(func() {
defer func() {
panicMsg = recover()
}()
_, errs := ParseArgumentList(gauge, []byte(`(1, b: true)`), Config{})
require.Len(t, errs, 1)

ParseArgumentList(gauge, []byte(`(1, b: true)`), Config{})
})()
require.IsType(t, errors.MemoryError{}, errs[0])

require.IsType(t, errors.MemoryError{}, panicMsg)

fatalError, _ := panicMsg.(errors.MemoryError)
fatalError, _ := errs[0].(errors.MemoryError)
var expectedError limitingMemoryGaugeError
assert.ErrorAs(t, fatalError, &expectedError)
})
5 changes: 4 additions & 1 deletion runtime/repl.go
@@ -254,8 +254,11 @@
code = prefixedCode
}

tokens := lexer.Lex(code, nil)
tokens, err := lexer.Lex(code, nil)

defer tokens.Reclaim()
if err != nil {
return
}


inputIsComplete = isInputComplete(tokens)

2 changes: 1 addition & 1 deletion runtime/runtime_test.go
@@ -7497,7 +7497,7 @@ func TestRuntimeInternalErrors(t *testing.T) {

RequireError(t, err)

assertRuntimeErrorIsInternalError(t, err)
assertRuntimeErrorIsExternalError(t, err)
})

}