Commit

fix coverage

tswast committed Apr 11, 2024
1 parent 8ef9b2c commit ae835f6

Showing 3 changed files with 29 additions and 29 deletions.
tests/unit/line_arg_parser/test_lexer.py (6 changes: 3 additions & 3 deletions)

@@ -19,14 +19,14 @@

 @pytest.fixture(scope="session")
 def lexer_class():
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Lexer
+    from bigquery_magics.line_arg_parser.lexer import Lexer

     return Lexer


 def test_empy_input(lexer_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     lexer = lexer_class("")
     tokens = list(lexer)
tests/unit/line_arg_parser/test_parser.py (48 changes: 24 additions & 24 deletions)

@@ -19,14 +19,14 @@

 @pytest.fixture(scope="session")
 def parser_class():
-    from google.cloud.bigquery.magics.line_arg_parser.parser import Parser
+    from bigquery_magics.line_arg_parser.parser import Parser

     return Parser


 def test_consume_expected_eol(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)]
@@ -36,8 +36,8 @@ def test_consume_expected_eol(parser_class):


 def test_consume_unexpected_eol(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)]
@@ -48,8 +48,8 @@ def test_consume_unexpected_eol(parser_class):


 def test_input_line_unexpected_input(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [
@@ -64,8 +64,8 @@ def test_input_line_unexpected_input(parser_class):


 def test_destination_var_unexpected_input(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [
@@ -79,8 +79,8 @@ def test_destination_var_unexpected_input(parser_class):


 def test_option_value_unexpected_input(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [
@@ -94,8 +94,8 @@ def test_option_value_unexpected_input(parser_class):


 def test_dict_items_empty_dict(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.RCURL, lexeme="}", pos=22)]
@@ -107,8 +107,8 @@ def test_dict_items_empty_dict(parser_class):


 def test_dict_items_trailing_comma(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [
@@ -129,8 +129,8 @@ def test_dict_items_trailing_comma(parser_class):


 def test_dict_item_unknown_input(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.UNKNOWN, lexeme="#/%", pos=35)]
@@ -141,9 +141,9 @@ def test_dict_item_unknown_input(parser_class):


 def test_pyvalue_list_containing_dict(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
-    from google.cloud.bigquery.magics.line_arg_parser.parser import PyDict, PyList
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser.parser import PyDict, PyList

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [
@@ -174,8 +174,8 @@ def test_pyvalue_list_containing_dict(parser_class):


 def test_pyvalue_invalid_token(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import ParseError, TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import ParseError, TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.OPTION_SPEC, lexeme="--verbose", pos=75)]
@@ -187,8 +187,8 @@ def test_pyvalue_invalid_token(parser_class):


 def test_collection_items_empty(parser_class):
-    from google.cloud.bigquery.magics.line_arg_parser import TokenType
-    from google.cloud.bigquery.magics.line_arg_parser.lexer import Token
+    from bigquery_magics.line_arg_parser import TokenType
+    from bigquery_magics.line_arg_parser.lexer import Token

     # A simple iterable of Tokens is sufficient.
     fake_lexer = [Token(TokenType.RPAREN, lexeme=")", pos=30)]
tests/unit/line_arg_parser/test_visitors.py (4 changes: 2 additions & 2 deletions)

@@ -19,13 +19,13 @@

 @pytest.fixture
 def base_visitor():
-    from google.cloud.bigquery.magics.line_arg_parser.visitors import NodeVisitor
+    from bigquery_magics.line_arg_parser.visitors import NodeVisitor

     return NodeVisitor()


 def test_unknown_node(base_visitor):
-    from google.cloud.bigquery.magics.line_arg_parser.parser import ParseNode
+    from bigquery_magics.line_arg_parser.parser import ParseNode

     class UnknownNode(ParseNode):
         pass
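
For reference, a minimal sketch of the renamed import paths in use, assuming bigquery_magics is installed and exposes the same line_arg_parser API these tests exercise; the Parser(fake_lexer) construction is inferred from the "simple iterable of Tokens" pattern above and is not shown verbatim in this diff:

    # Hedged sketch, not part of this commit: imports follow the new package
    # paths introduced above; Parser's constructor argument is assumed from the
    # tests' fake_lexer pattern.
    from bigquery_magics.line_arg_parser import TokenType
    from bigquery_magics.line_arg_parser.lexer import Lexer, Token
    from bigquery_magics.line_arg_parser.parser import Parser

    # Lexing an empty magic line, as in test_empy_input.
    tokens = list(Lexer(""))

    # A plain list of Tokens stands in for a real lexer when driving the parser.
    fake_lexer = [Token(TokenType.EOL, lexeme="", pos=0)]
    parser = Parser(fake_lexer)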
