Add tests for when BigQuery Storage client isn't installed
tswast committed Apr 2, 2019
1 parent a85c02c commit ef4aa5d
Showing 2 changed files with 134 additions and 10 deletions.
10 changes: 9 additions & 1 deletion bigquery/google/cloud/bigquery/magics.py
@@ -134,6 +134,10 @@
     from IPython.core import magic_arguments
 except ImportError:  # pragma: NO COVER
     raise ImportError("This module can only be loaded in IPython.")
+try:
+    from google.cloud import bigquery_storage_v1beta1
+except ImportError:  # pragma: NO COVER
+    bigquery_storage_v1beta1 = None
 
 import google.auth
 from google.cloud import bigquery
@@ -375,6 +379,10 @@ def _make_bqstorage_client(use_bqstorage_api, credentials):
     if not use_bqstorage_api:
         return None
 
-    from google.cloud import bigquery_storage_v1beta1
+    if bigquery_storage_v1beta1 is None:
+        raise ImportError(
+            "Install the google-cloud-bigquery-storage and fastavro packages "
+            "to use the BigQuery storage API."
+        )
 
     return bigquery_storage_v1beta1.BigQueryStorageClient(credentials=credentials)
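Editor's note: the change above follows the common optional-dependency pattern: the import is attempted once at module load, the failure is remembered by setting the name to None, and an ImportError with an actionable message is raised only when the optional feature is actually requested. A minimal sketch of that pattern, using a hypothetical package name rather than the real google-cloud-bigquery-storage client:

try:
    import optional_accelerator  # hypothetical optional package
except ImportError:
    optional_accelerator = None


def make_fast_client(use_accelerator):
    """Return an accelerator client, or None when the feature is disabled."""
    if not use_accelerator:
        return None

    if optional_accelerator is None:
        # Fail only when the caller asked for the optional feature.
        raise ImportError(
            "Install the optional-accelerator package to use this feature."
        )

    return optional_accelerator.Client()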
134 changes: 125 additions & 9 deletions bigquery/tests/unit/test_magics.py
@@ -153,6 +153,18 @@ def test__make_bqstorage_client_true():
     assert isinstance(got, bigquery_storage_v1beta1.BigQueryStorageClient)
 
 
+def test__make_bqstorage_client_true_raises_import_error(monkeypatch):
+    monkeypatch.setattr(magics, "bigquery_storage_v1beta1", None)
+    credentials_mock = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    with pytest.raises(ImportError) as exc:
+        magics._make_bqstorage_client(True, credentials_mock)
+
+    assert "google-cloud-bigquery-storage" in str(exc)
+
+
 @pytest.mark.usefixtures("ipython_interactive")
 def test_extension_load():
     ip = IPython.get_ipython()
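Editor's note: the new test relies on pytest's monkeypatch fixture, which records each setattr and restores the original attribute during teardown, so nulling out magics.bigquery_storage_v1beta1 in one test cannot leak into the rest of the suite. A small self-contained sketch of that behaviour, using a stand-in namespace object (and assuming pytest's default file-order execution):

import types

fake_magics = types.SimpleNamespace(bigquery_storage_v1beta1="real module")


def test_patch_applies_only_here(monkeypatch):
    # Simulate "package not installed" for the duration of this test only.
    monkeypatch.setattr(fake_magics, "bigquery_storage_v1beta1", None)
    assert fake_magics.bigquery_storage_v1beta1 is None


def test_patch_was_undone():
    # Runs after the test above; monkeypatch has already restored the value.
    assert fake_magics.bigquery_storage_v1beta1 == "real module"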
@@ -165,13 +177,16 @@ def test_extension_load():

 @pytest.mark.usefixtures("ipython_interactive")
 @pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
-def test_bigquery_magic_without_optional_arguments():
+def test_bigquery_magic_without_optional_arguments(monkeypatch):
     ip = IPython.get_ipython()
     ip.extension_manager.load_extension("google.cloud.bigquery")
     magics.context.credentials = mock.create_autospec(
         google.auth.credentials.Credentials, instance=True
     )
 
+    # Shouldn't fail when BigQuery Storage client isn't installed.
+    monkeypatch.setattr(magics, "bigquery_storage_v1beta1", None)
+
     sql = "SELECT 17 AS num"
     result = pandas.DataFrame([17], columns=["num"])
     run_query_patch = mock.patch(
@@ -285,44 +300,145 @@ def test_bigquery_magic_clears_display_in_verbose_mode():
 @pytest.mark.skipIf(
     bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
 )
-def test_bigquery_magic_with_bqstorage(monkeypatch):
+def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch):
     ip = IPython.get_ipython()
     ip.extension_manager.load_extension("google.cloud.bigquery")
     mock_credentials = mock.create_autospec(
         google.auth.credentials.Credentials, instance=True
     )
 
     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
     monkeypatch.setattr(magics.context, "credentials", mock_credentials)
-    monkeypatch.setattr(magics.context, "use_bqstorage_api", True)
+    monkeypatch.setattr(magics.context, "use_bqstorage_api", False)
 
     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(
         bigquery_storage_v1beta1.BigQueryStorageClient
     )
+    bqstorage_instance_mock = mock.create_autospec(
+        bigquery_storage_v1beta1.BigQueryStorageClient, instance=True
+    )
+    bqstorage_mock.return_value = bqstorage_instance_mock
+    monkeypatch.setattr(
+        magics.bigquery_storage_v1beta1, "BigQueryStorageClient", bqstorage_mock
+    )
 
     sql = "SELECT 17 AS num"
     result = pandas.DataFrame([17], columns=["num"])
-    make_bqstorage_patch = mock.patch(
-        "google.cloud.bigquery.magics._make_bqstorage_client", autospec=True
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics._run_query", autospec=True
     )
     query_job_mock = mock.create_autospec(
         google.cloud.bigquery.job.QueryJob, instance=True
     )
     query_job_mock.to_dataframe.return_value = result
+    with run_query_patch as run_query_mock:
+        run_query_mock.return_value = query_job_mock
+
+        return_value = ip.run_cell_magic("bigquery", "--use_bqstorage_api", sql)
+
+    bqstorage_mock.assert_called_once_with(credentials=mock_credentials)
+    query_job_mock.to_dataframe.assert_called_once_with(
+        bqstorage_client=bqstorage_instance_mock
+    )
+
+    assert isinstance(return_value, pandas.DataFrame)
+

+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipIf(
+    bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
+)
+def test_bigquery_magic_with_bqstorage_from_context(monkeypatch):
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    mock_credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    # Set up the context with monkeypatch so that it's reset for subsequent
+    # tests.
+    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "use_bqstorage_api", True)
+
+    # Mock out the BigQuery Storage API.
+    bqstorage_mock = mock.create_autospec(
+        bigquery_storage_v1beta1.BigQueryStorageClient
+    )
+    bqstorage_instance_mock = mock.create_autospec(
+        bigquery_storage_v1beta1.BigQueryStorageClient, instance=True
+    )
+    bqstorage_mock.return_value = bqstorage_instance_mock
+    monkeypatch.setattr(
+        magics.bigquery_storage_v1beta1, "BigQueryStorageClient", bqstorage_mock
+    )
+
+    sql = "SELECT 17 AS num"
+    result = pandas.DataFrame([17], columns=["num"])
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics._run_query", autospec=True
+    )
+    query_job_mock = mock.create_autospec(
+        google.cloud.bigquery.job.QueryJob, instance=True
+    )
+    query_job_mock.to_dataframe.return_value = result
-    with run_query_patch as run_query_mock, make_bqstorage_patch as make_bqstorage_mock:
+    with run_query_patch as run_query_mock:
         run_query_mock.return_value = query_job_mock
-        make_bqstorage_mock.return_value = bqstorage_mock
 
         return_value = ip.run_cell_magic("bigquery", "", sql)
 
-    make_bqstorage_mock.assert_called_once_with(True, mock_credentials)
+    bqstorage_mock.assert_called_once_with(credentials=mock_credentials)
     query_job_mock.to_dataframe.assert_called_once_with(
-        bqstorage_client=bqstorage_mock
+        bqstorage_client=bqstorage_instance_mock
     )
 
     assert isinstance(return_value, pandas.DataFrame)


+@pytest.mark.usefixtures("ipython_interactive")
+@pytest.mark.skipIf(
+    bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
+)
+def test_bigquery_magic_without_bqstorage(monkeypatch):
+    ip = IPython.get_ipython()
+    ip.extension_manager.load_extension("google.cloud.bigquery")
+    mock_credentials = mock.create_autospec(
+        google.auth.credentials.Credentials, instance=True
+    )
+
+    # Set up the context with monkeypatch so that it's reset for subsequent
+    # tests.
+    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+
+    # Mock out the BigQuery Storage API.
+    bqstorage_mock = mock.create_autospec(
+        bigquery_storage_v1beta1.BigQueryStorageClient
+    )
+    monkeypatch.setattr(
+        magics.bigquery_storage_v1beta1, "BigQueryStorageClient", bqstorage_mock
+    )
+
+    sql = "SELECT 17 AS num"
+    result = pandas.DataFrame([17], columns=["num"])
+    run_query_patch = mock.patch(
+        "google.cloud.bigquery.magics._run_query", autospec=True
+    )
+    query_job_mock = mock.create_autospec(
+        google.cloud.bigquery.job.QueryJob, instance=True
+    )
+    query_job_mock.to_dataframe.return_value = result
+    with run_query_patch as run_query_mock:
+        run_query_mock.return_value = query_job_mock
+
+        return_value = ip.run_cell_magic("bigquery", "", sql)
+
+    bqstorage_mock.assert_not_called()
+    query_job_mock.to_dataframe.assert_called_once_with(bqstorage_client=None)
+
+    assert isinstance(return_value, pandas.DataFrame)


 @pytest.mark.usefixtures("ipython_interactive")
 def test_bigquery_magic_with_project():
     ip = IPython.get_ipython()
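Editor's note on the mocking pattern repeated in the tests above: mock.create_autospec(SomeClass) produces a callable stand-in for the class itself (its call signature is checked against __init__), while create_autospec(SomeClass, instance=True) produces a non-callable mock that behaves like an instance of that class. Wiring the first mock's return_value to the second mimics "constructing" the client. A short sketch with a made-up class, not the real BigQueryStorageClient:

from unittest import mock


class StorageClient:
    """Stand-in for a client class; not the real BigQuery Storage client."""

    def __init__(self, credentials=None):
        self.credentials = credentials


class_mock = mock.create_autospec(StorageClient)
instance_mock = mock.create_autospec(StorageClient, instance=True)
class_mock.return_value = instance_mock

# Code under test "constructs" the patched class and receives the instance mock.
client = class_mock(credentials="fake-credentials")
assert client is instance_mock
class_mock.assert_called_once_with(credentials="fake-credentials")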
