From d7f05316a4d08dfea7f4abf4cf413839bb7d491f Mon Sep 17 00:00:00 2001
From: Peter Lamut
Date: Fri, 16 Jul 2021 16:12:01 +0200
Subject: [PATCH] Make method signatures compatible again

The annotations caused a mismatch
---
 google/cloud/bigquery/job/query.py |  4 ++--
 google/cloud/bigquery/table.py     | 11 ++++++-----
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/google/cloud/bigquery/job/query.py b/google/cloud/bigquery/job/query.py
index 23eecfdf79..67955dbb17 100644
--- a/google/cloud/bigquery/job/query.py
+++ b/google/cloud/bigquery/job/query.py
@@ -1342,7 +1342,7 @@ def result(
     def to_arrow(
         self,
         progress_bar_type: str = None,
-        bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
         max_results: Optional[int] = None,
     ) -> "pyarrow.Table":
@@ -1412,7 +1412,7 @@ def to_arrow(
     # that should only exist here in the QueryJob method.
     def to_dataframe(
         self,
-        bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         progress_bar_type: str = None,
         create_bqstorage_client: bool = True,
diff --git a/google/cloud/bigquery/table.py b/google/cloud/bigquery/table.py
index c88488b7ba..fee98ce5e5 100644
--- a/google/cloud/bigquery/table.py
+++ b/google/cloud/bigquery/table.py
@@ -36,7 +36,6 @@
 from google.api_core.page_iterator import HTTPIterator
 
 import google.cloud._helpers
-from google.cloud import bigquery_storage
 from google.cloud.bigquery import _helpers
 from google.cloud.bigquery import _pandas_helpers
 from google.cloud.bigquery.exceptions import LegacyBigQueryStorageError
@@ -48,6 +47,8 @@
 from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration
 
 if typing.TYPE_CHECKING:  # pragma: NO COVER
+    from google.cloud import bigquery_storage
+
     # Unconditionally import optional dependencies again to tell pytype that
     # they are not None, avoiding false "no attribute" errors.
     import pandas
@@ -1628,7 +1629,7 @@ def _to_arrow_iterable(self, bqstorage_client=None):
     def to_arrow(
         self,
         progress_bar_type: str = None,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         create_bqstorage_client: bool = True,
     ) -> "pyarrow.Table":
         """[Beta] Create a class:`pyarrow.Table` by loading all pages of a
@@ -1723,7 +1724,7 @@ def to_arrow(
 
     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT,
     ) -> "pandas.DataFrame":
@@ -1797,7 +1798,7 @@ def to_dataframe_iterable(
     # changes to job.QueryJob.to_dataframe()
     def to_dataframe(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Dict[str, Any] = None,
         progress_bar_type: str = None,
         create_bqstorage_client: bool = True,
@@ -1978,7 +1979,7 @@ def to_dataframe(
 
     def to_dataframe_iterable(
         self,
-        bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
+        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
         dtypes: Optional[Dict[str, Any]] = None,
         max_queue_size: Optional[int] = None,
     ) -> Iterator["pandas.DataFrame"]:
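
For readers reviewing the patch, the pattern applied in every hunk above is roughly the following sketch (the class and method names here are illustrative, not the library's actual layout): the bigquery_storage import moves under typing.TYPE_CHECKING so the optional dependency is never imported at runtime, the annotation becomes a quoted forward reference that type checkers can still resolve, and wrapping it in Optional[...] matches the None default so overriding methods keep signatures identical to the methods they override.

import typing
from typing import Optional

if typing.TYPE_CHECKING:  # pragma: NO COVER
    # Only evaluated by type checkers; never executed at runtime, so the
    # optional google-cloud-bigquery-storage dependency need not be installed.
    from google.cloud import bigquery_storage


class ExampleRowIterator:  # hypothetical class, for illustration only
    def to_dataframe(
        self,
        # Quoted forward reference: resolvable by type checkers even though
        # bigquery_storage is not imported at runtime; Optional[...] matches
        # the None default and keeps the overriding signature compatible.
        bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
    ):
        ...

Quoting the annotation is what allows the import to live behind TYPE_CHECKING in the first place, which is why both changes appear together in the patch.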