Skip to content

Commit

Permalink
Make method signatures compatible again
Browse files Browse the repository at this point in the history
The non-optional `bigquery_storage.BigQueryReadClient` annotations on the `bqstorage_client` parameters caused a signature mismatch; they are now wrapped in `Optional[...]` as forward-reference strings, with the `bigquery_storage` import moved under a `typing.TYPE_CHECKING` guard.
  • Loading branch information
plamut committed Jul 16, 2021
1 parent 996baa3 commit d7f0531
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 7 deletions.
4 changes: 2 additions & 2 deletions google/cloud/bigquery/job/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -1342,7 +1342,7 @@ def result(
def to_arrow(
self,
progress_bar_type: str = None,
bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
create_bqstorage_client: bool = True,
max_results: Optional[int] = None,
) -> "pyarrow.Table":
Expand Down Expand Up @@ -1412,7 +1412,7 @@ def to_arrow(
# that should only exist here in the QueryJob method.
def to_dataframe(
self,
bqstorage_client: "bigquery_storage.BigQueryReadClient" = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Dict[str, Any] = None,
progress_bar_type: str = None,
create_bqstorage_client: bool = True,
Expand Down
11 changes: 6 additions & 5 deletions google/cloud/bigquery/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
from google.api_core.page_iterator import HTTPIterator

import google.cloud._helpers
from google.cloud import bigquery_storage
from google.cloud.bigquery import _helpers
from google.cloud.bigquery import _pandas_helpers
from google.cloud.bigquery.exceptions import LegacyBigQueryStorageError
Expand All @@ -48,6 +47,8 @@
from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration

if typing.TYPE_CHECKING: # pragma: NO COVER
from google.cloud import bigquery_storage

# Unconditionally import optional dependencies again to tell pytype that
# they are not None, avoiding false "no attribute" errors.
import pandas
Expand Down Expand Up @@ -1628,7 +1629,7 @@ def _to_arrow_iterable(self, bqstorage_client=None):
def to_arrow(
self,
progress_bar_type: str = None,
bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
create_bqstorage_client: bool = True,
) -> "pyarrow.Table":
"""[Beta] Create a class:`pyarrow.Table` by loading all pages of a
Expand Down Expand Up @@ -1723,7 +1724,7 @@ def to_arrow(

def to_dataframe_iterable(
self,
bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Dict[str, Any] = None,
max_queue_size: int = _pandas_helpers._MAX_QUEUE_SIZE_DEFAULT,
) -> "pandas.DataFrame":
Expand Down Expand Up @@ -1797,7 +1798,7 @@ def to_dataframe_iterable(
# changes to job.QueryJob.to_dataframe()
def to_dataframe(
self,
bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Dict[str, Any] = None,
progress_bar_type: str = None,
create_bqstorage_client: bool = True,
Expand Down Expand Up @@ -1978,7 +1979,7 @@ def to_dataframe(

def to_dataframe_iterable(
self,
bqstorage_client: Optional[bigquery_storage.BigQueryReadClient] = None,
bqstorage_client: Optional["bigquery_storage.BigQueryReadClient"] = None,
dtypes: Optional[Dict[str, Any]] = None,
max_queue_size: Optional[int] = None,
) -> Iterator["pandas.DataFrame"]:
Expand Down

0 comments on commit d7f0531

Please sign in to comment.