From dcbf7dd039e95348d66ea7b2117d8d5822f49040 Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 1 Nov 2019 15:20:12 -0700
Subject: [PATCH] Rename parameter to create_bqstorage_client.

---
 bigquery/google/cloud/bigquery/job.py        | 20 ++++++-------
 bigquery/google/cloud/bigquery/table.py      | 28 +++++++++----------
 bigquery/samples/download_public_data.py     |  2 +-
 .../samples/download_public_data_sandbox.py  |  2 +-
 4 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py
index 928f17ac8ebf2..5fd963ec82159 100644
--- a/bigquery/google/cloud/bigquery/job.py
+++ b/bigquery/google/cloud/bigquery/job.py
@@ -3110,7 +3110,7 @@ def result(
     # If changing the signature of this method, make sure to apply the same
     # changes to table.RowIterator.to_arrow()
     def to_arrow(
-        self, progress_bar_type=None, bqstorage_client=None, use_bqstorage_api=False
+        self, progress_bar_type=None, bqstorage_client=None, create_bqstorage_client=False
     ):
         """[Beta] Create a class:`pyarrow.Table` by loading all pages of a
         table or query.
@@ -3144,10 +3144,10 @@ def to_arrow(
                 Reading from a specific partition or snapshot is not
                 currently supported by this method.

-            use_bqstorage_api (bool):
+            create_bqstorage_client (bool):
                 **Beta Feature** Optional. If ``True``, create a BigQuery
-                *Storage API client and
-                use the faster BigQuery Storage API to fetch rows from
+                Storage API client using the default API settings. The
+                BigQuery Storage API is a faster way to fetch rows from
                 BigQuery. See the ``bqstorage_client`` parameter for more
                 information.

@@ -3170,7 +3170,7 @@
         return self.result().to_arrow(
             progress_bar_type=progress_bar_type,
             bqstorage_client=bqstorage_client,
-            use_bqstorage_api=use_bqstorage_api,
+            create_bqstorage_client=create_bqstorage_client,
         )

     # If changing the signature of this method, make sure to apply the same
@@ -3180,7 +3180,7 @@ def to_dataframe(
         self,
         bqstorage_client=None,
         dtypes=None,
         progress_bar_type=None,
-        use_bqstorage_api=False,
+        create_bqstorage_client=False,
     ):
         """Return a pandas DataFrame from a QueryJob
@@ -3214,10 +3214,10 @@
                 for details.

                 ..versionadded:: 1.11.0
-            use_bqstorage_api (bool):
+            create_bqstorage_client (bool):
                 **Beta Feature** Optional. If ``True``, create a BigQuery
-                *Storage API client and
-                use the faster BigQuery Storage API to fetch rows from
+                Storage API client using the default API settings. The
+                BigQuery Storage API is a faster way to fetch rows from
                 BigQuery. See the ``bqstorage_client`` parameter for more
                 information.

@@ -3237,7 +3237,7 @@
             bqstorage_client=bqstorage_client,
             dtypes=dtypes,
             progress_bar_type=progress_bar_type,
-            use_bqstorage_api=use_bqstorage_api,
+            create_bqstorage_client=create_bqstorage_client,
         )

     def __iter__(self):
diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py
index 4f0c076514086..f875181d619bc 100644
--- a/bigquery/google/cloud/bigquery/table.py
+++ b/bigquery/google/cloud/bigquery/table.py
@@ -1441,7 +1441,7 @@ def _to_arrow_iterable(self, bqstorage_client=None):
     # If changing the signature of this method, make sure to apply the same
     # changes to job.QueryJob.to_arrow()
     def to_arrow(
-        self, progress_bar_type=None, bqstorage_client=None, use_bqstorage_api=False
+        self, progress_bar_type=None, bqstorage_client=None, create_bqstorage_client=False
     ):
         """[Beta] Create a class:`pyarrow.Table` by loading all pages of a
         table or query.
@@ -1475,10 +1475,10 @@ def to_arrow(
                 Reading from a specific partition or snapshot is not
                 currently supported by this method.

-            use_bqstorage_api (bool):
+            create_bqstorage_client (bool):
                 **Beta Feature** Optional. If ``True``, create a BigQuery
-                *Storage API client and
-                use the faster BigQuery Storage API to fetch rows from
+                Storage API client using the default API settings. The
+                BigQuery Storage API is a faster way to fetch rows from
                 BigQuery. See the ``bqstorage_client`` parameter for more
                 information.

@@ -1500,7 +1500,7 @@ def to_arrow(
         if pyarrow is None:
             raise ValueError(_NO_PYARROW_ERROR)

-        if not bqstorage_client and use_bqstorage_api:
+        if not bqstorage_client and create_bqstorage_client:
             bqstorage_client = self.client._create_bqstorage_client()

         progress_bar = self._get_progress_bar(progress_bar_type)
@@ -1562,7 +1562,7 @@ def to_dataframe(
         bqstorage_client=None,
         dtypes=None,
         progress_bar_type=None,
-        use_bqstorage_api=False,
+        create_bqstorage_client=False,
     ):
         """Create a pandas DataFrame by loading all pages of a query.

@@ -1607,10 +1607,10 @@ def to_dataframe(
                     progress bar as a graphical dialog box.

                 ..versionadded:: 1.11.0
-            use_bqstorage_api (bool):
+            create_bqstorage_client (bool):
                 **Beta Feature** Optional. If ``True``, create a BigQuery
-                *Storage API client and
-                use the faster BigQuery Storage API to fetch rows from
+                Storage API client using the default API settings. The
+                BigQuery Storage API is a faster way to fetch rows from
                 BigQuery. See the ``bqstorage_client`` parameter for more
                 information.

@@ -1636,7 +1636,7 @@ def to_dataframe(
         if dtypes is None:
             dtypes = {}

-        if not bqstorage_client and use_bqstorage_api:
+        if not bqstorage_client and create_bqstorage_client:
             bqstorage_client = self.client._create_bqstorage_client()

         if bqstorage_client and self.max_results is not None:
@@ -1686,14 +1686,14 @@ class _EmptyRowIterator(object):
     total_rows = 0

     def to_arrow(
-        self, progress_bar_type=None, bqstorage_client=None, use_bqstorage_api=False
+        self, progress_bar_type=None, bqstorage_client=None, create_bqstorage_client=False
     ):
         """[Beta] Create an empty class:`pyarrow.Table`.

         Args:
             progress_bar_type (Optional[str]): Ignored. Added for compatibility with RowIterator.
             bqstorage_client (Any): Ignored. Added for compatibility with RowIterator.
-            use_bqstorage_api (bool): Ignored. Added for compatibility with RowIterator.
+            create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator.

         Returns:
             pyarrow.Table: An empty :class:`pyarrow.Table`.
@@ -1707,7 +1707,7 @@ def to_dataframe(
         bqstorage_client=None,
         dtypes=None,
         progress_bar_type=None,
-        use_bqstorage_api=False,
+        create_bqstorage_client=False,
     ):
         """Create an empty dataframe.

@@ -1715,7 +1715,7 @@ def to_dataframe(
             bqstorage_client (Any): Ignored. Added for compatibility with RowIterator.
             dtypes (Any): Ignored. Added for compatibility with RowIterator.
             progress_bar_type (Any): Ignored. Added for compatibility with RowIterator.
-            use_bqstorage_api (bool): Ignored. Added for compatibility with RowIterator.
+            create_bqstorage_client (bool): Ignored. Added for compatibility with RowIterator.

         Returns:
             pandas.DataFrame: An empty :class:`~pandas.DataFrame`.
diff --git a/bigquery/samples/download_public_data.py b/bigquery/samples/download_public_data.py
index 4e09cab56411d..815d140fc6f15 100644
--- a/bigquery/samples/download_public_data.py
+++ b/bigquery/samples/download_public_data.py
@@ -27,7 +27,7 @@ def download_public_data(client):
     table_id = "bigquery-public-data.usa_names.usa_1910_current"

     # Use the BigQuery Storage API to speed-up downloads of large tables.
-    dataframe = client.list_rows(table_id).to_dataframe(use_bqstorage_api=True)
+    dataframe = client.list_rows(table_id).to_dataframe(create_bqstorage_client=True)

     print(dataframe.info())
     # [END bigquery_pandas_public_data]
diff --git a/bigquery/samples/download_public_data_sandbox.py b/bigquery/samples/download_public_data_sandbox.py
index d725ab94f1cbf..edb1466e4bd78 100644
--- a/bigquery/samples/download_public_data_sandbox.py
+++ b/bigquery/samples/download_public_data_sandbox.py
@@ -28,7 +28,7 @@ def download_public_data_sandbox(client):
     query_string = "SELECT * FROM `bigquery-public-data.usa_names.usa_1910_current`"

     # Use the BigQuery Storage API to speed-up downloads of large tables.
-    dataframe = client.query(query_string).to_dataframe(use_bqstorage_api=True)
+    dataframe = client.query(query_string).to_dataframe(create_bqstorage_client=True)

     print(dataframe.info())
     # [END bigquery_pandas_public_data_sandbox]
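
For anyone reviewing or trying this patch locally, here is a minimal usage sketch of the
renamed flag. It assumes google-cloud-bigquery at this commit plus the optional
google-cloud-bigquery-storage, pandas, and pyarrow dependencies, and application default
credentials; the table ID is the same public dataset the samples above already use.

    from google.cloud import bigquery

    client = bigquery.Client()
    table_id = "bigquery-public-data.usa_names.usa_1910_current"

    # Before this patch the flag was spelled use_bqstorage_api=True. After the
    # rename, create_bqstorage_client=True asks the library to build a BigQuery
    # Storage API client with default settings and use it for the download.
    dataframe = client.list_rows(table_id).to_dataframe(create_bqstorage_client=True)
    print(dataframe.info())

Note that the flag only matters when no bqstorage_client is passed explicitly: per the
guard in table.py ("if not bqstorage_client and create_bqstorage_client:"), an explicitly
supplied client always takes precedence.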