diff --git a/.coveragerc b/.coveragerc index dd39c854..593c73d9 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,35 +1,18 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True +omit = + google/cloud/bigquery_storage/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/.flake8 b/.flake8 index ed931638..29227d4c 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.kokoro/build.sh b/.kokoro/build.sh index a7968df7..9d2f7d14 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-bigquery-storage +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-bigquery-storage" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 11181078..d59ee418 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-storage/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: {
+  key: "NOX_SESSION"
+  value: "docs docfx"
+}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..a9024b15
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v3.4.0
+  hooks:
+  - id: trailing-whitespace
+  - id: end-of-file-fixer
+  - id: check-yaml
+- repo: https://github.com/psf/black
+  rev: 19.10b0
+  hooks:
+  - id: black
+- repo: https://gitlab.com/pycqa/flake8
+  rev: 3.8.4
+  hooks:
+  - id: flake8
diff --git a/.trampolinerc b/.trampolinerc
index 995ee291..c7d663ae 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -18,12 +18,14 @@ required_envvars+=(
   "STAGING_BUCKET"
   "V2_STAGING_BUCKET"
+  "NOX_SESSION"
 )
 
 # Add env vars which are passed down into the container here.
 pass_down_envvars+=(
   "STAGING_BUCKET"
   "V2_STAGING_BUCKET"
+  "NOX_SESSION"
 )
 
 # Prevent unintentional override on the default image.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index cc2641d2..2ee34e24 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,8 +21,8 @@ In order to add a feature:
 - The feature must be documented in both the API and narrative
   documentation.
 
-- The feature must work fully on the following CPython versions: 2.7,
-  3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows.
+- The feature must work fully on the following CPython versions:
+  3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
 
 - The feature must not add unnecessary dependencies (where
   "unnecessary" is of course subjective, but new dependencies should
@@ -111,6 +111,16 @@ Coding Style
   should point to the official ``googleapis`` checkout and the branch should be
   the main branch on that remote (``master``).
 
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
 Exceptions to PEP8:
 
 - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
@@ -192,25 +202,24 @@ Supported Python Versions
 We support:
 
-- `Python 3.5`_
 - `Python 3.6`_
 - `Python 3.7`_
 - `Python 3.8`_
+- `Python 3.9`_
 
-.. _Python 3.5: https://docs.python.org/3.5/
 .. _Python 3.6: https://docs.python.org/3.6/
 .. _Python 3.7: https://docs.python.org/3.7/
 .. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
 
 Supported versions can be found in our ``noxfile.py`` `config`_.
 
 .. _config: https://github.com/googleapis/python-bigquery-storage/blob/master/noxfile.py
 
-Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
 
 We also explicitly decided to support Python 3 beginning with version
-3.5. Reasons for this include:
+3.6. Reasons for this include:
 
 - Encouraging use of newest versions of Python 3
 - Taking the lead of `prominent`_ open-source `projects`_
diff --git a/LICENSE b/LICENSE
index a8ee855d..d6456956 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
-                                 Apache License
+
+                                 Apache License
                            Version 2.0, January 2004
-                        https://www.apache.org/licenses/
+                        http://www.apache.org/licenses/
 
    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 
@@ -192,7 +193,7 @@ you may not use this file except in compliance with the License.
You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229..bcd37bbd 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/docs/bigquery_storage_v1/big_query_read.rst b/docs/bigquery_storage_v1/big_query_read.rst new file mode 100644 index 00000000..74f0a5fa --- /dev/null +++ b/docs/bigquery_storage_v1/big_query_read.rst @@ -0,0 +1,6 @@ +BigQueryRead +------------------------------ + +.. automodule:: google.cloud.bigquery_storage_v1.services.big_query_read + :members: + :inherited-members: diff --git a/docs/bigquery_storage_v1/services.rst b/docs/bigquery_storage_v1/services.rst index 5d0f9532..3f6cff3d 100644 --- a/docs/bigquery_storage_v1/services.rst +++ b/docs/bigquery_storage_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Bigquery Storage v1 API ================================================= +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.bigquery_storage_v1.services.big_query_read - :members: - :inherited-members: + big_query_read diff --git a/docs/bigquery_storage_v1/types.rst b/docs/bigquery_storage_v1/types.rst index 3f722c57..28b5db72 100644 --- a/docs/bigquery_storage_v1/types.rst +++ b/docs/bigquery_storage_v1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Bigquery Storage v1 API .. automodule:: google.cloud.bigquery_storage_v1.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py index 7108ffd0..5363e60f 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py @@ -81,6 +81,7 @@ class BigQueryReadAsyncClient: BigQueryReadClient.parse_common_location_path ) + from_service_account_info = BigQueryReadClient.from_service_account_info from_service_account_file = BigQueryReadClient.from_service_account_file from_service_account_json = from_service_account_file @@ -181,16 +182,17 @@ async def create_read_session( caller. Args: - request (:class:`~.storage.CreateReadSessionRequest`): + request (:class:`google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest`): The request object. Request message for `CreateReadSession`. parent (:class:`str`): Required. The request project that owns the session, in the form of ``projects/{project_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - read_session (:class:`~.stream.ReadSession`): + read_session (:class:`google.cloud.bigquery_storage_v1.types.ReadSession`): Required. Session to be created. This corresponds to the ``read_session`` field on the ``request`` instance; if ``request`` is provided, this @@ -210,6 +212,7 @@ async def create_read_session( Streams must be read starting from offset 0. + This corresponds to the ``max_stream_count`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -221,7 +224,7 @@ async def create_read_session( sent along with the request as metadata. Returns: - ~.stream.ReadSession: + google.cloud.bigquery_storage_v1.types.ReadSession: Information about the ReadSession. """ # Create or coerce a protobuf request object. @@ -296,7 +299,7 @@ def read_rows( reflecting the current state of the stream. Args: - request (:class:`~.storage.ReadRowsRequest`): + request (:class:`google.cloud.bigquery_storage_v1.types.ReadRowsRequest`): The request object. Request message for `ReadRows`. read_stream (:class:`str`): Required. Stream to read rows from. @@ -309,6 +312,7 @@ def read_rows( Requesting a larger offset is undefined. If not specified, start reading from offset zero. + This corresponds to the ``offset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -320,9 +324,9 @@ def read_rows( sent along with the request as metadata. Returns: - AsyncIterable[~.storage.ReadRowsResponse]: - Response from calling ``ReadRows`` may include row data, - progress and throttling information. + AsyncIterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]: + Response from calling ReadRows may include row data, progress and + throttling information. """ # Create or coerce a protobuf request object. @@ -396,7 +400,7 @@ async def split_read_stream( once the streams have been read to completion. Args: - request (:class:`~.storage.SplitReadStreamRequest`): + request (:class:`google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest`): The request object. Request message for `SplitReadStream`. @@ -407,8 +411,8 @@ async def split_read_stream( sent along with the request as metadata. Returns: - ~.storage.SplitReadStreamResponse: - Response message for ``SplitReadStream``. + google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse: + Response message for SplitReadStream. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py index 3f04760f..3cb3f026 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -113,6 +113,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BigQueryReadClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -125,7 +141,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + BigQueryReadClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -267,10 +283,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.BigQueryReadTransport]): The + transport (Union[str, BigQueryReadTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -404,21 +420,22 @@ def create_read_session( caller. Args: - request (:class:`~.storage.CreateReadSessionRequest`): + request (google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest): The request object. Request message for `CreateReadSession`. - parent (:class:`str`): + parent (str): Required. The request project that owns the session, in the form of ``projects/{project_id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - read_session (:class:`~.stream.ReadSession`): + read_session (google.cloud.bigquery_storage_v1.types.ReadSession): Required. Session to be created. This corresponds to the ``read_session`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - max_stream_count (:class:`int`): + max_stream_count (int): Max initial number of streams. If unset or zero, the server will provide a value of streams so as to produce @@ -433,6 +450,7 @@ def create_read_session( Streams must be read starting from offset 0. + This corresponds to the ``max_stream_count`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -444,7 +462,7 @@ def create_read_session( sent along with the request as metadata. Returns: - ~.stream.ReadSession: + google.cloud.bigquery_storage_v1.types.ReadSession: Information about the ReadSession. """ # Create or coerce a protobuf request object. @@ -512,19 +530,20 @@ def read_rows( reflecting the current state of the stream. Args: - request (:class:`~.storage.ReadRowsRequest`): + request (google.cloud.bigquery_storage_v1.types.ReadRowsRequest): The request object. Request message for `ReadRows`. - read_stream (:class:`str`): + read_stream (str): Required. Stream to read rows from. This corresponds to the ``read_stream`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - offset (:class:`int`): + offset (int): The offset requested must be less than the last row read from Read. Requesting a larger offset is undefined. If not specified, start reading from offset zero. + This corresponds to the ``offset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -536,9 +555,9 @@ def read_rows( sent along with the request as metadata. Returns: - Iterable[~.storage.ReadRowsResponse]: - Response from calling ``ReadRows`` may include row data, - progress and throttling information. + Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]: + Response from calling ReadRows may include row data, progress and + throttling information. """ # Create or coerce a protobuf request object. 
@@ -607,7 +626,7 @@ def split_read_stream(
         once the streams have been read to completion.
 
         Args:
-            request (:class:`~.storage.SplitReadStreamRequest`):
+            request (google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest):
                 The request object. Request message for
                 `SplitReadStream`.
 
@@ -618,8 +637,8 @@ def split_read_stream(
                 sent along with the request as metadata.
 
         Returns:
-            ~.storage.SplitReadStreamResponse:
-                Response message for ``SplitReadStream``.
+            google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse:
+                Response message for SplitReadStream.
 
         """
         # Create or coerce a protobuf request object.
diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py
index 2e9fe066..87e56323 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py
@@ -28,7 +28,6 @@
 _transport_registry["grpc"] = BigQueryReadGrpcTransport
 _transport_registry["grpc_asyncio"] = BigQueryReadGrpcAsyncIOTransport
 
-
 __all__ = (
     "BigQueryReadTransport",
     "BigQueryReadGrpcTransport",
diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
index 041854b9..95fac6e5 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
@@ -148,10 +148,10 @@ def __init__(
                 ssl_credentials=ssl_credentials,
                 scopes=scopes or self.AUTH_SCOPES,
                 quota_project_id=quota_project_id,
-                options=(
+                options=[
                     ("grpc.max_send_message_length", -1),
                     ("grpc.max_receive_message_length", -1),
-                ),
+                ],
             )
             self._ssl_channel_credentials = ssl_credentials
         else:
@@ -170,10 +170,10 @@ def __init__(
                 ssl_credentials=ssl_channel_credentials,
                 scopes=scopes or self.AUTH_SCOPES,
                 quota_project_id=quota_project_id,
-                options=(
+                options=[
                     ("grpc.max_send_message_length", -1),
                     ("grpc.max_receive_message_length", -1),
-                ),
+                ],
             )
 
         self._stubs = {}  # type: Dict[str, Callable]
@@ -200,7 +200,7 @@ def create_channel(
     ) -> grpc.Channel:
         """Create and return a gRPC channel object.
         Args:
-            address (Optionsl[str]): The host for the channel to use.
+            address (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service.
If diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py index 3e08afdd..34ec72ad 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py @@ -193,10 +193,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, - options=( + options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), - ), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -215,10 +215,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, - options=( + options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), - ), + ], ) # Run the base constructor. diff --git a/google/cloud/bigquery_storage_v1/types/__init__.py b/google/cloud/bigquery_storage_v1/types/__init__.py index 346ce9cf..14fc7096 100644 --- a/google/cloud/bigquery_storage_v1/types/__init__.py +++ b/google/cloud/bigquery_storage_v1/types/__init__.py @@ -27,6 +27,7 @@ DataFormat, ReadSession, ReadStream, + DataFormat, ) from .storage import ( CreateReadSessionRequest, @@ -38,7 +39,6 @@ SplitReadStreamResponse, ) - __all__ = ( "ArrowSchema", "ArrowRecordBatch", @@ -47,6 +47,7 @@ "DataFormat", "ReadSession", "ReadStream", + "DataFormat", "CreateReadSessionRequest", "ReadRowsRequest", "ThrottleState", diff --git a/google/cloud/bigquery_storage_v1/types/storage.py b/google/cloud/bigquery_storage_v1/types/storage.py index 1b9c9d35..367d3bb3 100644 --- a/google/cloud/bigquery_storage_v1/types/storage.py +++ b/google/cloud/bigquery_storage_v1/types/storage.py @@ -44,7 +44,7 @@ class CreateReadSessionRequest(proto.Message): parent (str): Required. The request project that owns the session, in the form of ``projects/{project_id}``. - read_session (~.stream.ReadSession): + read_session (google.cloud.bigquery_storage_v1.types.ReadSession): Required. Session to be created. max_stream_count (int): Max initial number of streams. If unset or @@ -102,7 +102,7 @@ class StreamStats(proto.Message): r"""Estimated stream statistics for a given Stream. Attributes: - progress (~.storage.StreamStats.Progress): + progress (google.cloud.bigquery_storage_v1.types.StreamStats.Progress): Represents the progress of the current stream. """ @@ -141,16 +141,16 @@ class ReadRowsResponse(proto.Message): and throttling information. Attributes: - avro_rows (~.avro.AvroRows): + avro_rows (google.cloud.bigquery_storage_v1.types.AvroRows): Serialized row data in AVRO format. - arrow_record_batch (~.arrow.ArrowRecordBatch): + arrow_record_batch (google.cloud.bigquery_storage_v1.types.ArrowRecordBatch): Serialized row data in Arrow RecordBatch format. row_count (int): Number of serialized rows in the rows block. - stats (~.storage.StreamStats): + stats (google.cloud.bigquery_storage_v1.types.StreamStats): Statistics for the stream. - throttle_state (~.storage.ThrottleState): + throttle_state (google.cloud.bigquery_storage_v1.types.ThrottleState): Throttling state. If unset, the latest response still describes the current throttling status. @@ -201,11 +201,11 @@ class SplitReadStreamResponse(proto.Message): r"""Response message for ``SplitReadStream``. 
Attributes: - primary_stream (~.stream.ReadStream): + primary_stream (google.cloud.bigquery_storage_v1.types.ReadStream): Primary stream, which contains the beginning portion of \|original_stream|. An empty value indicates that the original stream can no longer be split. - remainder_stream (~.stream.ReadStream): + remainder_stream (google.cloud.bigquery_storage_v1.types.ReadStream): Remainder stream, which contains the tail of \|original_stream|. An empty value indicates that the original stream can no longer be split. diff --git a/google/cloud/bigquery_storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py index eeec7a88..34b865eb 100644 --- a/google/cloud/bigquery_storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -43,28 +43,28 @@ class ReadSession(proto.Message): name (str): Output only. Unique identifier for the session, in the form ``projects/{project_id}/locations/{location}/sessions/{session_id}``. - expire_time (~.timestamp.Timestamp): + expire_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time at which the session becomes invalid. After this time, subsequent requests to read this Session will return errors. The expire_time is automatically assigned and currently cannot be specified or updated. - data_format (~.stream.DataFormat): + data_format (google.cloud.bigquery_storage_v1.types.DataFormat): Immutable. Data format of the output data. - avro_schema (~.avro.AvroSchema): + avro_schema (google.cloud.bigquery_storage_v1.types.AvroSchema): Output only. Avro schema. - arrow_schema (~.arrow.ArrowSchema): + arrow_schema (google.cloud.bigquery_storage_v1.types.ArrowSchema): Output only. Arrow schema. table (str): Immutable. Table that this ReadSession is reading from, in the form ``projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`` - table_modifiers (~.stream.ReadSession.TableModifiers): + table_modifiers (google.cloud.bigquery_storage_v1.types.ReadSession.TableModifiers): Optional. Any modifiers which are applied when reading from the specified table. - read_options (~.stream.ReadSession.TableReadOptions): + read_options (google.cloud.bigquery_storage_v1.types.ReadSession.TableReadOptions): Optional. Read options for this session (e.g. column selection, filters). - streams (Sequence[~.stream.ReadStream]): + streams (Sequence[google.cloud.bigquery_storage_v1.types.ReadStream]): Output only. A list of streams created with the session. At least one stream is created with the session. In the @@ -78,7 +78,7 @@ class TableModifiers(proto.Message): r"""Additional attributes when reading a table. Attributes: - snapshot_time (~.timestamp.Timestamp): + snapshot_time (google.protobuf.timestamp_pb2.Timestamp): The snapshot time of the table. If not set, interpreted as now. 
""" diff --git a/noxfile.py b/noxfile.py index c6d9258f..c5baa18c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -84,9 +95,7 @@ def default(session): session.run( "py.test", "--quiet", - "--cov=google.cloud.bigquery_storage", - "--cov=google.cloud.bigquery_storage_v1", - "--cov=google.cloud", + "--cov=google/cloud", "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index ab2c4922..bbd25fcd 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -65,7 +66,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -94,7 +95,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -132,7 +133,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: @@ -153,7 +154,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,9 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +197,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +212,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +235,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/to_dataframe/noxfile.py b/samples/to_dataframe/noxfile.py index ab2c4922..bbd25fcd 100644 --- a/samples/to_dataframe/noxfile.py +++ b/samples/to_dataframe/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -65,7 +66,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -94,7 +95,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -132,7 +133,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: @@ -153,7 +154,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,9 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +197,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +212,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +235,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/synth.metadata b/synth.metadata index 831ec1b1..dc973ce0 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-bigquery-storage.git", - "sha": "994a7c1cb1f8008e630d2325a9c168001e5081b4" + "remote": "git@github.com:tswast/python-bigquery-storage.git", + "sha": "936e0a5777a201c49fb76f7638a69cfd2569c2e7" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "07d41a7e5cade45aba6f0d277c89722b48f2c956", - "internalRef": "339292950" + "sha": "38cb5ce59453ce509773afcdfc72764441a4b531", + "internalRef": "351190966" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9" + "sha": "16ec872dd898d7de6e1822badfac32484b5d9031" } } ], @@ -136,4 +136,4 @@ "tests/unit/gapic/bigquery_storage_v1/__init__.py", "tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py" ] -} \ No newline at end of file +} diff --git a/synth.py b/synth.py index 92cf5c29..fffa438f 100644 --- a/synth.py +++ b/synth.py @@ -97,44 +97,6 @@ '\g<0>\n\n session.install("google-cloud-bigquery")', ) -# Remove client-side validation of message length. -# https://github.com/googleapis/python-bigquery-storage/issues/78 -s.replace( - [ - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py", - "google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py", - ], - ( - r"type\(self\).create_channel\(\s*" - r"host,\s*" - r"credentials=credentials,\s*" - r"credentials_file=credentials_file,\s*" - r"ssl_credentials=ssl_[a-z_]*credentials,\s*" - r"scopes=scopes or self.AUTH_SCOPES,\s*" - r"quota_project_id=quota_project_id" - ), - """\g<0>, - options=( - ('grpc.max_send_message_length', -1), - ('grpc.max_receive_message_length', -1) - )""", -) -s.replace( - "tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py", - ( - r"grpc_create_channel\.assert_called_once_with\([^()]+" - r"scopes=\([^()]+\),\s*" - r"ssl_credentials=[a-z_]+,\s*" - r"quota_project_id=None" - ), - """\g<0>, - options=( - ('grpc.max_send_message_length', -1), - ('grpc.max_receive_message_length', -1) - )""", -) - - # We don't want the generated client to be accessible through # "google.cloud.bigquery_storage", replace it with the hand written client that # wraps it. 
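Note on the `synth.py` hunk above: the removed `s.replace` blocks were a post-generation
workaround to inject unlimited message-size options into the transports; they are no
longer needed because the regenerated `grpc.py` and `grpc_asyncio.py` (earlier in this
diff) now pass `options=[...]` natively. A minimal sketch of what those options do when
building a channel directly; the `localhost:8080` endpoint is a placeholder for
illustration, not part of this change:

```python
import grpc

# Setting both limits to -1 lifts gRPC's default 4 MiB message-size caps,
# which large ReadRowsResponse messages can exceed.
channel = grpc.insecure_channel(
    "localhost:8080",  # hypothetical endpoint
    options=[
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)
```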
diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index 1c3cfafb..3823109b 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -86,7 +86,20 @@ def test__get_default_mtls_endpoint(): assert BigQueryReadClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [BigQueryReadClient, BigQueryReadAsyncClient]) +def test_big_query_read_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = BigQueryReadClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "bigquerystorage.googleapis.com:443" + + +@pytest.mark.parametrize("client_class", [BigQueryReadClient, BigQueryReadAsyncClient,]) def test_big_query_read_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( @@ -104,7 +117,10 @@ def test_big_query_read_client_from_service_account_file(client_class): def test_big_query_read_client_get_transport_class(): transport = BigQueryReadClient.get_transport_class() - assert transport == transports.BigQueryReadGrpcTransport + available_transports = [ + transports.BigQueryReadGrpcTransport, + ] + assert transport in available_transports transport = BigQueryReadClient.get_transport_class("grpc") assert transport == transports.BigQueryReadGrpcTransport @@ -1051,7 +1067,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.BigQueryReadGrpcTransport, transports.BigQueryReadGrpcAsyncIOTransport], + [ + transports.BigQueryReadGrpcTransport, + transports.BigQueryReadGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1186,7 +1205,7 @@ def test_big_query_read_host_with_port(): def test_big_query_read_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.BigQueryReadGrpcTransport( @@ -1198,7 +1217,7 @@ def test_big_query_read_grpc_transport_channel(): def test_big_query_read_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.BigQueryReadGrpcAsyncIOTransport( @@ -1218,7 +1237,7 @@ def test_big_query_read_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1251,10 +1270,10 @@ def test_big_query_read_transport_channel_mtls_with_client_cert_source(transport ), ssl_credentials=mock_ssl_cred, quota_project_id=None, - options=( + options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), - ), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -1272,7 +1291,7 @@ def test_big_query_read_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel @@ -1297,10 +1316,10 @@ def test_big_query_read_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, - options=( + options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), - ), + ], ) assert transport.grpc_channel == mock_grpc_channel
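Note on the new `from_service_account_info` classmethod (added to `client.py` and
aliased on the async client, with the test above exercising it): it accepts an
in-memory mapping instead of a key file path. A minimal usage sketch, assuming a
hypothetical `key.json` file and the public `google.cloud.bigquery_storage_v1`
import path:

```python
import json

from google.cloud.bigquery_storage_v1 import BigQueryReadClient

# Load a downloaded service account key into a dict; any mapping with the
# same shape works (e.g. one fetched from a secret manager at runtime).
with open("key.json") as f:
    info = json.load(f)

client = BigQueryReadClient.from_service_account_info(info)

# Equivalent file-based construction, for comparison:
# client = BigQueryReadClient.from_service_account_file("key.json")
```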