diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 7640bf3..03a459c 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..113d9c2 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. 
+ +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-memcache + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 0000000..cf5de74 --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 87c7159..e1c8a45 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-memcache # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b1..32302e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/LICENSE b/LICENSE index a8ee855..d645695 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py index fd21c36..b09fddb 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -100,8 +100,36 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) - from_service_account_info = CloudMemcacheClient.from_service_account_info - from_service_account_file = CloudMemcacheClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. + """ + return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. 
+ """ + return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -362,7 +390,7 @@ async def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. @@ -464,7 +492,6 @@ async def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py index 0ae3f3e..591ee49 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -564,7 +564,7 @@ def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. @@ -667,7 +667,6 @@ def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py index a6a2b3c..c9b57c7 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py index bc03a88..61c7dde 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -126,7 +126,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -134,70 +137,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -205,18 +188,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -230,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py index c11f7c5..d669536 100644 --- a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -81,7 +81,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -159,10 +159,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -171,7 +171,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -179,70 +182,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -250,18 +233,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py index 5fe285a..29acce6 100644 --- a/google/cloud/memcache_v1/types/__init__.py +++ b/google/cloud/memcache_v1/types/__init__.py @@ -16,31 +16,31 @@ # from .cloud_memcache import ( + ApplyParametersRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, - GetInstanceRequest, - CreateInstanceRequest, - UpdateInstanceRequest, - DeleteInstanceRequest, - ApplyParametersRequest, - UpdateParametersRequest, MemcacheParameters, OperationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, MemcacheVersion, ) __all__ = ( + "ApplyParametersRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", "MemcacheParameters", "OperationMetadata", + "UpdateInstanceRequest", + "UpdateParametersRequest", "MemcacheVersion", ) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py index 066bc9d..a148657 100644 --- a/google/cloud/memcache_v1/types/cloud_memcache.py +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -352,7 +352,7 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location. + - Must be unique within the user project / location If any of the above are not met, will raise an invalid argument error. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index d0d7b85..b687488 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -100,8 +100,36 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) - from_service_account_info = CloudMemcacheClient.from_service_account_info - from_service_account_file = CloudMemcacheClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. + """ + return CloudMemcacheClient.from_service_account_info.__func__(CloudMemcacheAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheAsyncClient: The constructed client. 
+ """ + return CloudMemcacheClient.from_service_account_file.__func__(CloudMemcacheAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -458,7 +486,6 @@ async def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 0da8b35..8d43719 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -661,7 +661,6 @@ def update_instance( - ``displayName`` - This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 2c598e4..a1b9e2c 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index d1ffc02..f5f202f 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -126,7 +126,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -134,70 +137,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -205,18 +188,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. 
- super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -230,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 24f0f89..f2452b4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -81,7 +81,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -159,10 +159,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -171,7 +171,10 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -179,70 +182,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -250,18 +233,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 9fa6727..90cf3eb 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -16,37 +16,37 @@ # from .cloud_memcache import ( + ApplyParametersRequest, + ApplySoftwareUpdateRequest, + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, Instance, ListInstancesRequest, ListInstancesResponse, - GetInstanceRequest, - CreateInstanceRequest, - UpdateInstanceRequest, - DeleteInstanceRequest, - ApplyParametersRequest, - UpdateParametersRequest, - ApplySoftwareUpdateRequest, + LocationMetadata, MemcacheParameters, OperationMetadata, - LocationMetadata, + UpdateInstanceRequest, + UpdateParametersRequest, ZoneMetadata, MemcacheVersion, ) __all__ = ( + "ApplyParametersRequest", + "ApplySoftwareUpdateRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", - "GetInstanceRequest", - "CreateInstanceRequest", - "UpdateInstanceRequest", - "DeleteInstanceRequest", - "ApplyParametersRequest", - "UpdateParametersRequest", - "ApplySoftwareUpdateRequest", + "LocationMetadata", "MemcacheParameters", "OperationMetadata", - "LocationMetadata", + "UpdateInstanceRequest", + "UpdateParametersRequest", "ZoneMetadata", "MemcacheVersion", ) diff --git a/noxfile.py b/noxfile.py index 9e90799..0eb433e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -41,6 +44,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -81,18 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -113,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -122,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. 
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -134,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -186,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa9493..f08bc22 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/setup.py b/setup.py index 70ec372..94938bf 100644 --- a/setup.py +++ b/setup.py @@ -44,8 +44,6 @@ "proto-plus >= 1.4.0", ), python_requires=">=3.6", - setup_requires=["libcst >= 0.2.5"], - scripts=["scripts/fixup_keywords.py"], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", diff --git a/synth.metadata b/synth.metadata index b83959d..df869eb 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,29 @@ "git": { "name": ".", "remote": "git@github.com:googleapis/python-memcache", - "sha": "b068bfca843c0d792bb2b79f5b6b28fcc80ae7c8" + "sha": "0f8357803b2ae2e06115d36ef4e84967a8ba1256" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", + "internalRef": "365882072" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } } ], @@ -31,6 +39,15 @@ "language": "python", "generator": "bazel" } + }, + { + "client": { + "source": "googleapis", + "apiName": "memcache", + "apiVersion": "v1", + "language": "python", + "generator": "bazel" + } } ] } \ No newline at end of file 
diff --git a/synth.py b/synth.py index 65a7ca3..b097003 100644 --- a/synth.py +++ b/synth.py @@ -38,15 +38,10 @@ excludes = [ "setup.py", "docs/index.rst", + "README.rst" ] s.move(library, excludes=excludes) -# Add extra linebreak after bulleted list to appease sphinx -s.replace("google/**/*client.py", """- Must be unique within the user project / location""", -"""- Must be unique within the user project / location\n""") -s.replace("google/**/*client.py", "- ``displayName``", "- ``displayName``\n") - - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 0000000..a929701 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. + +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.15.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.7.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.8.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 0000000..da93009 --- /dev/null +++ b/testing/constraints-3.9.txt @@ -0,0 +1,2 @@ +# This constraints file is left inentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1/__init__.py +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 896e3b1..b4793ec 100644 --- a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -90,15 +90,19 @@ def test__get_default_mtls_endpoint(): ) -def test_cloud_memcache_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = CloudMemcacheClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -114,9 +118,11 @@ def test_cloud_memcache_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -481,6 +487,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest @@ -841,6 +863,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest @@ -1055,6 +1093,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest @@ -1263,6 +1317,22 @@ def test_update_instance_from_dict(): test_update_instance(request_type=dict) +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + @pytest.mark.asyncio async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest @@ -1469,6 +1539,24 @@ def test_update_parameters_from_dict(): test_update_parameters(request_type=dict) +def test_update_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + client.update_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + @pytest.mark.asyncio async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest @@ -1687,6 +1775,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest @@ -1873,6 +1977,22 @@ def test_apply_parameters_from_dict(): test_apply_parameters(request_type=dict) +def test_apply_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
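The `*_empty_call` tests being added here all follow one shape: patch `__call__` on the transport's bound method, invoke the client method with no request and no flattened fields, and assert that the default request type reaches the stub. A hedged sketch of the same check expressed once with parametrization; this is a hypothetical consolidation rather than what the generator emits, and import paths are assumed to match this test module's existing imports:

import mock
import pytest
from google.auth import credentials
from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient
from google.cloud.memcache_v1.types import cloud_memcache

@pytest.mark.parametrize(
    "method_name,request_type",
    [
        ("list_instances", cloud_memcache.ListInstancesRequest),
        ("get_instance", cloud_memcache.GetInstanceRequest),
        ("delete_instance", cloud_memcache.DeleteInstanceRequest),
    ],
)
def test_empty_call_builds_default_request(method_name, request_type):
    client = CloudMemcacheClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patching __call__ on the stub wrapper's type keeps the RPC from going
    # out while still recording the request object the client constructed.
    with mock.patch.object(
        type(getattr(client.transport, method_name)), "__call__"
    ) as call:
        getattr(client, method_name)()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_type()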
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + client.apply_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + @pytest.mark.asyncio async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest diff --git a/tests/unit/gapic/memcache_v1beta2/__init__.py b/tests/unit/gapic/memcache_v1beta2/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/memcache_v1beta2/__init__.py +++ b/tests/unit/gapic/memcache_v1beta2/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 0671fea..1e242ab 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -92,15 +92,19 @@ def test__get_default_mtls_endpoint(): ) -def test_cloud_memcache_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = CloudMemcacheClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -116,9 +120,11 @@ def test_cloud_memcache_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "memcache.googleapis.com:443" @@ -483,6 +489,22 @@ def test_list_instances_from_dict(): test_list_instances(request_type=dict) +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
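The new `isinstance` assertions in the parametrized service-account tests exist because a factory that always returned the sync class would still satisfy the credentials assertion for the async parametrization. A self-contained illustration of that failure mode, with the credential factory mocked so no real key material is needed; it mirrors the pattern in this diff rather than adding behavior:

import mock
from google.auth import credentials
from google.oauth2 import service_account
from google.cloud.memcache_v1beta2.services.cloud_memcache import (
    CloudMemcacheAsyncClient,
)

def test_async_factory_returns_async_client():
    creds = credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        client = CloudMemcacheAsyncClient.from_service_account_info({"valid": True})
    # Without the isinstance check, a factory hard-coded to the sync class
    # would pass the credentials assertion but hand back the wrong type.
    assert isinstance(client, CloudMemcacheAsyncClient)
    assert client.transport._credentials == creds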
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + @pytest.mark.asyncio async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest @@ -846,6 +868,22 @@ def test_get_instance_from_dict(): test_get_instance(request_type=dict) +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + @pytest.mark.asyncio async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest @@ -1063,6 +1101,22 @@ def test_create_instance_from_dict(): test_create_instance(request_type=dict) +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + @pytest.mark.asyncio async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest @@ -1271,6 +1325,22 @@ def test_update_instance_from_dict(): test_update_instance(request_type=dict) +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + @pytest.mark.asyncio async def test_update_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest @@ -1477,6 +1547,24 @@ def test_update_parameters_from_dict(): test_update_parameters(request_type=dict) +def test_update_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + client.update_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + @pytest.mark.asyncio async def test_update_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest @@ -1695,6 +1783,22 @@ def test_delete_instance_from_dict(): test_delete_instance(request_type=dict) +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + @pytest.mark.asyncio async def test_delete_instance_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest @@ -1881,6 +1985,22 @@ def test_apply_parameters_from_dict(): test_apply_parameters(request_type=dict) +def test_apply_parameters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + client.apply_parameters() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + @pytest.mark.asyncio async def test_apply_parameters_async( transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest @@ -2087,6 +2207,24 @@ def test_apply_software_update_from_dict(): test_apply_software_update(request_type=dict) +def test_apply_software_update_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + client.apply_software_update() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + @pytest.mark.asyncio async def test_apply_software_update_async( transport: str = "grpc_asyncio",
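Every test function added by this change ends in `_empty_call` (the service-account tests are existing ones that gained parametrization), so the new coverage is easy to run in isolation while iterating. A small, hypothetical helper using pytest's programmatic entry point, equivalent to `pytest -k empty_call` on the command line:

import sys

import pytest

if __name__ == "__main__":
    # -k does substring matching on test names; both the memcache_v1 and
    # memcache_v1beta2 suites contain the new *_empty_call tests.
    sys.exit(pytest.main(["-q", "-k", "empty_call", "tests/unit/gapic"]))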