diff --git a/packages/google-cloud-batch/batch-v1-py.tar.gz b/packages/google-cloud-batch/batch-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 582686c5a553..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 582686c5a553..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py index 770048a9d2d0..f431f02a9850 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/task.py @@ -109,7 +109,7 @@ class ComputeResource(proto.Message): class StatusEvent(proto.Message): - r"""Status event + r"""Status event. Attributes: type_ (str): @@ -119,9 +119,13 @@ class StatusEvent(proto.Message): event_time (google.protobuf.timestamp_pb2.Timestamp): The time this event occurred. task_execution (google.cloud.batch_v1.types.TaskExecution): - Task Execution + Task Execution. + This field is only defined for task-level status + events where the task fails. task_state (google.cloud.batch_v1.types.TaskStatus.State): - Task State + Task State. + This field is only defined for task-level status + events. """ type_: str = proto.Field( @@ -179,11 +183,11 @@ class TaskExecution(proto.Message): class TaskStatus(proto.Message): - r"""Status of a task + r"""Status of a task. Attributes: state (google.cloud.batch_v1.types.TaskStatus.State): - Task state + Task state. status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]): Detailed info about why the state is reached. 
""" diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py index 0e918ab65e9f..25fa5142c3a9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/__init__.py @@ -20,6 +20,8 @@ from .services.batch_service import BatchServiceAsyncClient, BatchServiceClient from .types.batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, CreateResourceAllowanceRequest, DeleteJobRequest, @@ -76,6 +78,8 @@ "AllocationPolicy", "BatchServiceClient", "CalendarPeriod", + "CancelJobRequest", + "CancelJobResponse", "ComputeResource", "CreateJobRequest", "CreateResourceAllowanceRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json index fbbd7eed609c..d2eeeb1176cf 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -75,6 +80,11 @@ "grpc-async": { "libraryClient": "BatchServiceAsyncClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -140,6 +150,11 @@ "rest": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 582686c5a553..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.23" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index 87156abbb235..e7ab4b27281f 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -633,6 +633,124 @@ async def sample_delete_job(): # Done; return the response. return response + async def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1alpha + + async def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]]): + The request object. CancelJob Request. + name (:class:`str`): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def update_job( self, request: Optional[Union[batch.UpdateJobRequest, dict]] = None, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index c267e54a623e..bd4d3f87ca0d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -1109,6 +1109,121 @@ def sample_delete_job(): # Done; return the response. return response + def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1alpha + + def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]): + The request object. CancelJob Request. + name (str): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. + return response + def update_job( self, request: Optional[Union[batch.UpdateJobRequest, dict]] = None, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py index bea486b7ead4..4f3ffa39c02d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/base.py @@ -161,6 +161,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.update_job: gapic_v1.method.wrap_method( self.update_job, default_timeout=60.0, @@ -288,6 +293,15 @@ def delete_job( ]: raise NotImplementedError() + @property + def cancel_job( + self, + ) -> Callable[ + [batch.CancelJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def update_job( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py index 04c0554ce78f..b723e9d7f161 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py @@ -334,6 +334,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. + + Returns: + Callable[[~.CancelJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.batch.v1alpha.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def update_job(self) -> Callable[[batch.UpdateJobRequest], gcb_job.Job]: r"""Return a callable for the update job method over gRPC. 
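The gRPC transport hunk above follows the generated transports' lazy-stub pattern: each RPC stub is built from `grpc_channel.unary_unary(...)` on first property access and cached in `self._stubs` so later calls reuse the same callable. A minimal, self-contained sketch of that caching pattern — `_FakeChannel` and `_SketchTransport` are illustrative stand-ins, not part of the library:

```python
class _FakeChannel:
    """Stand-in for grpc.Channel: unary_unary returns a callable stub."""

    def unary_unary(self, path, request_serializer=None, response_deserializer=None):
        return lambda request: f"sent {request!r} to {path}"


class _SketchTransport:
    def __init__(self, channel):
        self.grpc_channel = channel
        self._stubs = {}

    @property
    def cancel_job(self):
        # Build the stub only once, then serve it from the cache.
        if "cancel_job" not in self._stubs:
            self._stubs["cancel_job"] = self.grpc_channel.unary_unary(
                "/google.cloud.batch.v1alpha.BatchService/CancelJob"
            )
        return self._stubs["cancel_job"]


transport = _SketchTransport(_FakeChannel())
# The same stub object is returned on every access after the first.
assert transport.cancel_job is transport.cancel_job
```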
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py index 23e047db70f0..fa50038d8225 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py @@ -340,6 +340,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.batch.v1alpha.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def update_job(self) -> Callable[[batch.UpdateJobRequest], Awaitable[gcb_job.Job]]: r"""Return a callable for the update job method over gRPC. @@ -613,6 +639,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: gapic_v1.method_async.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.update_job: gapic_v1.method_async.wrap_method( self.update_job, default_timeout=60.0, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py index 16b672e6217c..4a1d9e6184fb 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py @@ -79,6 +79,14 @@ class BatchServiceRestInterceptor: .. code-block:: python class MyCustomBatchServiceInterceptor(BatchServiceRestInterceptor): + def pre_cancel_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_job(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -181,6 +189,27 @@ def post_update_resource_allowance(self, response): """ + def pre_cancel_job( + self, request: batch.CancelJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchService server. 
+ """ + return request, metadata + + def post_cancel_job( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_job + + Override in a subclass to manipulate the response + after it is returned by the BatchService server but before + it is returned to user code. + """ + return response + def pre_create_job( self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, str]]]: @@ -722,6 +751,99 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelJob(BatchServiceRestStub): + def __hash__(self): + return hash("CancelJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: batch.CancelJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel job method over HTTP. + + Args: + request (~.batch.CancelJobRequest): + The request object. CancelJob Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + pb_request = batch.CancelJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_job(resp) + return resp + class _CreateJob(BatchServiceRestStub): def __hash__(self): return hash("CreateJob") @@ -1776,6 +1898,14 @@ def __call__( resp = self._interceptor.post_update_resource_allowance(resp) return resp + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore + @property def create_job(self) -> Callable[[batch.CreateJobRequest], gcb_job.Job]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py index d410f1d71c07..c1ecdb476e43 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. # from .batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, CreateResourceAllowanceRequest, DeleteJobRequest, @@ -66,6 +68,8 @@ from .volume import GCS, NFS, PD, Volume __all__ = ( + "CancelJobRequest", + "CancelJobResponse", "CreateJobRequest", "CreateResourceAllowanceRequest", "DeleteJobRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py index 7cbfb53f4ef7..a1f56648f3a4 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py @@ -33,6 +33,8 @@ "CreateJobRequest", "GetJobRequest", "DeleteJobRequest", + "CancelJobRequest", + "CancelJobResponse", "UpdateJobRequest", "ListJobsRequest", "ListJobsResponse", @@ -170,6 +172,48 @@ class DeleteJobRequest(proto.Message): ) +class CancelJobRequest(proto.Message): + r"""CancelJob Request. + + Attributes: + name (str): + Required. Job name. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CancelJobResponse(proto.Message): + r"""Response to the CancelJob request.""" + + class UpdateJobRequest(proto.Message): r"""UpdateJob Request. 
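The new `CancelJobRequest` message above carries an optional `request_id` so that a retried cancel call within the server's deduplication window is recognized as a duplicate rather than treated as a second request. A hedged usage sketch against the client surface added elsewhere in this change — the project, location, and job name are placeholders, and `operation.result()` simply blocks until the long-running cancel completes:

```python
import uuid

from google.cloud import batch_v1alpha


def cancel_job_idempotently(job_name: str) -> batch_v1alpha.CancelJobResponse:
    client = batch_v1alpha.BatchServiceClient()
    request = batch_v1alpha.CancelJobRequest(
        name=job_name,
        # Reuse the same request_id if this call has to be retried.
        request_id=str(uuid.uuid4()),
    )
    operation = client.cancel_job(request=request)
    # Wait for the long-running operation and return the CancelJobResponse.
    return operation.result()


# Hypothetical job name for illustration only.
cancel_job_idempotently(
    "projects/my-project/locations/us-central1/jobs/my-job"
)
```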
diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 80bb9f2925a4..65660493a533 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -938,6 +938,26 @@ class InstancePolicyOrTemplate(proto.Message): Optional. Set this field true if you want Batch to install Ops Agent on your behalf. Default is false. + block_project_ssh_keys (bool): + Optional. Set this field to ``true`` if you want Batch to + block project-level SSH keys from accessing this job's VMs. + Alternatively, you can configure the job to specify a VM + instance template that blocks project-level SSH keys. In + either case, Batch blocks project-level SSH keys while + creating the VMs for this job. + + Batch allows project-level SSH keys for a job's VMs only if + all the following are true: + + - This field is undefined or set to ``false``. + - The job's VM instance template (if any) doesn't block + project-level SSH keys. + + Notably, you can override this behavior by manually updating + a VM to block or allow project-level SSH keys. For more + information about blocking project-level SSH keys, see the + Compute Engine documentation: + https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -959,6 +979,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=4, ) + block_project_ssh_keys: bool = proto.Field( + proto.BOOL, + number=5, + ) class NetworkInterface(proto.Message): r"""A network interface. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py index 625ed05e889f..f854b36207a1 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/task.py @@ -118,7 +118,7 @@ class ComputeResource(proto.Message): class StatusEvent(proto.Message): - r"""Status event + r"""Status event. Attributes: type_ (str): @@ -128,9 +128,13 @@ class StatusEvent(proto.Message): event_time (google.protobuf.timestamp_pb2.Timestamp): The time this event occurred. task_execution (google.cloud.batch_v1alpha.types.TaskExecution): - Task Execution + Task Execution. + This field is only defined for task-level status + events where the task fails. task_state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task State + Task State. + This field is only defined for task-level status + events. """ type_: str = proto.Field( @@ -197,11 +201,11 @@ class TaskExecution(proto.Message): class TaskStatus(proto.Message): - r"""Status of a task + r"""Status of a task. Attributes: state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task state + Task state. status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): Detailed info about why the state is reached. 
resource_usage (google.cloud.batch_v1alpha.types.TaskResourceUsage): diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py new file mode 100644 index 000000000000..19d634ba25bd --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1alpha_generated_BatchService_CancelJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1alpha + + +async def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END batch_v1alpha_generated_BatchService_CancelJob_async] diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py new file mode 100644 index 000000000000..4661b4b1b295 --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1alpha_generated_BatchService_CancelJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1alpha + + +def sample_cancel_job(): + # Create a client + client = batch_v1alpha.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1alpha.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END batch_v1alpha_generated_BatchService_CancelJob_sync] diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index c68cd953756f..e2df1067e4dd 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.23" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 9b4f0fd89dbc..7f67670b100c 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,9 +8,170 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.23" + "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", + "shortName": "BatchServiceAsyncClient" + }, + "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1alpha.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1alpha_generated_batch_service_cancel_job_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1alpha_generated_batch_service_cancel_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", + "shortName": "BatchServiceClient" + }, + "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1alpha.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1alpha_generated_batch_service_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1alpha_generated_batch_service_cancel_job_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py b/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py index 03e73c3a0a5e..3b43e30b2119 100644 --- a/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py +++ b/packages/google-cloud-batch/scripts/fixup_batch_v1alpha_keywords.py @@ -39,6 +39,7 @@ def partition( class batchCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', 'request_id', ), 'create_job': ('parent', 'job', 'job_id', 'request_id', ), 'create_resource_allowance': ('parent', 'resource_allowance', 'resource_allowance_id', 'request_id', ), 'delete_job': ('name', 'reason', 'request_id', ), diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 8d13af92a978..a25909de20f8 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -2261,11 +2261,11 @@ async def test_delete_job_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - batch.UpdateJobRequest, + batch.CancelJobRequest, 
dict, ], ) -def test_update_job(request_type, transport: str = "grpc"): +def test_cancel_job(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2276,33 +2276,22 @@ def test_update_job(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - response = client.update_job(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert isinstance(response, future.Future) -def test_update_job_empty_call(): +def test_cancel_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -2311,17 +2300,17 @@ def test_update_job_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_job() + client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest() -def test_update_job_non_empty_request_with_auto_populated_field(): +def test_cancel_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -2332,20 +2321,24 @@ def test_update_job_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest( + name="name_value", + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_job(request=request) + client.cancel_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest( + name="name_value", + ) -def test_update_job_use_cached_wrapped_rpc(): +def test_cancel_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2359,21 +2352,25 @@ def test_update_job_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.update_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2381,7 +2378,7 @@ def test_update_job_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_job_empty_call_async(): +async def test_cancel_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -2390,24 +2387,19 @@ async def test_update_job_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_job() + response = await client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateJobRequest() + assert args[0] == batch.CancelJobRequest() @pytest.mark.asyncio -async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2422,23 +2414,27 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.update_job + client._client._transport.cancel_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.update_job + client._client._transport.cancel_job ] = mock_object request = {} - await client.update_job(request) + await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.update_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2446,8 +2442,8 @@ async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_update_job_async( - transport: str = "grpc_asyncio", request_type=batch.UpdateJobRequest +async def test_cancel_job_async( + transport: str = "grpc_asyncio", request_type=batch.CancelJobRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2459,54 +2455,43 @@ async def test_update_job_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.update_job(request) + response = await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gcb_job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_update_job_async_from_dict(): - await test_update_job_async(request_type=dict) +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) -def test_update_job_field_headers(): +def test_cancel_job_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() - request.job.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: - call.return_value = gcb_job.Job() - client.update_job(request) + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2517,26 +2502,28 @@ def test_update_job_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "job.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_job_field_headers_async(): +async def test_cancel_job_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateJobRequest() + request = batch.CancelJobRequest() - request.job.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) - await client.update_job(request) + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2547,39 +2534,35 @@ async def test_update_job_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "job.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_job_flattened(): +def test_cancel_job_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.update_job( - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.cancel_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].job - mock_val = gcb_job.Job(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_job_flattened_error(): +def test_cancel_job_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2587,46 +2570,43 @@ def test_update_job_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_job( - batch.UpdateJobRequest(), - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.cancel_job( + batch.CancelJobRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_job_flattened_async(): +async def test_cancel_job_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() + call.return_value = operations_pb2.Operation(name="operations/op") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_job( - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.cancel_job( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].job - mock_val = gcb_job.Job(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_job_flattened_error_async(): +async def test_cancel_job_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2634,21 +2614,20 @@ async def test_update_job_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_job( - batch.UpdateJobRequest(), - job=gcb_job.Job(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.cancel_job( + batch.CancelJobRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - batch.ListJobsRequest, + batch.UpdateJobRequest, dict, ], ) -def test_list_jobs(request_type, transport: str = "grpc"): +def test_update_job(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2659,27 +2638,33 @@ def test_list_jobs(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) - response = client.list_jobs(request) + response = client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) -def test_list_jobs_empty_call(): +def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -2688,17 +2673,17 @@ def test_list_jobs_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_jobs() + client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest() + assert args[0] == batch.UpdateJobRequest() -def test_list_jobs_non_empty_request_with_auto_populated_field(): +def test_update_job_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -2709,30 +2694,20 @@ def test_list_jobs_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = batch.ListJobsRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", - ) + request = batch.UpdateJobRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_jobs(request=request) + client.update_job(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", - ) + assert args[0] == batch.UpdateJobRequest() -def test_list_jobs_use_cached_wrapped_rpc(): +def test_update_job_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2746,21 +2721,21 @@ def test_list_jobs_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods + assert client._transport.update_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc + client._transport._wrapped_methods[client._transport.update_job] = mock_rpc request = {} - client.list_jobs(request) + client.update_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_jobs(request) + client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2768,7 +2743,7 @@ def test_list_jobs_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_jobs_empty_call_async(): +async def test_update_job_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -2777,22 +2752,24 @@ async def test_list_jobs_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) ) - response = await client.list_jobs() + response = await client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest() + assert args[0] == batch.UpdateJobRequest() @pytest.mark.asyncio -async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2807,23 +2784,23 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # Ensure method has been cached assert ( - client._client._transport.list_jobs + client._client._transport.update_job in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_jobs + client._client._transport.update_job ] = mock_object request = {} - await client.list_jobs(request) + await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_jobs(request) + await client.update_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2831,8 +2808,8 @@ async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio -async def test_list_jobs_async( - transport: str = "grpc_asyncio", request_type=batch.ListJobsRequest +async def test_update_job_async( + transport: str = "grpc_asyncio", request_type=batch.UpdateJobRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2844,48 +2821,54 @@ async def test_list_jobs_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - response = await client.list_jobs(request) + gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, + ) + ) + response = await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) @pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) +async def test_update_job_async_from_dict(): + await test_update_job_async(request_type=dict) -def test_list_jobs_field_headers(): +def test_update_job_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() - request.parent = "parent_value" + request.job.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = batch.ListJobsResponse() - client.list_jobs(request) + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = gcb_job.Job() + client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2896,28 +2879,26 @@ def test_list_jobs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "job.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): +async def test_update_job_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListJobsRequest() + request = batch.UpdateJobRequest() - request.parent = "parent_value" + request.job.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse() - ) - await client.list_jobs(request) + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) + await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2928,35 +2909,39 @@ async def test_list_jobs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "job.name=name_value", ) in kw["metadata"] -def test_list_jobs_flattened(): +def test_update_job_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse() + call.return_value = gcb_job.Job() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_jobs( - parent="parent_value", + client.update_job( + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].job + mock_val = gcb_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_list_jobs_flattened_error(): +def test_update_job_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2964,43 +2949,46 @@ def test_list_jobs_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_jobs( - batch.ListJobsRequest(), - parent="parent_value", + client.update_job( + batch.UpdateJobRequest(), + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_list_jobs_flattened_async(): +async def test_update_job_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.update_job), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse() + call.return_value = gcb_job.Job() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListJobsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_jobs( - parent="parent_value", + response = await client.update_job( + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].job + mock_val = gcb_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): +async def test_update_job_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3008,290 +2996,105 @@ async def test_list_jobs_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
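The ``*_flattened_error`` tests all pin down the same client contract: a request object and flattened keyword arguments are mutually exclusive, and mixing them raises ``ValueError`` before any RPC is attempted. A self-contained sketch of that behavior, using anonymous credentials so no network call is made:

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with pytest.raises(ValueError):
        client.update_job(
            batch_v1alpha.UpdateJobRequest(),          # request object ...
            job=batch_v1alpha.Job(name="name_value"),  # ... plus a flattened field
        )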
with pytest.raises(ValueError): - await client.list_jobs( - batch.ListJobsRequest(), - parent="parent_value", + await client.update_job( + batch.UpdateJobRequest(), + job=gcb_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_jobs_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + batch.ListJobsRequest, + dict, + ], +) +def test_list_jobs(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) + response = client.list_jobs(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.ListJobsRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, job.Job) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_jobs_pages(transport_name: str = "grpc"): +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.ListJobsRequest() -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = BatchServiceAsyncClient( +def test_list_jobs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.ListJobsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, job.Job) for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token="abc", - ), - batch.ListJobsResponse( - jobs=[], - next_page_token="def", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token="ghi", - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - batch.GetTaskRequest, - dict, - ], -) -def test_get_task(request_type, transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name="name_value", - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == "name_value" - - -def test_get_task_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_task() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest() - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetTaskRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_task(request=request) + client.list_jobs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest( - name="name_value", + assert args[0] == batch.ListJobsRequest( + parent="parent_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) -def test_get_task_use_cached_wrapped_rpc(): +def test_list_jobs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3305,21 +3108,21 @@ def test_get_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods + assert client._transport.list_jobs in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc request = {} - client.get_task(request) + client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_task(request) + client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3327,7 +3130,7 @@ def test_get_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_task_empty_call_async(): +async def test_list_jobs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -3336,21 +3139,22 @@ async def test_get_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - task.Task( - name="name_value", + batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_task() + response = await client.list_jobs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest() + assert args[0] == batch.ListJobsRequest() @pytest.mark.asyncio -async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3365,23 +3169,23 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # Ensure method has been cached assert ( - client._client._transport.get_task + client._client._transport.list_jobs in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_task + client._client._transport.list_jobs ] = mock_object request = {} - await client.get_task(request) + await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_task(request) + await client.list_jobs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3389,8 +3193,8 @@ async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio -async def test_get_task_async( - transport: str = "grpc_asyncio", request_type=batch.GetTaskRequest +async def test_list_jobs_async( + transport: str = "grpc_asyncio", request_type=batch.ListJobsRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3402,46 +3206,48 @@ async def test_get_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - task.Task( - name="name_value", + batch.ListJobsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.get_task(request) + response = await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() + request = batch.ListJobsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == "name_value" + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) -def test_get_task_field_headers(): +def test_list_jobs_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() + request = batch.ListJobsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value = task.Task() - client.get_task(request) + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = batch.ListJobsResponse() + client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3452,26 +3258,28 @@ def test_get_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_task_field_headers_async(): +async def test_list_jobs_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() + request = batch.ListJobsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batch.ListJobsResponse() + ) + await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. 
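The field-header tests around this hunk verify that request fields which form part of the HTTP/1.1 URI are mirrored into the ``x-goog-request-params`` routing metadata. A small sketch of the same check against a mocked transport (anonymous credentials, no real RPC; it simply mirrors the assertions in these tests):

    from unittest import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import batch_v1alpha
    from google.cloud.batch_v1alpha.types import batch

    client = batch_v1alpha.BatchServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    request = batch.ListJobsRequest(parent="parent_value")
    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
        call.return_value = batch.ListJobsResponse()
        client.list_jobs(request)
        # The URI-bound field is forwarded as routing metadata.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "parent=parent_value") in kw["metadata"]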
assert len(call.mock_calls) @@ -3482,35 +3290,35 @@ async def test_get_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_task_flattened(): +def test_list_jobs_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = task.Task() + call.return_value = batch.ListJobsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_task( - name="name_value", - ) + client.list_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_task_flattened_error(): +def test_list_jobs_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3518,41 +3326,43 @@ def test_get_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name="name_value", + client.list_jobs( + batch.ListJobsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_task_flattened_async(): +async def test_list_jobs_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_task), "__call__") as call: + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = task.Task() + call.return_value = batch.ListJobsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batch.ListJobsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_task( - name="name_value", + response = await client.list_jobs( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_task_flattened_error_async(): +async def test_list_jobs_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3560,132 +3370,318 @@ async def test_get_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_task( - batch.GetTaskRequest(), - name="name_value", + await client.list_jobs( + batch.ListJobsRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - batch.ListTasksRequest, - dict, - ], -) -def test_list_tasks(request_type, transport: str = "grpc"): +def test_list_jobs_pager(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - response = client.list_tasks(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, job.Job) for i in results) -def test_list_tasks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_list_jobs_pages(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. 
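The pager tests stub ``list_jobs`` with a series of ``ListJobsResponse`` pages and check that the returned ``ListJobsPager`` follows ``next_page_token`` until the pages are exhausted. In normal use the same pager is simply iterated; a sketch with a placeholder parent resource (not part of the generated diff):

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder
    # Iterating the pager transparently issues follow-up ListJobs calls.
    for job in client.list_jobs(parent=parent):
        print(job.name)
    # Page-level access is also available.
    for page in client.list_jobs(parent=parent).pages:
        print(page.next_page_token)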
+ call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - client.list_tasks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest() + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_tasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListTasksRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_tasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest( - parent="parent_value", - filter="filter_value", - order_by="order_by_value", - page_token="page_token_value", + with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, ) - - -def test_list_tasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + async_pager = await client.list_jobs( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods + assert len(responses) == 6 + assert all(isinstance(i, job.Job) for i in responses) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc - request = {} - client.list_tasks(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.list_tasks(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + batch.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + batch.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.GetTaskRequest, + dict, + ], +) +def test_get_task(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.GetTaskRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.GetTaskRequest() + + +def test_get_task_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.GetTaskRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_task(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.GetTaskRequest( + name="name_value", + ) + + +def test_get_task_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_task in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_task] = mock_rpc + request = {} + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3693,7 +3689,7 @@ def test_list_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_tasks_empty_call_async(): +async def test_get_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -3702,22 +3698,21 @@ async def test_list_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + task.Task( + name="name_value", ) ) - response = await client.list_tasks() + response = await client.get_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest() + assert args[0] == batch.GetTaskRequest() @pytest.mark.asyncio -async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3732,23 +3727,23 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.list_tasks + client._client._transport.get_task in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_tasks + client._client._transport.get_task ] = mock_object request = {} - await client.list_tasks(request) + await client.get_task(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_tasks(request) + await client.get_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3756,8 +3751,8 @@ async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_list_tasks_async( - transport: str = "grpc_asyncio", request_type=batch.ListTasksRequest +async def test_get_task_async( + transport: str = "grpc_asyncio", request_type=batch.GetTaskRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3769,48 +3764,46 @@ async def test_list_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + task.Task( + name="name_value", ) ) - response = await client.list_tasks(request) + response = await client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() + request = batch.GetTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
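The async variants in this hunk mirror the sync tests with ``BatchServiceAsyncClient`` and awaited calls. A rough sketch of how ``get_task`` is awaited outside of these mocks, with a hypothetical task resource name and real credentials assumed:

    import asyncio
    from google.cloud import batch_v1alpha

    async def fetch_task(name: str) -> str:
        client = batch_v1alpha.BatchServiceAsyncClient()
        task = await client.get_task(name=name)
        return task.name

    # asyncio.run(fetch_task(
    #     "projects/my-project/locations/us-central1/jobs/my-job"
    #     "/taskGroups/group0/tasks/0"  # placeholder task name
    # ))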
- assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, task.Task) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) -def test_list_tasks_field_headers(): +def test_get_task_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() + request = batch.GetTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request) + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = task.Task() + client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3821,28 +3814,26 @@ def test_list_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): +async def test_get_task_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() + request = batch.GetTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse() - ) - await client.list_tasks(request) + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3853,35 +3844,35 @@ async def test_list_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_tasks_flattened(): +def test_get_task_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() + call.return_value = task.Task() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_tasks( - parent="parent_value", + client.get_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_tasks_flattened_error(): +def test_get_task_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3889,43 +3880,41 @@ def test_list_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_tasks( - batch.ListTasksRequest(), - parent="parent_value", + client.get_task( + batch.GetTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_tasks_flattened_async(): +async def test_get_task_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + with mock.patch.object(type(client.transport.get_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() + call.return_value = task.Task() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListTasksResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_tasks( - parent="parent_value", + response = await client.get_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): +async def test_get_task_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3933,214 +3922,20 @@ async def test_list_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_tasks( - batch.ListTasksRequest(), - parent="parent_value", - ) - - -def test_list_tasks_pager(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_tasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, task.Task) for i in results) - - -def test_list_tasks_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token="abc", - ), - batch.ListTasksResponse( - tasks=[], - next_page_token="def", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token="ghi", - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, + await client.get_task( + batch.GetTaskRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - batch.CreateResourceAllowanceRequest, + batch.ListTasksRequest, dict, ], ) -def test_create_resource_allowance(request_type, transport: str = "grpc"): +def test_list_tasks(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4151,29 +3946,27 @@ def test_create_resource_allowance(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + call.return_value = batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.create_resource_allowance(request) + response = client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_resource_allowance_empty_call(): +def test_list_tasks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -4182,19 +3975,17 @@ def test_create_resource_allowance_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_resource_allowance() + client.list_tasks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest() + assert args[0] == batch.ListTasksRequest() -def test_create_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_list_tasks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -4205,28 +3996,30 @@ def test_create_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.CreateResourceAllowanceRequest( + request = batch.ListTasksRequest( parent="parent_value", - resource_allowance_id="resource_allowance_id_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_resource_allowance(request=request) + client.list_tasks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest( + assert args[0] == batch.ListTasksRequest( parent="parent_value", - resource_allowance_id="resource_allowance_id_value", + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) -def test_create_resource_allowance_use_cached_wrapped_rpc(): +def test_list_tasks_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4240,26 +4033,21 @@ def test_create_resource_allowance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_resource_allowance - in client._transport._wrapped_methods - ) + assert client._transport.list_tasks in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_resource_allowance - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc request = {} - client.create_resource_allowance(request) + client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_resource_allowance(request) + client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4267,7 +4055,7 @@ def test_create_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_resource_allowance_empty_call_async(): +async def test_list_tasks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = BatchServiceAsyncClient( @@ -4276,26 +4064,22 @@ async def test_create_resource_allowance_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.create_resource_allowance() + response = await client.list_tasks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest() + assert args[0] == batch.ListTasksRequest() @pytest.mark.asyncio -async def test_create_resource_allowance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4310,23 +4094,23 @@ async def test_create_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_resource_allowance + client._client._transport.list_tasks in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.create_resource_allowance + client._client._transport.list_tasks ] = mock_object request = {} - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4334,8 +4118,8 @@ async def test_create_resource_allowance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.CreateResourceAllowanceRequest +async def test_list_tasks_async( + transport: str = "grpc_asyncio", request_type=batch.ListTasksRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4347,52 +4131,48 @@ async def test_create_resource_allowance_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListTasksResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.create_resource_allowance(request) + response = await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_create_resource_allowance_async_from_dict(): - await test_create_resource_allowance_async(request_type=dict) +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) -def test_create_resource_allowance_field_headers(): +def test_list_tasks_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.create_resource_allowance(request) + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = batch.ListTasksResponse() + client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4408,25 +4188,23 @@ def test_create_resource_allowance_field_headers(): @pytest.mark.asyncio -async def test_create_resource_allowance_field_headers_async(): +async def test_list_tasks_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() + request = batch.ListTasksRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListTasksResponse() ) - await client.create_resource_allowance(request) + await client.list_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4441,29 +4219,19 @@ async def test_create_resource_allowance_field_headers_async(): ) in kw["metadata"] -def test_create_resource_allowance_flattened(): +def test_list_tasks_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListTasksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_resource_allowance( + client.list_tasks( parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected @@ -4473,21 +4241,9 @@ def test_create_resource_allowance_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = "resource_allowance_id_value" - assert arg == mock_val -def test_create_resource_allowance_flattened_error(): +def test_list_tasks_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4495,48 +4251,30 @@ def test_create_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), + client.list_tasks( + batch.ListTasksRequest(), parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.asyncio -async def test_create_resource_allowance_flattened_async(): +async def test_list_tasks_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListTasksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListTasksResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_resource_allowance( + response = await client.list_tasks( parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected @@ -4546,22 +4284,10 @@ async def test_create_resource_allowance_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = "resource_allowance_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_create_resource_allowance_flattened_error_async(): +async def test_list_tasks_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4569,61 +4295,247 @@ async def test_create_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), + await client.list_tasks( + batch.ListTasksRequest(), parent="parent_value", - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - resource_allowance_id="resource_allowance_id_value", ) -@pytest.mark.parametrize( - "request_type", - [ - batch.GetResourceAllowanceRequest, - dict, - ], -) -def test_get_resource_allowance(request_type, transport: str = "grpc"): +def test_list_tasks_pager(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, ) - response = client.get_resource_allowance(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(transport_name: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + batch.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + batch.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.CreateResourceAllowanceRequest, + dict, + ], +) +def test_create_resource_allowance(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.create_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.CreateResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, resource_allowance.ResourceAllowance) + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) assert response.name == "name_value" assert response.uid == "uid_value" -def test_get_resource_allowance_empty_call(): +def test_create_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -4633,18 +4545,18 @@ def test_get_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource_allowance() + client.create_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest() + assert args[0] == batch.CreateResourceAllowanceRequest() -def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_create_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = BatchServiceClient( @@ -4655,26 +4567,28 @@ def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.GetResourceAllowanceRequest( - name="name_value", + request = batch.CreateResourceAllowanceRequest( + parent="parent_value", + resource_allowance_id="resource_allowance_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource_allowance(request=request) + client.create_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest( - name="name_value", + assert args[0] == batch.CreateResourceAllowanceRequest( + parent="parent_value", + resource_allowance_id="resource_allowance_id_value", ) -def test_get_resource_allowance_use_cached_wrapped_rpc(): +def test_create_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4689,7 +4603,7 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_resource_allowance + client._transport.create_resource_allowance in client._transport._wrapped_methods ) @@ -4699,15 +4613,15 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_resource_allowance + client._transport.create_resource_allowance ] = mock_rpc request = {} - client.get_resource_allowance(request) + client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource_allowance(request) + client.create_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4715,7 +4629,7 @@ def test_get_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_resource_allowance_empty_call_async(): +async def test_create_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -4725,23 +4639,23 @@ async def test_get_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance( + gcb_resource_allowance.ResourceAllowance( name="name_value", uid="uid_value", ) ) - response = await client.get_resource_allowance() + response = await client.create_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest() + assert args[0] == batch.CreateResourceAllowanceRequest() @pytest.mark.asyncio -async def test_get_resource_allowance_async_use_cached_wrapped_rpc( +async def test_create_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4758,23 +4672,23 @@ async def test_get_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_resource_allowance + client._client._transport.create_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.get_resource_allowance + client._client._transport.create_resource_allowance ] = mock_object request = {} - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4782,8 +4696,8 @@ async def test_get_resource_allowance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.GetResourceAllowanceRequest +async def test_create_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.CreateResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4796,51 +4710,51 @@ async def test_get_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance( + gcb_resource_allowance.ResourceAllowance( name="name_value", uid="uid_value", ) ) - response = await client.get_resource_allowance(request) + response = await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resource_allowance.ResourceAllowance) + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) assert response.name == "name_value" assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_get_resource_allowance_async_from_dict(): - await test_get_resource_allowance_async(request_type=dict) +async def test_create_resource_allowance_async_from_dict(): + await test_create_resource_allowance_async(request_type=dict) -def test_get_resource_allowance_field_headers(): +def test_create_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: - call.return_value = resource_allowance.ResourceAllowance() - client.get_resource_allowance(request) + call.return_value = gcb_resource_allowance.ResourceAllowance() + client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4851,30 +4765,30 @@ def test_get_resource_allowance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_resource_allowance_field_headers_async(): +async def test_create_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.GetResourceAllowanceRequest() + request = batch.CreateResourceAllowanceRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance() + gcb_resource_allowance.ResourceAllowance() ) - await client.get_resource_allowance(request) + await client.create_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4885,37 +4799,57 @@ async def test_get_resource_allowance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_resource_allowance_flattened(): +def test_create_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resource_allowance.ResourceAllowance() + call.return_value = gcb_resource_allowance.ResourceAllowance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_resource_allowance( - name="name_value", + client.create_resource_allowance( + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].resource_allowance_id + mock_val = "resource_allowance_id_value" assert arg == mock_val -def test_get_resource_allowance_flattened_error(): +def test_create_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4923,45 +4857,73 @@ def test_get_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name="name_value", + client.create_resource_allowance( + batch.CreateResourceAllowanceRequest(), + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.asyncio -async def test_get_resource_allowance_flattened_async(): +async def test_create_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_resource_allowance), "__call__" + type(client.transport.create_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resource_allowance.ResourceAllowance() + call.return_value = gcb_resource_allowance.ResourceAllowance() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource_allowance.ResourceAllowance() + gcb_resource_allowance.ResourceAllowance() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_resource_allowance( - name="name_value", + response = await client.create_resource_allowance( + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].resource_allowance_id + mock_val = "resource_allowance_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_resource_allowance_flattened_error_async(): +async def test_create_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4969,20 +4931,28 @@ async def test_get_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name="name_value", + await client.create_resource_allowance( + batch.CreateResourceAllowanceRequest(), + parent="parent_value", + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + resource_allowance_id="resource_allowance_id_value", ) @pytest.mark.parametrize( "request_type", [ - batch.DeleteResourceAllowanceRequest, + batch.GetResourceAllowanceRequest, dict, ], ) -def test_delete_resource_allowance(request_type, transport: str = "grpc"): +def test_get_resource_allowance(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4994,23 +4964,28 @@ def test_delete_resource_allowance(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_resource_allowance(request) + call.return_value = resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" -def test_delete_resource_allowance_empty_call(): +def test_get_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -5020,18 +4995,18 @@ def test_delete_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_resource_allowance() + client.get_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest() + assert args[0] == batch.GetResourceAllowanceRequest() -def test_delete_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -5042,28 +5017,26 @@ def test_delete_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.DeleteResourceAllowanceRequest( + request = batch.GetResourceAllowanceRequest( name="name_value", - reason="reason_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_resource_allowance(request=request) + client.get_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest( + assert args[0] == batch.GetResourceAllowanceRequest( name="name_value", - reason="reason_value", ) -def test_delete_resource_allowance_use_cached_wrapped_rpc(): +def test_get_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5078,7 +5051,7 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_resource_allowance + client._transport.get_resource_allowance in client._transport._wrapped_methods ) @@ -5088,19 +5061,15 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_resource_allowance + client._transport.get_resource_allowance ] = mock_rpc request = {} - client.delete_resource_allowance(request) + client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_resource_allowance(request) + client.get_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5108,7 +5077,7 @@ def test_delete_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_resource_allowance_empty_call_async(): +async def test_get_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -5118,20 +5087,23 @@ async def test_delete_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) ) - response = await client.delete_resource_allowance() + response = await client.get_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest() + assert args[0] == batch.GetResourceAllowanceRequest() @pytest.mark.asyncio -async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( +async def test_get_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5148,27 +5120,23 @@ async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_resource_allowance + client._client._transport.get_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.delete_resource_allowance + client._client._transport.get_resource_allowance ] = mock_object request = {} - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5176,8 +5144,8 @@ async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.DeleteResourceAllowanceRequest +async def test_get_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.GetResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5190,46 +5158,51 @@ async def test_delete_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) ) - response = await client.delete_resource_allowance(request) + response = await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" @pytest.mark.asyncio -async def test_delete_resource_allowance_async_from_dict(): - await test_delete_resource_allowance_async(request_type=dict) +async def test_get_resource_allowance_async_from_dict(): + await test_get_resource_allowance_async(request_type=dict) -def test_delete_resource_allowance_field_headers(): +def test_get_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_resource_allowance(request) + call.return_value = resource_allowance.ResourceAllowance() + client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5245,25 +5218,25 @@ def test_delete_resource_allowance_field_headers(): @pytest.mark.asyncio -async def test_delete_resource_allowance_field_headers_async(): +async def test_get_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.DeleteResourceAllowanceRequest() + request = batch.GetResourceAllowanceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + resource_allowance.ResourceAllowance() ) - await client.delete_resource_allowance(request) + await client.get_resource_allowance(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5278,20 +5251,20 @@ async def test_delete_resource_allowance_field_headers_async(): ) in kw["metadata"] -def test_delete_resource_allowance_flattened(): +def test_get_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resource_allowance.ResourceAllowance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_resource_allowance( + client.get_resource_allowance( name="name_value", ) @@ -5304,7 +5277,7 @@ def test_delete_resource_allowance_flattened(): assert arg == mock_val -def test_delete_resource_allowance_flattened_error(): +def test_get_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5312,31 +5285,31 @@ def test_delete_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_resource_allowance( - batch.DeleteResourceAllowanceRequest(), + client.get_resource_allowance( + batch.GetResourceAllowanceRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_resource_allowance_flattened_async(): +async def test_get_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_resource_allowance), "__call__" + type(client.transport.get_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = resource_allowance.ResourceAllowance() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + resource_allowance.ResourceAllowance() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_resource_allowance( + response = await client.get_resource_allowance( name="name_value", ) @@ -5350,7 +5323,7 @@ async def test_delete_resource_allowance_flattened_async(): @pytest.mark.asyncio -async def test_delete_resource_allowance_flattened_error_async(): +async def test_get_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5358,8 +5331,8 @@ async def test_delete_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_resource_allowance( - batch.DeleteResourceAllowanceRequest(), + await client.get_resource_allowance( + batch.GetResourceAllowanceRequest(), name="name_value", ) @@ -5367,11 +5340,11 @@ async def test_delete_resource_allowance_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - batch.ListResourceAllowancesRequest, + batch.DeleteResourceAllowanceRequest, dict, ], ) -def test_list_resource_allowances(request_type, transport: str = "grpc"): +def test_delete_resource_allowance(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5383,28 +5356,23 @@ def test_list_resource_allowances(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - response = client.list_resource_allowances(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourceAllowancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) -def test_list_resource_allowances_empty_call(): +def test_delete_resource_allowance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -5414,18 +5382,18 @@ def test_list_resource_allowances_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_resource_allowances() + client.delete_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest() + assert args[0] == batch.DeleteResourceAllowanceRequest() -def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): +def test_delete_resource_allowance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -5436,28 +5404,28 @@ def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = batch.ListResourceAllowancesRequest( - parent="parent_value", - page_token="page_token_value", + request = batch.DeleteResourceAllowanceRequest( + name="name_value", + reason="reason_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_resource_allowances(request=request) + client.delete_resource_allowance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == batch.DeleteResourceAllowanceRequest( + name="name_value", + reason="reason_value", ) -def test_list_resource_allowances_use_cached_wrapped_rpc(): +def test_delete_resource_allowance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5472,7 +5440,7 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_resource_allowances + client._transport.delete_resource_allowance in client._transport._wrapped_methods ) @@ -5482,15 +5450,19 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_resource_allowances + client._transport.delete_resource_allowance ] = mock_rpc request = {} - client.list_resource_allowances(request) + client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resource_allowances(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5498,7 +5470,7 @@ def test_list_resource_allowances_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_resource_allowances_empty_call_async(): +async def test_delete_resource_allowance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -5508,23 +5480,20 @@ async def test_list_resource_allowances_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_resource_allowances() + response = await client.delete_resource_allowance() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest() + assert args[0] == batch.DeleteResourceAllowanceRequest() @pytest.mark.asyncio -async def test_list_resource_allowances_async_use_cached_wrapped_rpc( +async def test_delete_resource_allowance_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5541,23 +5510,27 @@ async def test_list_resource_allowances_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_resource_allowances + client._client._transport.delete_resource_allowance in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.list_resource_allowances + client._client._transport.delete_resource_allowance ] = mock_object request = {} - await client.list_resource_allowances(request) + await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.list_resource_allowances(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_resource_allowance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5565,8 +5538,8 @@ async def test_list_resource_allowances_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_resource_allowances_async( - transport: str = "grpc_asyncio", request_type=batch.ListResourceAllowancesRequest +async def test_delete_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.DeleteResourceAllowanceRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5579,51 +5552,46 @@ async def test_list_resource_allowances_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_resource_allowances(request) + response = await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListResourceAllowancesAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_resource_allowances_async_from_dict(): - await test_list_resource_allowances_async(request_type=dict) +async def test_delete_resource_allowance_async_from_dict(): + await test_delete_resource_allowance_async(request_type=dict) -def test_list_resource_allowances_field_headers(): +def test_delete_resource_allowance_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: - call.return_value = batch.ListResourceAllowancesResponse() - client.list_resource_allowances(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5634,30 +5602,30 @@ def test_list_resource_allowances_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_resource_allowances_field_headers_async(): +async def test_delete_resource_allowance_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.ListResourceAllowancesRequest() + request = batch.DeleteResourceAllowanceRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_resource_allowances(request) + await client.delete_resource_allowance(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5668,37 +5636,37 @@ async def test_list_resource_allowances_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_resource_allowances_flattened(): +def test_delete_resource_allowance_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = batch.ListResourceAllowancesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_resource_allowances( - parent="parent_value", + client.delete_resource_allowance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_resource_allowances_flattened_error(): +def test_delete_resource_allowance_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5706,45 +5674,45 @@ def test_list_resource_allowances_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resource_allowances( - batch.ListResourceAllowancesRequest(), - parent="parent_value", + client.delete_resource_allowance( + batch.DeleteResourceAllowanceRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_resource_allowances_flattened_async(): +async def test_delete_resource_allowance_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" + type(client.transport.delete_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = batch.ListResourceAllowancesResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - batch.ListResourceAllowancesResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_resource_allowances( - parent="parent_value", + response = await client.delete_resource_allowance( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_resource_allowances_flattened_error_async(): +async def test_delete_resource_allowance_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5752,259 +5720,53 @@ async def test_list_resource_allowances_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_resource_allowances( - batch.ListResourceAllowancesRequest(), - parent="parent_value", + await client.delete_resource_allowance( + batch.DeleteResourceAllowanceRequest(), + name="name_value", ) -def test_list_resource_allowances_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + batch.ListResourceAllowancesRequest, + dict, + ], +) +def test_list_resource_allowances(request_type, transport: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_resource_allowances), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_resource_allowances( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resource_allowance.ResourceAllowance) for i in results) - - -def test_list_resource_allowances_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_resource_allowances(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pager(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_resource_allowances( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, resource_allowance.ResourceAllowance) for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pages(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token="abc", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token="def", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token="ghi", - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_resource_allowances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - batch.UpdateResourceAllowanceRequest, - dict, - ], -) -def test_update_resource_allowance(request_type, transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + call.return_value = batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - response = client.update_resource_allowance(request) + response = client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListResourceAllowancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_update_resource_allowance_empty_call(): +def test_list_resource_allowances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceClient( @@ -6014,18 +5776,18 @@ def test_update_resource_allowance_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.update_resource_allowance() + client.list_resource_allowances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest() -def test_update_resource_allowance_non_empty_request_with_auto_populated_field(): +def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BatchServiceClient( @@ -6036,22 +5798,28 @@ def test_update_resource_allowance_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest( + parent="parent_value", + page_token="page_token_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_resource_allowance(request=request) + client.list_resource_allowances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_update_resource_allowance_use_cached_wrapped_rpc(): +def test_list_resource_allowances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6066,7 +5834,7 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_resource_allowance + client._transport.list_resource_allowances in client._transport._wrapped_methods ) @@ -6076,15 +5844,15 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_resource_allowance + client._transport.list_resource_allowances ] = mock_rpc request = {} - client.update_resource_allowance(request) + client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_resource_allowance(request) + client.list_resource_allowances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6092,7 +5860,7 @@ def test_update_resource_allowance_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_resource_allowance_empty_call_async(): +async def test_list_resource_allowances_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = BatchServiceAsyncClient( @@ -6102,23 +5870,23 @@ async def test_update_resource_allowance_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.update_resource_allowance() + response = await client.list_resource_allowances() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest() + assert args[0] == batch.ListResourceAllowancesRequest() @pytest.mark.asyncio -async def test_update_resource_allowance_async_use_cached_wrapped_rpc( +async def test_list_resource_allowances_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6135,23 +5903,23 @@ async def test_update_resource_allowance_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_resource_allowance + client._client._transport.list_resource_allowances in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[ - client._client._transport.update_resource_allowance + client._client._transport.list_resource_allowances ] = mock_object request = {} - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert mock_object.call_count == 1 - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6159,8 +5927,8 @@ async def test_update_resource_allowance_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_resource_allowance_async( - transport: str = "grpc_asyncio", request_type=batch.UpdateResourceAllowanceRequest +async def test_list_resource_allowances_async( + transport: str = "grpc_asyncio", request_type=batch.ListResourceAllowancesRequest ): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6173,51 +5941,51 @@ async def test_update_resource_allowance_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance( - name="name_value", - uid="uid_value", + batch.ListResourceAllowancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) ) - response = await client.update_resource_allowance(request) + response = await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == "name_value" - assert response.uid == "uid_value" + assert isinstance(response, pagers.ListResourceAllowancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_update_resource_allowance_async_from_dict(): - await test_update_resource_allowance_async(request_type=dict) +async def test_list_resource_allowances_async_from_dict(): + await test_list_resource_allowances_async(request_type=dict) -def test_update_resource_allowance_field_headers(): +def test_list_resource_allowances_field_headers(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() - request.resource_allowance.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.update_resource_allowance(request) + call.return_value = batch.ListResourceAllowancesResponse() + client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6228,30 +5996,30 @@ def test_update_resource_allowance_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource_allowance.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_resource_allowance_field_headers_async(): +async def test_list_resource_allowances_field_headers_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = batch.UpdateResourceAllowanceRequest() + request = batch.ListResourceAllowancesRequest() - request.resource_allowance.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListResourceAllowancesResponse() ) - await client.update_resource_allowance(request) + await client.list_resource_allowances(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6262,53 +6030,37 @@ async def test_update_resource_allowance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource_allowance.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_resource_allowance_flattened(): +def test_list_resource_allowances_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListResourceAllowancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_resource_allowance( - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resource_allowances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_update_resource_allowance_flattened_error(): +def test_list_resource_allowances_flattened_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6316,68 +6068,45 @@ def test_update_resource_allowance_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_resource_allowances( + batch.ListResourceAllowancesRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_update_resource_allowance_flattened_async(): +async def test_list_resource_allowances_flattened_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_resource_allowance), "__call__" + type(client.transport.list_resource_allowances), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() + call.return_value = batch.ListResourceAllowancesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcb_resource_allowance.ResourceAllowance() + batch.ListResourceAllowancesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.update_resource_allowance( - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.list_resource_allowances( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) - ) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_resource_allowance_flattened_error_async(): +async def test_list_resource_allowances_flattened_error_async(): client = BatchServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6385,64 +6114,697 @@ async def test_update_resource_allowance_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance( - usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( - spec=gcb_resource_allowance.UsageResourceAllowanceSpec( - type_="type__value" - ) - ) + await client.list_resource_allowances( + batch.ListResourceAllowancesRequest(), + parent="parent_value", + ) + + +def test_list_resource_allowances_pager(transport_name: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, ) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_resource_allowances( + request={}, retry=retry, timeout=timeout + ) -@pytest.mark.parametrize( - "request_type", - [ - batch.CreateJobRequest, - dict, - ], -) -def test_create_job_rest(request_type): + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource_allowance.ResourceAllowance) for i in results) + + +def test_list_resource_allowances_pages(transport_name: str = "grpc"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["job"] = { - "name": "name_value", - "uid": "uid_value", - "priority": 898, - "task_groups": [ - { - "name": "name_value", - "task_spec": { - "runnables": [ - { - "container": { - "image_uri": "image_uri_value", - "commands": ["commands_value1", "commands_value2"], - "entrypoint": "entrypoint_value", - "volumes": ["volumes_value1", "volumes_value2"], - "options": "options_value", - "block_external_network": True, - "username": "username_value", - "password": "password_value", - "enable_image_streaming": True, - }, - "script": {"path": "path_value", "text": "text_value"}, - "barrier": {"name": "name_value"}, - "display_name": "display_name_value", - "ignore_exit_status": True, - "background": True, - "always_run": True, - "environment": { - "variables": {}, - "secret_variables": {}, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_resource_allowances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_resource_allowances_async_pager(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_resource_allowances( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, resource_allowance.ResourceAllowance) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_resource_allowances_async_pages(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_allowances), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + next_page_token="abc", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[], + next_page_token="def", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + ], + next_page_token="ghi", + ), + batch.ListResourceAllowancesResponse( + resource_allowances=[ + resource_allowance.ResourceAllowance(), + resource_allowance.ResourceAllowance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_resource_allowances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batch.UpdateResourceAllowanceRequest, + dict, + ], +) +def test_update_resource_allowance(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + response = client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.UpdateResourceAllowanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" + + +def test_update_resource_allowance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_resource_allowance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +def test_update_resource_allowance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.UpdateResourceAllowanceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_resource_allowance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +def test_update_resource_allowance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_resource_allowance + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_resource_allowance + ] = mock_rpc + request = {} + client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_resource_allowance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_resource_allowance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + ) + response = await client.update_resource_allowance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.UpdateResourceAllowanceRequest() + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_resource_allowance + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_resource_allowance + ] = mock_object + + request = {} + await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_resource_allowance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async( + transport: str = "grpc_asyncio", request_type=batch.UpdateResourceAllowanceRequest +): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance( + name="name_value", + uid="uid_value", + ) + ) + response = await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = batch.UpdateResourceAllowanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcb_resource_allowance.ResourceAllowance) + assert response.name == "name_value" + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_update_resource_allowance_async_from_dict(): + await test_update_resource_allowance_async(request_type=dict) + + +def test_update_resource_allowance_field_headers(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batch.UpdateResourceAllowanceRequest() + + request.resource_allowance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value = gcb_resource_allowance.ResourceAllowance() + client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource_allowance.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_resource_allowance_field_headers_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batch.UpdateResourceAllowanceRequest() + + request.resource_allowance.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance() + ) + await client.update_resource_allowance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource_allowance.name=name_value", + ) in kw["metadata"] + + +def test_update_resource_allowance_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_resource_allowance( + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_resource_allowance_flattened_error(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_resource_allowance( + batch.UpdateResourceAllowanceRequest(), + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_resource_allowance_flattened_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_resource_allowance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcb_resource_allowance.ResourceAllowance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcb_resource_allowance.ResourceAllowance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_resource_allowance( + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource_allowance + mock_val = gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_resource_allowance_flattened_error_async(): + client = BatchServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_resource_allowance( + batch.UpdateResourceAllowanceRequest(), + resource_allowance=gcb_resource_allowance.ResourceAllowance( + usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance( + spec=gcb_resource_allowance.UsageResourceAllowanceSpec( + type_="type__value" + ) + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batch.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["job"] = { + "name": "name_value", + "uid": "uid_value", + "priority": 898, + "task_groups": [ + { + "name": "name_value", + "task_spec": { + "runnables": [ + { + "container": { + "image_uri": "image_uri_value", + "commands": ["commands_value1", "commands_value2"], + "entrypoint": "entrypoint_value", + "volumes": ["volumes_value1", "volumes_value2"], + "options": "options_value", + "block_external_network": True, + "username": "username_value", + "password": "password_value", + "enable_image_streaming": True, + }, + "script": {"path": "path_value", "text": "text_value"}, + "barrier": {"name": "name_value"}, + "display_name": "display_name_value", + "ignore_exit_status": True, + "background": True, + "always_run": True, + "environment": { + "variables": {}, + "secret_variables": {}, "encrypted_variables": { "key_name": "key_name_value", "cipher_text": "cipher_text_value", @@ -6538,6 +6900,7 @@ def test_create_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "instance_templates": [ @@ -6574,143 +6937,470 @@ def test_create_job_rest(request_type): "run_as_non_root": True, "service_account": {}, } - ], - "scheduling_policy": 1, - "dependencies": [{"items": {}}], - "allocation_policy": {}, - "labels": {}, - "status": { - "state": 1, - "status_events": [ - { - "type_": "type__value", - "description": "description_value", - "event_time": {"seconds": 751, "nanos": 543}, - "task_execution": { - "exit_code": 948, - "stderr_snippet": "stderr_snippet_value", - }, - "task_state": 1, - } + ], + "scheduling_policy": 1, + "dependencies": [{"items": {}}], + "allocation_policy": {}, + "labels": {}, + "status": { + "state": 1, + "status_events": [ + { + "type_": "type__value", + "description": "description_value", + "event_time": {"seconds": 751, "nanos": 543}, + "task_execution": { + "exit_code": 948, + "stderr_snippet": "stderr_snippet_value", + }, + "task_state": 1, + } + ], + "task_groups": {}, + "run_duration": {}, + "resource_usage": {"core_hours": 0.1081}, + }, + "notification": { + "pubsub_topic": "pubsub_topic_value", + "message": {"type_": 1, "new_job_state": 1, "new_task_state": 1}, + }, + "create_time": {}, + "update_time": {}, + "logs_policy": { + "destination": 1, + "logs_path": "logs_path_value", + "cloud_logging_option": {"use_generic_task_monitored_resource": True}, + }, + "notifications": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = batch.CreateJobRequest.meta.fields["job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["job"][field])): + del request_init["job"][field][i][subfield] + else: + del request_init["job"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcb_job.Job( + name="name_value", + uid="uid_value", + priority=898, + scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcb_job.Job) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.priority == 898 + assert ( + response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + ) + + +def test_create_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + + request = {} + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): + transport_class = transports.BatchServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "job_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcb_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_job_rest_unset_required_fields(): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "jobId", + "requestId", + ) + ) + & set( + ( + "parent", + "job", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchServiceRestInterceptor(), + ) + client = BatchServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "pre_create_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcb_job.Job.to_json(gcb_job.Job()) + + request = batch.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcb_job.Job() + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), ], - "task_groups": {}, - "run_duration": {}, - "resource_usage": {"core_hours": 0.1081}, - }, - "notification": { - "pubsub_topic": "pubsub_topic_value", - "message": {"type_": 1, "new_job_state": 1, "new_task_state": 1}, - }, - "create_time": {}, - "update_time": {}, - "logs_policy": { - "destination": 1, - "logs_path": "logs_path_value", - "cloud_logging_option": {"use_generic_task_monitored_resource": True}, - }, - "notifications": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_rest_bad_request( + transport: str = "rest", request_type=batch.CreateJobRequest +): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) - # Determine if the message type is proto-plus or protobuf - test_field = batch.CreateJobRequest.meta.fields["job"] - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] +def test_create_job_rest_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcb_job.Job() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job=gcb_job.Job(name="name_value"), + job_id="job_id_value", + ) + mock_args.update(sample_request) - subfields_not_in_runtime = [] + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcb_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.create_job(**mock_args) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] + +def test_create_job_rest_flattened_error(transport: str = "rest"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job( + batch.CreateJobRequest(), + parent="parent_value", + job=gcb_job.Job(name="name_value"), + job_id="job_id_value", + ) + + +def test_create_job_rest_error(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batch.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcb_job.Job( + return_value = job.Job( name="name_value", uid="uid_value", priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, + scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_job(request) + response = client.get_job(request) # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) + assert isinstance(response, job.Job) assert response.name == "name_value" assert response.uid == "uid_value" assert response.priority == 898 - assert ( - response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - ) + assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE -def test_create_job_rest_use_cached_wrapped_rpc(): +def test_get_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6724,33 +7414,33 @@ def test_create_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_job in client._transport._wrapped_methods + assert client._transport.get_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_job] = mock_rpc + client._transport._wrapped_methods[client._transport.get_job] = mock_rpc request = {} - client.create_job(request) + client.get_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_job(request) + client.get_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): +def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): transport_class = transports.BatchServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6761,28 +7451,21 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) + ).get_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "job_id", - "request_id", - ) - ) + ).get_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6791,7 +7474,7 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() + return_value = job.Job() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6803,53 +7486,39 @@ def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_job(request) + response = client.get_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_job_rest_unset_required_fields(): +def test_get_job_rest_unset_required_fields(): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_job._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "jobId", - "requestId", - ) - ) - & set( - ( - "parent", - "job", - ) - ) - ) + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_rest_interceptors(null_interceptor): +def test_get_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6862,13 +7531,13 @@ def test_create_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BatchServiceRestInterceptor, "post_create_job" + transports.BatchServiceRestInterceptor, "post_get_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_create_job" + transports.BatchServiceRestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) + pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6879,17 +7548,17 @@ def test_create_job_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcb_job.Job.to_json(gcb_job.Job()) + req.return_value._content = job.Job.to_json(job.Job()) - request = batch.CreateJobRequest() + request = batch.GetJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcb_job.Job() + post.return_value = job.Job() - client.create_job( + client.get_job( request, metadata=[ ("key", "val"), @@ -6901,8 +7570,8 @@ def test_create_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_job_rest_bad_request( - transport: str = "rest", request_type=batch.CreateJobRequest +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=batch.GetJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6910,7 +7579,7 @@ def 
test_create_job_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6922,10 +7591,10 @@ def test_create_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_job(request) + client.get_job(request) -def test_create_job_rest_flattened(): +def test_get_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6934,16 +7603,14 @@ def test_create_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() + return_value = job.Job() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - job=gcb_job.Job(name="name_value"), - job_id="job_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -6951,24 +7618,24 @@ def test_create_job_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) + return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_job(**mock_args) + client.get_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, + "%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1], ) -def test_create_job_rest_flattened_error(transport: str = "rest"): +def test_get_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6977,15 +7644,13 @@ def test_create_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_job( - batch.CreateJobRequest(), - parent="parent_value", - job=gcb_job.Job(name="name_value"), - job_id="job_id_value", + client.get_job( + batch.GetJobRequest(), + name="name_value", ) -def test_create_job_rest_error(): +def test_get_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6994,11 +7659,11 @@ def test_create_job_rest_error(): @pytest.mark.parametrize( "request_type", [ - batch.GetJobRequest, + batch.DeleteJobRequest, dict, ], ) -def test_get_job_rest(request_type): +def test_delete_job_rest(request_type): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7011,33 +7676,22 @@ def test_get_job_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = job.Job( - name="name_value", - uid="uid_value", - priority=898, - scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_job(request) + response = client.delete_job(request) # Establish that the response is the type that we expect. - assert isinstance(response, job.Job) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.priority == 898 - assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE + assert response.operation.name == "operations/spam" -def test_get_job_rest_use_cached_wrapped_rpc(): +def test_delete_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7051,111 +7705,34 @@ def test_get_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods + assert client._transport.delete_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc request = {} - client.get_job(request) + client.delete_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = job.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_job(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_job_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): +def test_delete_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7168,13 +7745,15 @@ def test_get_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BatchServiceRestInterceptor, "post_get_job" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BatchServiceRestInterceptor, "post_delete_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_get_job" + transports.BatchServiceRestInterceptor, "pre_delete_job" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) + pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7185,17 +7764,19 @@ def test_get_job_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = job.Job.to_json(job.Job()) + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = batch.GetJobRequest() + request = batch.DeleteJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = job.Job() + post.return_value = operations_pb2.Operation() - client.get_job( + client.delete_job( request, metadata=[ ("key", "val"), @@ -7207,8 +7788,8 @@ def test_get_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_job_rest_bad_request( - transport: str = "rest", request_type=batch.GetJobRequest +def test_delete_job_rest_bad_request( + transport: str = "rest", request_type=batch.DeleteJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7228,10 +7809,10 @@ def test_get_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_job(request) + client.delete_job(request) -def test_get_job_rest_flattened(): +def test_delete_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7240,7 +7821,7 @@ def test_get_job_rest_flattened(): # Mock the http request call within the 
method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = job.Job() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} @@ -7254,13 +7835,11 @@ def test_get_job_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_job(**mock_args) + client.delete_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -7272,7 +7851,7 @@ def test_get_job_rest_flattened(): ) -def test_get_job_rest_flattened_error(transport: str = "rest"): +def test_delete_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7281,13 +7860,13 @@ def test_get_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), + client.delete_job( + batch.DeleteJobRequest(), name="name_value", ) -def test_get_job_rest_error(): +def test_delete_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7296,11 +7875,11 @@ def test_get_job_rest_error(): @pytest.mark.parametrize( "request_type", [ - batch.DeleteJobRequest, + batch.CancelJobRequest, dict, ], ) -def test_delete_job_rest(request_type): +def test_cancel_job_rest(request_type): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7322,13 +7901,13 @@ def test_delete_job_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_job(request) + response = client.cancel_job(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_job_rest_use_cached_wrapped_rpc(): +def test_cancel_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7342,17 +7921,17 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.delete_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -7361,15 +7940,94 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_job(request) + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): + transport_class = transports.BatchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_job_rest_unset_required_fields(): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_rest_interceptors(null_interceptor): +def test_cancel_job_rest_interceptors(null_interceptor): transport = transports.BatchServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7384,13 +8042,13 @@ def test_delete_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BatchServiceRestInterceptor, "post_delete_job" + transports.BatchServiceRestInterceptor, "post_cancel_job" ) as post, mock.patch.object( - transports.BatchServiceRestInterceptor, "pre_delete_job" + transports.BatchServiceRestInterceptor, "pre_cancel_job" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) + pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7405,7 +8063,7 @@ def test_delete_job_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = batch.DeleteJobRequest() + request = batch.CancelJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -7413,7 +8071,7 @@ def test_delete_job_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_job( + client.cancel_job( request, metadata=[ ("key", "val"), @@ -7425,8 +8083,8 @@ def test_delete_job_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_job_rest_bad_request( - transport: str = "rest", request_type=batch.DeleteJobRequest +def test_cancel_job_rest_bad_request( + transport: str = "rest", request_type=batch.CancelJobRequest ): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7446,10 +8104,10 @@ def test_delete_job_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_job(request) + client.cancel_job(request) -def test_delete_job_rest_flattened(): +def test_cancel_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7476,19 +8134,20 @@ def test_delete_job_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_job(**mock_args) + client.cancel_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + "%s/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel" + % client.transport._host, args[1], ) -def test_delete_job_rest_flattened_error(transport: str = "rest"): +def test_cancel_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7497,13 +8156,13 @@ def test_delete_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_job( - batch.DeleteJobRequest(), + client.cancel_job( + batch.CancelJobRequest(), name="name_value", ) -def test_delete_job_rest_error(): +def test_cancel_job_rest_error(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7649,6 +8308,7 @@ def test_update_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "instance_templates": [ @@ -11106,6 +11766,7 @@ def test_batch_service_base_transport(): "create_job", "get_job", "delete_job", + "cancel_job", "update_job", "list_jobs", "get_task", @@ -11407,6 +12068,9 @@ def test_batch_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_job._session session2 = client2.transport.delete_job._session assert session1 != session2 + session1 = client1.transport.cancel_job._session + session2 = client2.transport.cancel_job._session + assert session1 != session2 session1 = client1.transport.update_job._session session2 = client2.transport.update_job._session assert session1 != session2
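
The tests above exercise the CancelJob RPC that this change adds to the v1alpha surface (request/response types, REST transcoding to `:cancel`, and the long-running operation plumbing). For orientation, the following is a minimal, illustrative sketch of how a caller might invoke the new method once it ships; it is not part of the diff. The project, region, and job identifiers are placeholders, and it assumes a build of google-cloud-batch whose `batch_v1alpha.BatchServiceClient` exposes `cancel_job` as shown in these tests.

# Illustrative sketch only -- not generated code from this change.
from google.cloud import batch_v1alpha


def cancel_batch_job(project_id: str, region: str, job_id: str) -> None:
    client = batch_v1alpha.BatchServiceClient()

    # CancelJob is a long-running operation; the flattened `name` argument is
    # the fully qualified job resource name, mirroring the flattened-field
    # tests above (passing both a request object and `name` raises ValueError).
    operation = client.cancel_job(
        name=f"projects/{project_id}/locations/{region}/jobs/{job_id}"
    )

    # Block until the cancellation completes (raises if the operation fails).
    operation.result()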