diff --git a/google/cloud/dataflow_v1beta3/gapic_metadata.json b/google/cloud/dataflow_v1beta3/gapic_metadata.json index 8d17272..ab8a5b6 100644 --- a/google/cloud/dataflow_v1beta3/gapic_metadata.json +++ b/google/cloud/dataflow_v1beta3/gapic_metadata.json @@ -26,6 +26,16 @@ ] } } + }, + "rest": { + "libraryClient": "FlexTemplatesServiceClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } } } }, @@ -110,6 +120,46 @@ ] } } + }, + "rest": { + "libraryClient": "JobsV1Beta3Client", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } } } }, @@ -134,6 +184,16 @@ ] } } + }, + "rest": { + "libraryClient": "MessagesV1Beta3Client", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } } } }, @@ -178,6 +238,26 @@ ] } } + }, + "rest": { + "libraryClient": "MetricsV1Beta3Client", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } } } }, @@ -222,6 +302,26 @@ ] } } + }, + "rest": { + "libraryClient": "SnapshotsV1Beta3Client", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } } } }, @@ -266,6 +366,26 @@ ] } } + }, + "rest": { + "libraryClient": "TemplatesServiceClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { 
+ "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } } } } diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index 2873405..a443b47 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -39,6 +39,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, FlexTemplatesServiceTransport from .transports.grpc import FlexTemplatesServiceGrpcTransport from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .transports.rest import FlexTemplatesServiceRestTransport class FlexTemplatesServiceClientMeta(type): @@ -54,6 +55,7 @@ class FlexTemplatesServiceClientMeta(type): ) # type: Dict[str, Type[FlexTemplatesServiceTransport]] _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = FlexTemplatesServiceRestTransport def get_transport_class( cls, @@ -327,6 +329,9 @@ def __init__( transport (Union[str, FlexTemplatesServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py index 1568f78..ff6c45e 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py @@ -19,6 +19,7 @@ from .base import FlexTemplatesServiceTransport from .grpc import FlexTemplatesServiceGrpcTransport from .grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .rest import FlexTemplatesServiceRestInterceptor, FlexTemplatesServiceRestTransport # Compile a registry of transports. _transport_registry = ( @@ -26,9 +27,12 @@ ) # type: Dict[str, Type[FlexTemplatesServiceTransport]] _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = FlexTemplatesServiceRestTransport __all__ = ( "FlexTemplatesServiceTransport", "FlexTemplatesServiceGrpcTransport", "FlexTemplatesServiceGrpcAsyncIOTransport", + "FlexTemplatesServiceRestTransport", + "FlexTemplatesServiceRestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py new file mode 100644 index 0000000..1986e26 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import templates + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import FlexTemplatesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FlexTemplatesServiceRestInterceptor: + """Interceptor for FlexTemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FlexTemplatesServiceRestTransport. + + .. code-block:: python + class MyCustomFlexTemplatesServiceInterceptor(FlexTemplatesServiceRestInterceptor): + def pre_launch_flex_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_flex_template(response): + logging.log(f"Received response: {response}") + + transport = FlexTemplatesServiceRestTransport(interceptor=MyCustomFlexTemplatesServiceInterceptor()) + client = FlexTemplatesServiceClient(transport=transport) + + + """ + + def pre_launch_flex_template( + self, + request: templates.LaunchFlexTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.LaunchFlexTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the FlexTemplatesService server. + """ + return request, metadata + + def post_launch_flex_template( + self, response: templates.LaunchFlexTemplateResponse + ) -> templates.LaunchFlexTemplateResponse: + """Post-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the response + after it is returned by the FlexTemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FlexTemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FlexTemplatesServiceRestInterceptor + + +class FlexTemplatesServiceRestTransport(FlexTemplatesServiceTransport): + """REST backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FlexTemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FlexTemplatesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _LaunchFlexTemplate(FlexTemplatesServiceRestStub): + def __hash__(self): + return hash("LaunchFlexTemplate") + + def __call__( + self, + request: templates.LaunchFlexTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Call the 
launch flex template method over HTTP. + + Args: + request (~.templates.LaunchFlexTemplateRequest): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/flexTemplates:launch", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_launch_flex_template( + request, metadata + ) + pb_request = templates.LaunchFlexTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchFlexTemplateResponse() + pb_resp = templates.LaunchFlexTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_flex_template(resp) + return resp + + @property + def launch_flex_template( + self, + ) -> Callable[ + [templates.LaunchFlexTemplateRequest], templates.LaunchFlexTemplateResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LaunchFlexTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FlexTemplatesServiceRestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index 0ab1cb5..d216313 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -43,6 +43,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, JobsV1Beta3Transport from .transports.grpc import JobsV1Beta3GrpcTransport from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .transports.rest import JobsV1Beta3RestTransport class JobsV1Beta3ClientMeta(type): @@ -56,6 +57,7 @@ class JobsV1Beta3ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] _transport_registry["grpc"] = JobsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = JobsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, JobsV1Beta3Transport]): The transport to use. 
If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py index 75f38da..92553cc 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import JobsV1Beta3Transport from .grpc import JobsV1Beta3GrpcTransport from .grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .rest import JobsV1Beta3RestInterceptor, JobsV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] _transport_registry["grpc"] = JobsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = JobsV1Beta3RestTransport __all__ = ( "JobsV1Beta3Transport", "JobsV1Beta3GrpcTransport", "JobsV1Beta3GrpcAsyncIOTransport", + "JobsV1Beta3RestTransport", + "JobsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py new file mode 100644 index 0000000..1e8598a --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py @@ -0,0 +1,933 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs, snapshots + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import JobsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class JobsV1Beta3RestInterceptor: + """Interceptor for JobsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the JobsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomJobsV1Beta3Interceptor(JobsV1Beta3RestInterceptor): + def pre_aggregated_list_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list_jobs(response): + logging.log(f"Received response: {response}") + + def pre_check_active_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_active_jobs(response): + logging.log(f"Received response: {response}") + + def pre_create_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job(response): + logging.log(f"Received response: {response}") + + def pre_get_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(response): + logging.log(f"Received response: {response}") + + def pre_list_jobs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(response): + logging.log(f"Received response: {response}") + + def pre_snapshot_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_snapshot_job(response): + logging.log(f"Received response: {response}") + + def pre_update_job(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job(response): + logging.log(f"Received response: {response}") + + transport = JobsV1Beta3RestTransport(interceptor=MyCustomJobsV1Beta3Interceptor()) + client = JobsV1Beta3Client(transport=transport) + + + """ + + def pre_aggregated_list_jobs( + 
self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_aggregated_list_jobs( + self, response: jobs.ListJobsResponse + ) -> jobs.ListJobsResponse: + """Post-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_create_job( + self, request: jobs.CreateJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.CreateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_create_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_get_job( + self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_get_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_list_jobs( + self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_snapshot_job( + self, request: jobs.SnapshotJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.SnapshotJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_snapshot_job(self, response: snapshots.Snapshot) -> snapshots.Snapshot: + """Post-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_update_job( + self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_update_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class JobsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: JobsV1Beta3RestInterceptor + + +class JobsV1Beta3RestTransport(JobsV1Beta3Transport): + """REST backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[JobsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or JobsV1Beta3RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedListJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("AggregatedListJobs") + + def __call__( + self, + request: jobs.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.ListJobsResponse: + r"""Call the aggregated list jobs method over HTTP. + + Args: + request (~.jobs.ListJobsRequest): + The request object. Request to list Cloud Dataflow jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.ListJobsResponse: + Response to a request to list Cloud + Dataflow jobs in a project. This might + be a partial response, depending on the + page size in the ListJobsRequest. + However, if the project does not have + any jobs, an instance of + ListJobsResponse is not returned and the + requests's response body is empty {}. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs:aggregated", + }, + ] + request, metadata = self._interceptor.pre_aggregated_list_jobs( + request, metadata + ) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list_jobs(resp) + return resp + + class _CheckActiveJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CheckActiveJobs") + + def __call__( + self, + request: jobs.CheckActiveJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.CheckActiveJobsResponse: + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation." 
+ ) + + class _CreateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CreateJob") + + def __call__( + self, + request: jobs.CreateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the create job method over HTTP. + + Args: + request (~.jobs.CreateJobRequest): + The request object. Request to create a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs", + "body": "job", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/jobs", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_create_job(request, metadata) + pb_request = jobs.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + return resp + + class _GetJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("GetJob") + + def __call__( + self, + request: jobs.GetJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.jobs.GetJobRequest): + The request object. Request to get the state of a Cloud + Dataflow job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}", + }, + ] + request, metadata = self._interceptor.pre_get_job(request, metadata) + pb_request = jobs.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + return resp + + class _ListJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("ListJobs") + + def __call__( + self, + request: jobs.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.ListJobsResponse: + r"""Call the list jobs method over HTTP. + + Args: + request (~.jobs.ListJobsRequest): + The request object. Request to list Cloud Dataflow jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.ListJobsResponse: + Response to a request to list Cloud + Dataflow jobs in a project. This might + be a partial response, depending on the + page size in the ListJobsRequest. + However, if the project does not have + any jobs, an instance of + ListJobsResponse is not returned and the + requests's response body is empty {}. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs", + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _SnapshotJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("SnapshotJob") + + def __call__( + self, + request: jobs.SnapshotJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Call the snapshot job method over HTTP. + + Args: + request (~.jobs.SnapshotJobRequest): + The request object. Request to create a snapshot of a + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot", + "body": "*", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}:snapshot", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_snapshot_job(request, metadata) + pb_request = jobs.SnapshotJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_snapshot_job(resp) + return resp + + class _UpdateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("UpdateJob") + + def __call__( + self, + request: jobs.UpdateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.jobs.UpdateJobRequest): + The request object. Request to update a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}", + "body": "job", + }, + { + "method": "put", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = jobs.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def aggregated_list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def check_active_jobs( + self, + ) -> Callable[[jobs.CheckActiveJobsRequest], jobs.CheckActiveJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckActiveJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job(self) -> Callable[[jobs.CreateJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def snapshot_job(self) -> Callable[[jobs.SnapshotJobRequest], snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SnapshotJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("JobsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index eb1305d..435a92f 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -40,6 +40,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, MessagesV1Beta3Transport from .transports.grpc import MessagesV1Beta3GrpcTransport from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .transports.rest import MessagesV1Beta3RestTransport class MessagesV1Beta3ClientMeta(type): @@ -55,6 +56,7 @@ class MessagesV1Beta3ClientMeta(type): ) # type: Dict[str, Type[MessagesV1Beta3Transport]] _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MessagesV1Beta3RestTransport def get_transport_class( cls, @@ -328,6 +330,9 @@ def __init__( transport (Union[str, MessagesV1Beta3Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py index 401cb80..67894a0 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import MessagesV1Beta3Transport from .grpc import MessagesV1Beta3GrpcTransport from .grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .rest import MessagesV1Beta3RestInterceptor, MessagesV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = MessagesV1Beta3RestTransport __all__ = ( "MessagesV1Beta3Transport", "MessagesV1Beta3GrpcTransport", "MessagesV1Beta3GrpcAsyncIOTransport", + "MessagesV1Beta3RestTransport", + "MessagesV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py new file mode 100644 index 0000000..c08b7db --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py @@ -0,0 +1,309 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import messages + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MessagesV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MessagesV1Beta3RestInterceptor: + """Interceptor for MessagesV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MessagesV1Beta3RestTransport. + + .. code-block:: python + class MyCustomMessagesV1Beta3Interceptor(MessagesV1Beta3RestInterceptor): + def pre_list_job_messages(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_messages(response): + logging.log(f"Received response: {response}") + + transport = MessagesV1Beta3RestTransport(interceptor=MyCustomMessagesV1Beta3Interceptor()) + client = MessagesV1Beta3Client(transport=transport) + + + """ + + def pre_list_job_messages( + self, + request: messages.ListJobMessagesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[messages.ListJobMessagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the request or metadata + before they are sent to the MessagesV1Beta3 server. + """ + return request, metadata + + def post_list_job_messages( + self, response: messages.ListJobMessagesResponse + ) -> messages.ListJobMessagesResponse: + """Post-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the response + after it is returned by the MessagesV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MessagesV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MessagesV1Beta3RestInterceptor + + +class MessagesV1Beta3RestTransport(MessagesV1Beta3Transport): + """REST backend transport for MessagesV1Beta3. + + The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MessagesV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MessagesV1Beta3RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _ListJobMessages(MessagesV1Beta3RestStub): + def __hash__(self): + return hash("ListJobMessages") + + def __call__( + self, + request: messages.ListJobMessagesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> messages.ListJobMessagesResponse: + r"""Call the list job messages method 
over HTTP. + + Args: + request (~.messages.ListJobMessagesRequest): + The request object. Request to list job messages. Up to max_results messages + will be returned in the time range specified starting + with the oldest messages first. If no time range is + specified the results with start with the oldest + message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.messages.ListJobMessagesResponse: + Response to a request to list job + messages. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/messages", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}/messages", + }, + ] + request, metadata = self._interceptor.pre_list_job_messages( + request, metadata + ) + pb_request = messages.ListJobMessagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = messages.ListJobMessagesResponse() + pb_resp = messages.ListJobMessagesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_messages(resp) + return resp + + @property + def list_job_messages( + self, + ) -> Callable[[messages.ListJobMessagesRequest], messages.ListJobMessagesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobMessages(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MessagesV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index f4fa06a..b032f51 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, MetricsV1Beta3Transport from .transports.grpc import MetricsV1Beta3GrpcTransport from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .transports.rest import MetricsV1Beta3RestTransport class MetricsV1Beta3ClientMeta(type): @@ -57,6 +58,7 @@ class MetricsV1Beta3ClientMeta(type): ) # type: Dict[str, Type[MetricsV1Beta3Transport]] _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MetricsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, MetricsV1Beta3Transport]): The transport to use. 
If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py index fd061be..38b52b0 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import MetricsV1Beta3Transport from .grpc import MetricsV1Beta3GrpcTransport from .grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .rest import MetricsV1Beta3RestInterceptor, MetricsV1Beta3RestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = MetricsV1Beta3RestTransport __all__ = ( "MetricsV1Beta3Transport", "MetricsV1Beta3GrpcTransport", "MetricsV1Beta3GrpcAsyncIOTransport", + "MetricsV1Beta3RestTransport", + "MetricsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py new file mode 100644 index 0000000..f889276 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import metrics + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MetricsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MetricsV1Beta3RestInterceptor: + """Interceptor for MetricsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MetricsV1Beta3RestTransport. + + .. 
code-block:: python + class MyCustomMetricsV1Beta3Interceptor(MetricsV1Beta3RestInterceptor): + def pre_get_job_execution_details(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_execution_details(response): + logging.log(f"Received response: {response}") + + def pre_get_job_metrics(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_metrics(response): + logging.log(f"Received response: {response}") + + def pre_get_stage_execution_details(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stage_execution_details(response): + logging.log(f"Received response: {response}") + + transport = MetricsV1Beta3RestTransport(interceptor=MyCustomMetricsV1Beta3Interceptor()) + client = MetricsV1Beta3Client(transport=transport) + + + """ + + def pre_get_job_execution_details( + self, + request: metrics.GetJobExecutionDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metrics.GetJobExecutionDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_execution_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_job_execution_details( + self, response: metrics.JobExecutionDetails + ) -> metrics.JobExecutionDetails: + """Post-rpc interceptor for get_job_execution_details + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_get_job_metrics( + self, request: metrics.GetJobMetricsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[metrics.GetJobMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_job_metrics(self, response: metrics.JobMetrics) -> metrics.JobMetrics: + """Post-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_get_stage_execution_details( + self, + request: metrics.GetStageExecutionDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[metrics.GetStageExecutionDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_stage_execution_details( + self, response: metrics.StageExecutionDetails + ) -> metrics.StageExecutionDetails: + """Post-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetricsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetricsV1Beta3RestInterceptor + + +class MetricsV1Beta3RestTransport(MetricsV1Beta3Transport): + """REST backend transport for MetricsV1Beta3. + + The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MetricsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MetricsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetJobExecutionDetails(MetricsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("GetJobExecutionDetails")
+
+        def __call__(
+            self,
+            request: metrics.GetJobExecutionDetailsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> metrics.JobExecutionDetails:
+            r"""Call the get job execution details method over HTTP.
+ + Args: + request (~.metrics.GetJobExecutionDetailsRequest): + The request object. Request to get job execution details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.JobExecutionDetails: + Information about the execution of a + job. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/executionDetails", + }, + ] + request, metadata = self._interceptor.pre_get_job_execution_details( + request, metadata + ) + pb_request = metrics.GetJobExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobExecutionDetails() + pb_resp = metrics.JobExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_execution_details(resp) + return resp + + class _GetJobMetrics(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetJobMetrics") + + def __call__( + self, + request: metrics.GetJobMetricsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobMetrics: + r"""Call the get job metrics method over HTTP. + + Args: + request (~.metrics.GetJobMetricsRequest): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; + time-series data can be queried for them + (under the same metric names) from Cloud + Monitoring. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/metrics", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/jobs/{job_id}/metrics", + }, + ] + request, metadata = self._interceptor.pre_get_job_metrics(request, metadata) + pb_request = metrics.GetJobMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobMetrics() + pb_resp = metrics.JobMetrics.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_metrics(resp) + return resp + + class _GetStageExecutionDetails(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetStageExecutionDetails") + + def __call__( + self, + request: metrics.GetStageExecutionDetailsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.StageExecutionDetails: + r"""Call the get stage execution + details method over HTTP. + + Args: + request (~.metrics.GetStageExecutionDetailsRequest): + The request object. 
Request to get information about a + particular execution stage of a job. + Currently only tracked for Batch jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.StageExecutionDetails: + Information about the workers and + work items within a stage. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/stages/{stage_id}/executionDetails", + }, + ] + request, metadata = self._interceptor.pre_get_stage_execution_details( + request, metadata + ) + pb_request = metrics.GetStageExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.StageExecutionDetails() + pb_resp = metrics.StageExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stage_execution_details(resp) + return resp + + @property + def get_job_execution_details( + self, + ) -> Callable[[metrics.GetJobExecutionDetailsRequest], metrics.JobExecutionDetails]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_metrics( + self, + ) -> Callable[[metrics.GetJobMetricsRequest], metrics.JobMetrics]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stage_execution_details( + self, + ) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], metrics.StageExecutionDetails + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetStageExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MetricsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index f96c4e4..015ead3 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, SnapshotsV1Beta3Transport from .transports.grpc import SnapshotsV1Beta3GrpcTransport from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .transports.rest import SnapshotsV1Beta3RestTransport class SnapshotsV1Beta3ClientMeta(type): @@ -57,6 +58,7 @@ class SnapshotsV1Beta3ClientMeta(type): ) # type: Dict[str, Type[SnapshotsV1Beta3Transport]] _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = SnapshotsV1Beta3RestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, SnapshotsV1Beta3Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py index 1b1001f..4e2cb30 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py @@ -19,14 +19,18 @@ from .base import SnapshotsV1Beta3Transport from .grpc import SnapshotsV1Beta3GrpcTransport from .grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .rest import SnapshotsV1Beta3RestInterceptor, SnapshotsV1Beta3RestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport +_transport_registry["rest"] = SnapshotsV1Beta3RestTransport __all__ = ( "SnapshotsV1Beta3Transport", "SnapshotsV1Beta3GrpcTransport", "SnapshotsV1Beta3GrpcAsyncIOTransport", + "SnapshotsV1Beta3RestTransport", + "SnapshotsV1Beta3RestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py new file mode 100644 index 0000000..e92b4df --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import snapshots + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SnapshotsV1Beta3Transport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SnapshotsV1Beta3RestInterceptor: + """Interceptor for SnapshotsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SnapshotsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomSnapshotsV1Beta3Interceptor(SnapshotsV1Beta3RestInterceptor): + def pre_delete_snapshot(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_snapshot(response): + logging.log(f"Received response: {response}") + + def pre_get_snapshot(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_snapshot(response): + logging.log(f"Received response: {response}") + + def pre_list_snapshots(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_snapshots(response): + logging.log(f"Received response: {response}") + + transport = SnapshotsV1Beta3RestTransport(interceptor=MyCustomSnapshotsV1Beta3Interceptor()) + client = SnapshotsV1Beta3Client(transport=transport) + + + """ + + def pre_delete_snapshot( + self, + request: snapshots.DeleteSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[snapshots.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_delete_snapshot( + self, response: snapshots.DeleteSnapshotResponse + ) -> snapshots.DeleteSnapshotResponse: + """Post-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. 
+ """ + return response + + def pre_get_snapshot( + self, request: snapshots.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[snapshots.GetSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_get_snapshot(self, response: snapshots.Snapshot) -> snapshots.Snapshot: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. + """ + return response + + def pre_list_snapshots( + self, + request: snapshots.ListSnapshotsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[snapshots.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_list_snapshots( + self, response: snapshots.ListSnapshotsResponse + ) -> snapshots.ListSnapshotsResponse: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SnapshotsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: SnapshotsV1Beta3RestInterceptor + + +class SnapshotsV1Beta3RestTransport(SnapshotsV1Beta3Transport): + """REST backend transport for SnapshotsV1Beta3. + + Provides methods to manage snapshots of Google Cloud Dataflow + jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SnapshotsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SnapshotsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _DeleteSnapshot(SnapshotsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("DeleteSnapshot")
+
+        def __call__(
+            self,
+            request: snapshots.DeleteSnapshotRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> snapshots.DeleteSnapshotResponse:
+            r"""Call the delete snapshot method over HTTP.
+
+            Args:
+                request (~.snapshots.DeleteSnapshotRequest):
+                    The request object.
Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.DeleteSnapshotResponse: + Response from deleting a snapshot. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}", + }, + { + "method": "delete", + "uri": "/v1b3/projects/{project_id}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) + pb_request = snapshots.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.DeleteSnapshotResponse() + pb_resp = snapshots.DeleteSnapshotResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_snapshot(resp) + return resp + + class _GetSnapshot(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("GetSnapshot") + + def __call__( + self, + request: snapshots.GetSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Call the get snapshot method over HTTP. + + Args: + request (~.snapshots.GetSnapshotRequest): + The request object. Request to get information about a + snapshot + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/snapshots/{snapshot_id}", + }, + ] + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + pb_request = snapshots.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + return resp + + class _ListSnapshots(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("ListSnapshots") + + def __call__( + self, + request: snapshots.ListSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.snapshots.ListSnapshotsRequest): + The request object. Request to list snapshots. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.ListSnapshotsResponse: + List of snapshots. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/snapshots", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/snapshots", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + pb_request = snapshots.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.ListSnapshotsResponse() + pb_resp = snapshots.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + return resp + + @property + def delete_snapshot( + self, + ) -> Callable[[snapshots.DeleteSnapshotRequest], snapshots.DeleteSnapshotResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_snapshot( + self, + ) -> Callable[[snapshots.GetSnapshotRequest], snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_snapshots( + self, + ) -> Callable[[snapshots.ListSnapshotsRequest], snapshots.ListSnapshotsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SnapshotsV1Beta3RestTransport",) diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/google/cloud/dataflow_v1beta3/services/templates_service/client.py index 363deb5..ef7eefc 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -42,6 +42,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, TemplatesServiceTransport from .transports.grpc import TemplatesServiceGrpcTransport from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .transports.rest import TemplatesServiceRestTransport class TemplatesServiceClientMeta(type): @@ -57,6 +58,7 @@ class TemplatesServiceClientMeta(type): ) # type: Dict[str, Type[TemplatesServiceTransport]] _transport_registry["grpc"] = TemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TemplatesServiceRestTransport def get_transport_class( cls, @@ -330,6 +332,9 @@ def __init__( transport (Union[str, TemplatesServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py b/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py index 6cebe12..410d70c 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import TemplatesServiceTransport from .grpc import TemplatesServiceGrpcTransport from .grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .rest import TemplatesServiceRestInterceptor, TemplatesServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] _transport_registry["grpc"] = TemplatesServiceGrpcTransport _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TemplatesServiceRestTransport __all__ = ( "TemplatesServiceTransport", "TemplatesServiceGrpcTransport", "TemplatesServiceGrpcAsyncIOTransport", + "TemplatesServiceRestTransport", + "TemplatesServiceRestInterceptor", ) diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py new file mode 100644 index 0000000..64b68b6 --- /dev/null +++ b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py @@ -0,0 +1,558 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs, templates + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TemplatesServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TemplatesServiceRestInterceptor: + """Interceptor for TemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TemplatesServiceRestTransport. + + .. code-block:: python + class MyCustomTemplatesServiceInterceptor(TemplatesServiceRestInterceptor): + def pre_create_job_from_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_from_template(response): + logging.log(f"Received response: {response}") + + def pre_get_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_template(response): + logging.log(f"Received response: {response}") + + def pre_launch_template(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_template(response): + logging.log(f"Received response: {response}") + + transport = TemplatesServiceRestTransport(interceptor=MyCustomTemplatesServiceInterceptor()) + client = TemplatesServiceClient(transport=transport) + + + """ + + def pre_create_job_from_template( + self, + request: templates.CreateJobFromTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.CreateJobFromTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_create_job_from_template(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_template( + self, request: templates.GetTemplateRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[templates.GetTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_get_template( + self, response: templates.GetTemplateResponse + ) -> templates.GetTemplateResponse: + """Post-rpc interceptor for get_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + + def pre_launch_template( + self, + request: templates.LaunchTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[templates.LaunchTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_launch_template( + self, response: templates.LaunchTemplateResponse + ) -> templates.LaunchTemplateResponse: + """Post-rpc interceptor for launch_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TemplatesServiceRestInterceptor + + +class TemplatesServiceRestTransport(TemplatesServiceTransport): + """REST backend transport for TemplatesService. + + Provides a method to create Cloud Dataflow jobs from + templates. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "dataflow.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TemplatesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateJobFromTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("CreateJobFromTemplate") + + def __call__( + self, + request: templates.CreateJobFromTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the create job from template method over HTTP.
+ + Args: + request (~.templates.CreateJobFromTemplateRequest): + The request object. A request to create a Cloud Dataflow + job from a template. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates", + "body": "*", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/templates", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_job_from_template( + request, metadata + ) + pb_request = templates.CreateJobFromTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_from_template(resp) + return resp + + class _GetTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("GetTemplate") + + def __call__( + self, + request: templates.GetTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.GetTemplateResponse: + r"""Call the get template method over HTTP. + + Args: + request (~.templates.GetTemplateRequest): + The request object. A request to retrieve a Cloud + Dataflow job template. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.GetTemplateResponse: + The response to a GetTemplate + request. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates:get", + }, + { + "method": "get", + "uri": "/v1b3/projects/{project_id}/templates:get", + }, + ] + request, metadata = self._interceptor.pre_get_template(request, metadata) + pb_request = templates.GetTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.GetTemplateResponse() + pb_resp = templates.GetTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_template(resp) + return resp + + class _LaunchTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("LaunchTemplate") + + def __call__( + self, + request: templates.LaunchTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchTemplateResponse: + r"""Call the launch template method over HTTP. + + Args: + request (~.templates.LaunchTemplateRequest): + The request object. A request to launch a template. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.LaunchTemplateResponse: + Response to the request to launch a + template. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/locations/{location}/templates:launch", + "body": "launch_parameters", + }, + { + "method": "post", + "uri": "/v1b3/projects/{project_id}/templates:launch", + "body": "launch_parameters", + }, + ] + request, metadata = self._interceptor.pre_launch_template(request, metadata) + pb_request = templates.LaunchTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchTemplateResponse() + pb_resp = templates.LaunchTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_template(resp) + return resp + + @property + def create_job_from_template( + self, + ) -> Callable[[templates.CreateJobFromTemplateRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobFromTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_template( + self, + ) -> Callable[[templates.GetTemplateRequest], templates.GetTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def launch_template( + self, + ) -> Callable[[templates.LaunchTemplateRequest], templates.LaunchTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._LaunchTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TemplatesServiceRestTransport",) diff --git a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py similarity index 97% rename from samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py index 8f236ed..9fb332d 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_4148a07f] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_launch_flex_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_4148a07f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py new file mode 100644 index 0000000..2cc8a42 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchFlexTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_a4f75f91] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = client.launch_flex_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync_a4f75f91] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py index 7b9946e..a45077e 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_0d901b38] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_aggregated_list_jobs(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_0d901b38] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py new file mode 100644 index 0000000..778a4f1 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_26f07383] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_aggregated_list_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobsRequest( + ) + + # Make the request + page_result = client.aggregated_list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync_26f07383] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py index 591caa0..d6c5c9a 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_aab49b35] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_check_active_jobs(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_aab49b35] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py new file mode 100644 index 0000000..dc9691c --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckActiveJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_baed931b] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_check_active_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CheckActiveJobsRequest( + ) + + # Make the request + response = client.check_active_jobs(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync_baed931b] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py similarity index 92% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py index b91dc91..406ead7 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_2a1b0208] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_create_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_2a1b0208] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py new file mode 100644 index 0000000..93aadf8 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_ad13d605] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync_ad13d605] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py similarity index 92% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py index c4bef41..9980879 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_3e418026] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_get_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_3e418026] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py new file mode 100644 index 0000000..364e642 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_45212860] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync_45212860] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py similarity index 92% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py index a975672..8ebfabf 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_97a60855] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_list_jobs(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_97a60855] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py new file mode 100644 index 0000000..633ac93 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_bfb75b4c] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobsRequest( + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync_bfb75b4c] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py similarity index 95% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py index db5e822..49a3645 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_60f4d8fe] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_snapshot_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_60f4d8fe] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py new file mode 100644 index 0000000..a767e86 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SnapshotJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_ff3cb8bd] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_snapshot_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.SnapshotJobRequest( + ) + + # Make the request + response = client.snapshot_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync_ff3cb8bd] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py similarity index 92% rename from samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py index 9fff7ed..0eba295 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_8db1fd08] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_update_job(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_8db1fd08] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py new file mode 100644 index 0000000..c3dac71 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_e2fa191d] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync_e2fa191d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py index 256bde8..82358ed 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] +# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_04dec136] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_list_job_messages(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] +# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_04dec136] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py new file mode 100644 index 0000000..1aa793d --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_f19ed68d] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_job_messages(): + # Create a client + client = dataflow_v1beta3.MessagesV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobMessagesRequest( + ) + + # Make the request + page_result = client.list_job_messages(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync_f19ed68d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py similarity index 97% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py index 9268495..a1e521e 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_a4ff4d57] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_get_job_execution_details(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_a4ff4d57] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py new file mode 100644 index 0000000..211b983 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_b7550163] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_job_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync_b7550163] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py index bbe9622..54a22ec 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_02835968] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_get_job_metrics(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_02835968] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py new file mode 100644 index 0000000..ffc68c4 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_59dce217] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = client.get_job_metrics(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync_59dce217] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py similarity index 97% rename from samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py index c9e9729..76af1fd 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_4acb3ded] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_get_stage_execution_details(): for response in page_result: print(response) -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_4acb3ded] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py new file mode 100644 index 0000000..f88599e --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStageExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_9490a11d] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_stage_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetStageExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_stage_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync_9490a11d] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py index 59a50f5..cc7f9e5 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_34b1dfd7] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_delete_snapshot(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_34b1dfd7] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py new file mode 100644 index 0000000..938c6e9 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_567e01e6] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = client.delete_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync_567e01e6] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py index f1861f6..9eb621a 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_846de01f] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_get_snapshot(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_846de01f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py new file mode 100644 index 0000000..f4ef41f --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_ba1ca2b6] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync_ba1ca2b6] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py index bdd2529..d6955d2 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_425e5024] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -48,4 +48,4 @@ def sample_list_snapshots(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_425e5024] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py new file mode 100644 index 0000000..a8fdc15 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_6da08cb9] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = client.list_snapshots(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync_6da08cb9] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py similarity index 97% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py index 175c180..3c0a699 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_9e26a4bf] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_create_job_from_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_9e26a4bf] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py new file mode 100644 index 0000000..2470a87 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobFromTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_e2cb482f] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.create_job_from_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync_e2cb482f] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py index c7f9cae..9691908 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_07cd261a] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_get_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_07cd261a] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py new file mode 100644 index 0000000..88cf605 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_732e6209] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.get_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync_732e6209] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py similarity index 98% rename from samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py rename to samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py index 7f1f81d..98b969f 100644 --- a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-dataflow-client -# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_140179ca] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -49,4 +49,4 @@ def sample_launch_template(): # Handle the response print(response) -# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_140179ca] diff --git a/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py new file mode 100644 index 0000000..74d62b9 --- /dev/null +++ b/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_77764eb9] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.launch_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync_77764eb9] diff --git a/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json b/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json index 2234bc0..ac911cf 100644 --- a/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json +++ b/samples/generated_samples/snippet_metadata_dataflow_v1beta3.json @@ -125,7 +125,7 @@ "shortName": "launch_flex_template" }, "description": "Sample for LaunchFlexTemplate", - "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", @@ -161,29 +161,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py" + "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_4148a07f.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient", + "shortName": "FlexTemplatesServiceClient" }, - "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.aggregated_list_jobs", + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient.launch_flex_template", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", + "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" + "fullName": "google.dataflow.v1beta3.FlexTemplatesService", + "shortName": "FlexTemplatesService" }, - "shortName": "AggregatedListJobs" + "shortName": "LaunchFlexTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" }, { "name": "retry", @@ -198,22 +197,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager", - "shortName": "aggregated_list_jobs" + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", + "shortName": "launch_flex_template" }, - "description": "Sample for AggregatedListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py", + "description": "Sample for LaunchFlexTemplate", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async", + "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -233,21 +232,22 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py" + "title": 
"dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync_a4f75f91.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.aggregated_list_jobs", "method": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", "service": { @@ -274,14 +274,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager", "shortName": "aggregated_list_jobs" }, "description": "Sample for AggregatedListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async", "segments": [ { "end": 51, @@ -314,29 +314,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CheckActiveJobs" + "shortName": "AggregatedListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -351,22 +350,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", - "shortName": "check_active_jobs" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", + "shortName": "aggregated_list_jobs" }, - "description": "Sample for CheckActiveJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", + "description": "Sample for AggregatedListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -386,12 +385,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 52, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_0d901b38.py" }, { "canonical": true, @@ -400,19 +399,19 @@ "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CheckActiveJobs" + "shortName": "AggregatedListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -427,22 +426,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", - "shortName": "check_active_jobs" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", + "shortName": "aggregated_list_jobs" }, - "description": "Sample for CheckActiveJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py", + "description": "Sample for AggregatedListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -462,12 +461,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 52, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync_26f07383.py" 
}, { "canonical": true, @@ -477,19 +476,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CreateJob" + "shortName": "CheckActiveJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" }, { "name": "retry", @@ -504,14 +503,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", "segments": [ { "end": 50, @@ -544,7 +543,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" }, { "canonical": true, @@ -553,19 +552,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", + 
"fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "CreateJob" + "shortName": "CheckActiveJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" }, { "name": "retry", @@ -580,14 +579,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py", + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", "segments": [ { "end": 50, @@ -620,29 +619,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_aab49b35.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", "method": { - 
"fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "GetJob" + "shortName": "CheckActiveJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" }, { "name": "retry", @@ -657,14 +655,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", "segments": [ { "end": 50, @@ -697,28 +695,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync_baed931b.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", 
"service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "GetJob" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -734,13 +733,13 @@ } ], "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" + "shortName": "create_job" }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", "segments": [ { "end": 50, @@ -773,29 +772,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "ListJobs" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + "type": 
"google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -810,22 +808,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job" }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -845,12 +843,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_ad13d605.py" }, { "canonical": true, @@ -859,19 +857,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "ListJobs" + "shortName": "CreateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" }, { "name": "retry", @@ -886,22 +884,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", - "shortName": "list_jobs" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job" }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py", + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -921,12 +919,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync_2a1b0208.py" }, { "canonical": true, @@ -936,19 +934,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "SnapshotJob" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -963,14 +961,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", "segments": [ { "end": 50, @@ -1003,7 +1001,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" }, { "canonical": true, @@ -1012,19 +1010,19 @@ "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "SnapshotJob" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -1039,14 +1037,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py", + "description": "Sample for GetJob", + 
"file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", "segments": [ { "end": 50, @@ -1079,29 +1077,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_3e418026.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "UpdateJob" + "shortName": "GetJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" }, { "name": "retry", @@ -1117,13 +1114,13 @@ } ], "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" + "shortName": "get_job" }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", "segments": [ { 
"end": 50, @@ -1156,28 +1153,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync_45212860.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { "fullName": "google.dataflow.v1beta3.JobsV1Beta3", "shortName": "JobsV1Beta3" }, - "shortName": "UpdateJob" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1192,22 +1190,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1227,34 +1225,33 @@ 
"type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 52, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", - "shortName": "MessagesV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "ListJobMessages" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1269,14 +1266,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", - "shortName": "list_job_messages" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", + "shortName": "list_jobs" }, - "description": "Sample for ListJobMessages", - "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", "segments": [ { "end": 51, @@ -1309,28 +1306,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_97a60855.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", - "shortName": "MessagesV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "ListJobMessages" + "shortName": "ListJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" }, { "name": "retry", @@ -1345,14 +1342,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", - "shortName": "list_job_messages" + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", + "shortName": "list_jobs" }, - "description": "Sample for ListJobMessages", - "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py", + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", "segments": [ { "end": 51, @@ -1385,29 +1382,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync_bfb75b4c.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "GetJobExecutionDetails" + "shortName": "SnapshotJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" }, { "name": "retry", @@ -1422,22 +1419,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", - "shortName": "get_job_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" }, - "description": "Sample for GetJobExecutionDetails", - "file": 
"dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1457,33 +1454,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "GetJobExecutionDetails" + "shortName": "SnapshotJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" }, { "name": "retry", @@ -1498,22 +1495,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", - "shortName": "get_job_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" }, - "description": "Sample for GetJobExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py", + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, "start": 27, "type": "SHORT" }, @@ -1533,34 +1530,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_60f4d8fe.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "GetJobMetrics" + "shortName": 
"SnapshotJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" }, { "name": "retry", @@ -1575,14 +1571,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", "segments": [ { "end": 50, @@ -1615,28 +1611,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync_ff3cb8bd.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": "GetJobMetrics" + 
"shortName": "UpdateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" }, { "name": "retry", @@ -1651,14 +1648,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py", + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", "segments": [ { "end": 50, @@ -1691,29 +1688,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py" + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" }, - "shortName": 
"GetStageExecutionDetails" + "shortName": "UpdateJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" }, { "name": "retry", @@ -1728,22 +1724,1243 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", - "shortName": "get_stage_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" }, - "description": "Sample for GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", "segments": [ { - "end": 51, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_8db1fd08.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + 
"shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync_e2fa191d.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", + "shortName": "MessagesV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", + "shortName": "MessagesV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_04dec136.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", + "shortName": "MessagesV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync_f19ed68d.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_a4ff4d57.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync_b7550163.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": 
"google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_59dce217.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + 
"shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync_02835968.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + 
}, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_9490a11d.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync_4acb3ded.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_567e01e6.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", + "method": { + "fullName": 
"google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, "start": 27, "type": "SHORT" }, @@ -1763,33 +2980,34 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync_34b1dfd7.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", "service": { - 
"fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetStageExecutionDetails" + "shortName": "GetSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" }, { "name": "retry", @@ -1804,22 +3022,174 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", - "shortName": "get_stage_execution_details" + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" }, - "description": "Sample for GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py", + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, { "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": 
"google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", + "segments": [ + { + "end": 50, "start": 27, "type": "FULL" }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, { "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_ba1ca2b6.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, "start": 27, "type": "SHORT" }, @@ -1839,12 +3209,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 51, "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync_846de01f.py" }, { "canonical": true, @@ -1854,19 +3224,19 @@ "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", "shortName": "SnapshotsV1Beta3AsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "DeleteSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -1881,14 +3251,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", "segments": [ { "end": 50, @@ -1921,7 +3291,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" }, { "canonical": true, @@ -1930,19 +3300,19 @@ "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "DeleteSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -1957,14 +3327,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" + "resultType": 
"google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", "segments": [ { "end": 50, @@ -1997,29 +3367,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_425e5024.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", "service": { "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", "shortName": "SnapshotsV1Beta3" }, - "shortName": "GetSnapshot" + "shortName": "ListSnapshots" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" }, { "name": "retry", @@ -2034,14 +3403,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" + "resultType": 
"google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", "segments": [ { "end": 50, @@ -2074,28 +3443,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync_6da08cb9.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "GetSnapshot" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2110,22 +3480,22 @@ "type": "Sequence[Tuple[str, 
str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2135,44 +3505,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": 
"google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "ListSnapshots" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2187,22 +3556,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - "shortName": "list_snapshots" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2212,43 +3581,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_9e26a4bf.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - 
"shortName": "SnapshotsV1Beta3Client" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" }, - "shortName": "ListSnapshots" + "shortName": "CreateJobFromTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" }, { "name": "retry", @@ -2263,22 +3632,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - "shortName": "list_snapshots" + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py", + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2288,22 +3657,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync_e2cb482f.py" }, { "canonical": true, @@ -2313,19 +3682,19 @@ "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", "shortName": "TemplatesServiceAsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "CreateJobFromTemplate" + "shortName": "GetTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" }, { "name": "retry", @@ -2340,14 +3709,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", + "regionTag": 
"dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", "segments": [ { "end": 51, @@ -2380,7 +3749,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_async.py" }, { "canonical": true, @@ -2389,19 +3758,19 @@ "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "CreateJobFromTemplate" + "shortName": "GetTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" }, { "name": "retry", @@ -2416,14 +3785,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py", + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", "segments": [ { "end": 51, @@ -2456,17 
+3825,16 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_sync_732e6209.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", "method": { "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", "service": { @@ -2497,10 +3865,10 @@ "shortName": "get_template" }, "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", + "file": "dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", "segments": [ { "end": 51, @@ -2533,28 +3901,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_get_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_get_template_sync_07cd261a.py" }, { "canonical": true, "clientMethod": { + "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", - "shortName": "TemplatesServiceClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", 
"method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", + "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", "service": { "fullName": "google.dataflow.v1beta3.TemplatesService", "shortName": "TemplatesService" }, - "shortName": "GetTemplate" + "shortName": "LaunchTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" + "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" }, { "name": "retry", @@ -2569,14 +3938,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", - "shortName": "get_template" + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", + "shortName": "launch_template" }, - "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_sync.py", + "description": "Sample for LaunchTemplate", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", "segments": [ { "end": 51, @@ -2609,17 +3978,16 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_get_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.launch_template", 
"method": { "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", "service": { @@ -2650,10 +4018,10 @@ "shortName": "launch_template" }, "description": "Sample for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", "segments": [ { "end": 51, @@ -2686,7 +4054,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync_77764eb9.py" }, { "canonical": true, @@ -2726,7 +4094,7 @@ "shortName": "launch_template" }, "description": "Sample for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py", "language": "PYTHON", "origin": "API_DEFINITION", "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", @@ -2762,7 +4130,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py" + "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync_140179ca.py" } ] } diff --git a/setup.py b/setup.py index a89c058..44ef4fc 100644 --- a/setup.py +++ b/setup.py @@ -25,9 +25,9 @@ release_status = "Development Status :: 4 - Beta" url = "https://github.com/googleapis/python-dataflow-client" dependencies = [ - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, 
<2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.1, <5.0.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 810c7cb..d09d847 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.32.0 +google-api-core==1.33.0 proto-plus==1.22.0 -protobuf==3.19.0 +protobuf==3.20.1 diff --git a/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index 2a303f7..55bfd8a 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,10 +33,14 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.flex_templates_service import ( FlexTemplatesServiceAsyncClient, @@ -94,6 +100,7 @@ def test__get_default_mtls_endpoint(): [ (FlexTemplatesServiceClient, "grpc"), (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), ], ) def test_flex_templates_service_client_from_service_account_info( @@ -109,7 +116,11 @@ def 
test_flex_templates_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -117,6 +128,7 @@ def test_flex_templates_service_client_from_service_account_info( [ (transports.FlexTemplatesServiceGrpcTransport, "grpc"), (transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FlexTemplatesServiceRestTransport, "rest"), ], ) def test_flex_templates_service_client_service_account_always_use_jwt( @@ -142,6 +154,7 @@ def test_flex_templates_service_client_service_account_always_use_jwt( [ (FlexTemplatesServiceClient, "grpc"), (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), ], ) def test_flex_templates_service_client_from_service_account_file( @@ -164,13 +177,18 @@ def test_flex_templates_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_flex_templates_service_client_get_transport_class(): transport = FlexTemplatesServiceClient.get_transport_class() available_transports = [ transports.FlexTemplatesServiceGrpcTransport, + transports.FlexTemplatesServiceRestTransport, ] assert transport in available_transports @@ -191,6 +209,11 @@ def test_flex_templates_service_client_get_transport_class(): transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + ), ], ) 
@mock.patch.object( @@ -346,6 +369,18 @@ def test_flex_templates_service_client_client_options( "grpc_asyncio", "false", ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + "true", + ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -549,6 +584,11 @@ def test_flex_templates_service_client_get_mtls_endpoint_and_cert_source(client_ transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + ), ], ) def test_flex_templates_service_client_client_options_scopes( @@ -589,6 +629,12 @@ def test_flex_templates_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + FlexTemplatesServiceClient, + transports.FlexTemplatesServiceRestTransport, + "rest", + None, + ), ], ) def test_flex_templates_service_client_client_options_credentials_file( @@ -862,6 +908,129 @@ async def test_launch_flex_template_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchFlexTemplateRequest, + dict, + ], +) +def test_launch_flex_template_rest(request_type): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = templates.LaunchFlexTemplateResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.LaunchFlexTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.launch_flex_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.LaunchFlexTemplateResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_launch_flex_template_rest_interceptors(null_interceptor): + transport = transports.FlexTemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FlexTemplatesServiceRestInterceptor(), + ) + client = FlexTemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FlexTemplatesServiceRestInterceptor, "post_launch_flex_template" + ) as post, mock.patch.object( + transports.FlexTemplatesServiceRestInterceptor, "pre_launch_flex_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.LaunchFlexTemplateRequest.pb( + templates.LaunchFlexTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = templates.LaunchFlexTemplateResponse.to_json( + templates.LaunchFlexTemplateResponse() + ) + + request = templates.LaunchFlexTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = templates.LaunchFlexTemplateResponse() + + client.launch_flex_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_launch_flex_template_rest_bad_request( + transport: str = "rest", request_type=templates.LaunchFlexTemplateRequest +): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.launch_flex_template(request) + + +def test_launch_flex_template_rest_error(): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.FlexTemplatesServiceGrpcTransport( @@ -943,6 +1112,7 @@ def test_transport_get_channel(): [ transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport, + transports.FlexTemplatesServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -957,6 +1127,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1099,6 +1270,7 @@ def test_flex_templates_service_transport_auth_adc(transport_class): [ transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport, + transports.FlexTemplatesServiceRestTransport, ], ) def test_flex_templates_service_transport_auth_gdch_credentials(transport_class): @@ -1203,11 +1375,23 @@ def test_flex_templates_service_grpc_transport_client_cert_source_for_mtls( ) +def test_flex_templates_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.FlexTemplatesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_flex_templates_service_host_no_port(transport_name): @@ -1218,7 +1402,11 @@ def test_flex_templates_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1226,6 +1414,7 @@ def test_flex_templates_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) 
def test_flex_templates_service_host_with_port(transport_name): @@ -1236,7 +1425,33 @@ def test_flex_templates_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_flex_templates_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FlexTemplatesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FlexTemplatesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.launch_flex_template._session + session2 = client2.transport.launch_flex_template._session + assert session1 != session2 def test_flex_templates_service_grpc_transport_channel(): @@ -1507,6 +1722,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1524,6 +1740,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index a8596fa..b10585a 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -33,12 +35,16 @@ from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf 
import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import ( JobsV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (JobsV1Beta3Client, "grpc"), (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), ], ) def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_name): @@ -108,7 +115,11 @@ def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_ assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -116,6 +127,7 @@ def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_ [ (transports.JobsV1Beta3GrpcTransport, "grpc"), (transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.JobsV1Beta3RestTransport, "rest"), ], ) def test_jobs_v1_beta3_client_service_account_always_use_jwt( @@ -141,6 +153,7 @@ def test_jobs_v1_beta3_client_service_account_always_use_jwt( [ (JobsV1Beta3Client, "grpc"), (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), ], ) def test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_name): @@ -161,13 +174,18 @@ def test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_ assert client.transport._credentials == creds 
assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_jobs_v1_beta3_client_get_transport_class(): transport = JobsV1Beta3Client.get_transport_class() available_transports = [ transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3RestTransport, ] assert transport in available_transports @@ -184,6 +202,7 @@ def test_jobs_v1_beta3_client_get_transport_class(): transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -327,6 +346,8 @@ def test_jobs_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "true"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -520,6 +541,7 @@ def test_jobs_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), ], ) def test_jobs_v1_beta3_client_client_options_scopes( @@ -555,6 +577,7 @@ def test_jobs_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", None), ], ) def test_jobs_v1_beta3_client_client_options_credentials_file( @@ -2247,139 +2270,1997 @@ async def test_snapshot_job_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.JobsV1Beta3GrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + jobs.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": 
True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + 
"replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": 
"version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.JobsV1Beta3GrpcTransport( + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.JobsV1Beta3RestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_create_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.CreateJobRequest.pb(jobs.CreateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # It is an error to provide an api_key and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_rest_bad_request( + transport: str = "rest", request_type=jobs.CreateJobRequest +): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + 
"workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", 
"temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + 
"database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) - # It is an error to provide scopes and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) + + +def test_create_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), ) client = JobsV1Beta3Client(transport=transport) - assert client.transport is transport + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_get_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_get_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = 
jobs.GetJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.get_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.JobsV1Beta3GrpcAsyncIOTransport( +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=jobs.GetJobRequest +): + client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job(request) -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3GrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_get_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + jobs.UpdateJobRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = JobsV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_update_job_rest(request_type): client = JobsV1Beta3Client( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobsV1Beta3GrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": 
"task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + "base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + 
"internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, "nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + 
], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + "execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) -def test_jobs_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_jobs_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_update_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_update_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.UpdateJobRequest() + metadata 
= [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.update_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_rest_bad_request( + transport: str = "rest", request_type=jobs.UpdateJobRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request_init["job"] = { + "id": "id_value", + "project_id": "project_id_value", + "name": "name_value", + "type_": 1, + "environment": { + "temp_storage_prefix": "temp_storage_prefix_value", + "cluster_manager_api_service": "cluster_manager_api_service_value", + "experiments": ["experiments_value1", "experiments_value2"], + "service_options": ["service_options_value1", "service_options_value2"], + "service_kms_key_name": "service_kms_key_name_value", + "worker_pools": [ + { + "kind": "kind_value", + "num_workers": 1212, + "packages": [{"name": "name_value", "location": "location_value"}], + "default_package_set": 1, + "machine_type": "machine_type_value", + "teardown_policy": 1, + "disk_size_gb": 1261, + "disk_type": "disk_type_value", + "disk_source_image": "disk_source_image_value", + "zone": "zone_value", + "taskrunner_settings": { + "task_user": "task_user_value", + "task_group": "task_group_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "base_url": "base_url_value", + "dataflow_api_version": "dataflow_api_version_value", + "parallel_worker_settings": { + "base_url": "base_url_value", + "reporting_enabled": True, + "service_path": "service_path_value", + "shuffle_service_path": "shuffle_service_path_value", + "worker_id": "worker_id_value", + "temp_storage_prefix": "temp_storage_prefix_value", + }, + 
"base_task_dir": "base_task_dir_value", + "continue_on_exception": True, + "log_to_serialconsole": True, + "alsologtostderr": True, + "log_upload_location": "log_upload_location_value", + "log_dir": "log_dir_value", + "temp_storage_prefix": "temp_storage_prefix_value", + "harness_command": "harness_command_value", + "workflow_file_name": "workflow_file_name_value", + "commandlines_file_name": "commandlines_file_name_value", + "vm_id": "vm_id_value", + "language_hint": "language_hint_value", + "streaming_worker_main_class": "streaming_worker_main_class_value", + }, + "on_host_maintenance": "on_host_maintenance_value", + "data_disks": [ + { + "size_gb": 739, + "disk_type": "disk_type_value", + "mount_point": "mount_point_value", + } + ], + "metadata": {}, + "autoscaling_settings": {"algorithm": 1, "max_num_workers": 1633}, + "pool_args": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + "network": "network_value", + "subnetwork": "subnetwork_value", + "worker_harness_container_image": "worker_harness_container_image_value", + "num_threads_per_worker": 2361, + "ip_configuration": 1, + "sdk_harness_container_images": [ + { + "container_image": "container_image_value", + "use_single_core_per_container": True, + "environment_id": "environment_id_value", + "capabilities": [ + "capabilities_value1", + "capabilities_value2", + ], + } + ], + } + ], + "user_agent": {"fields": {}}, + "version": {}, + "dataset": "dataset_value", + "sdk_pipeline_options": {}, + "internal_experiments": {}, + "service_account_email": "service_account_email_value", + "flex_resource_scheduling_goal": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "shuffle_mode": 1, + "debug_options": {"enable_hot_key_logging": True}, + }, + "steps": [{"kind": "kind_value", "name": "name_value", "properties": {}}], + "steps_location": "steps_location_value", + "current_state": 1, + "current_state_time": {"seconds": 751, 
"nanos": 543}, + "requested_state": 1, + "execution_info": {"stages": {}}, + "create_time": {}, + "replace_job_id": "replace_job_id_value", + "transform_name_mapping": {}, + "client_request_id": "client_request_id_value", + "replaced_by_job_id": "replaced_by_job_id_value", + "temp_files": ["temp_files_value1", "temp_files_value2"], + "labels": {}, + "location": "location_value", + "pipeline_description": { + "original_pipeline_transform": [ + { + "kind": 1, + "id": "id_value", + "name": "name_value", + "display_data": [ + { + "key": "key_value", + "namespace": "namespace_value", + "str_value": "str_value_value", + "int64_value": 1073, + "float_value": 0.117, + "java_class_value": "java_class_value_value", + "timestamp_value": {}, + "duration_value": {"seconds": 751, "nanos": 543}, + "bool_value": True, + "short_str_value": "short_str_value_value", + "url": "url_value", + "label": "label_value", + } + ], + "output_collection_name": [ + "output_collection_name_value1", + "output_collection_name_value2", + ], + "input_collection_name": [ + "input_collection_name_value1", + "input_collection_name_value2", + ], + } + ], + "execution_pipeline_stage": [ + { + "name": "name_value", + "id": "id_value", + "kind": 1, + "input_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + "size_bytes": 1089, + } + ], + "output_source": {}, + "prerequisite_stage": [ + "prerequisite_stage_value1", + "prerequisite_stage_value2", + ], + "component_transform": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform": "original_transform_value", + } + ], + "component_source": [ + { + "user_name": "user_name_value", + "name": "name_value", + "original_transform_or_collection": "original_transform_or_collection_value", + } + ], + } + ], + "display_data": {}, + }, + "stage_states": [ + { + "execution_stage_name": "execution_stage_name_value", + 
"execution_stage_state": 1, + "current_state_time": {}, + } + ], + "job_metadata": { + "sdk_version": { + "version": "version_value", + "version_display_name": "version_display_name_value", + "sdk_support_status": 1, + }, + "spanner_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "database_id": "database_id_value", + } + ], + "bigquery_details": [ + { + "table": "table_value", + "dataset": "dataset_value", + "project_id": "project_id_value", + "query": "query_value", + } + ], + "big_table_details": [ + { + "project_id": "project_id_value", + "instance_id": "instance_id_value", + "table_id": "table_id_value", + } + ], + "pubsub_details": [ + {"topic": "topic_value", "subscription": "subscription_value"} + ], + "file_details": [{"file_pattern": "file_pattern_value"}], + "datastore_details": [ + {"namespace": "namespace_value", "project_id": "project_id_value"} + ], + }, + "start_time": {}, + "created_from_snapshot_id": "created_from_snapshot_id_value", + "satisfies_pzs": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job(request) + + +def test_update_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_list_jobs_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_list_jobs" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.ListJobsResponse.to_json( + jobs.ListJobsResponse() + ) + + request = jobs.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.ListJobsResponse() + + client.list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_jobs_rest_bad_request( + transport: str = "rest", request_type=jobs.ListJobsRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_jobs(request) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1", "location": "sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert 
page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_aggregated_list_jobs_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.aggregated_list_jobs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_jobs_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_aggregated_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.ListJobsResponse.to_json( + jobs.ListJobsResponse() + ) + + request = jobs.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.ListJobsResponse() + + client.aggregated_list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_jobs_rest_bad_request( + transport: str = "rest", request_type=jobs.ListJobsRequest +): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list_jobs(request) + + +def test_aggregated_list_jobs_rest_pager(transport: str = "rest"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.aggregated_list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + pages = list(client.aggregated_list_jobs(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_check_active_jobs_rest_no_http_options(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = jobs.CheckActiveJobsRequest() + with pytest.raises(RuntimeError): + client.check_active_jobs(request) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SnapshotJobRequest, + dict, + ], +) +def test_snapshot_job_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshots.Snapshot( + id="id_value", + project_id="project_id_value", + source_job_id="source_job_id_value", + state=snapshots.SnapshotState.PENDING, + description="description_value", + disk_size_bytes=1611, + region="region_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.snapshot_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, snapshots.Snapshot) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.source_job_id == "source_job_id_value" + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == "description_value" + assert response.disk_size_bytes == 1611 + assert response.region == "region_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_snapshot_job_rest_interceptors(null_interceptor): + transport = transports.JobsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobsV1Beta3RestInterceptor(), + ) + client = JobsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_snapshot_job" + ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "pre_snapshot_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.SnapshotJobRequest.pb(jobs.SnapshotJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) + + request = jobs.SnapshotJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.Snapshot() + + client.snapshot_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_snapshot_job_rest_bad_request( + transport: str = "rest", request_type=jobs.SnapshotJobRequest +): + 
client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.snapshot_job(request) + + +def test_snapshot_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_check_active_jobs_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.check_active_jobs({}) + assert ( + "Cannot define a method without a valid 'google.api.http' annotation." + in str(runtime_error.value) + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = JobsV1Beta3Client(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.JobsV1Beta3GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3GrpcAsyncIOTransport, + transports.JobsV1Beta3RestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = JobsV1Beta3Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.JobsV1Beta3GrpcTransport, + ) + + +def test_jobs_v1_beta3_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.JobsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_jobs_v1_beta3_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.JobsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. @@ -2492,6 +4373,7 @@ def test_jobs_v1_beta3_transport_auth_adc(transport_class): [ transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport, + transports.JobsV1Beta3RestTransport, ], ) def test_jobs_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -2591,11 +4473,23 @@ def test_jobs_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_clas ) +def test_jobs_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.JobsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_jobs_v1_beta3_host_no_port(transport_name): @@ -2606,7 +4500,11 @@ def test_jobs_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -2614,6 +4512,7 @@ def test_jobs_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_jobs_v1_beta3_host_with_port(transport_name): @@ -2624,7 +4523,51 @@ def test_jobs_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert 
client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_jobs_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = JobsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = JobsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job._session + session2 = client2.transport.create_job._session + assert session1 != session2 + session1 = client1.transport.get_job._session + session2 = client2.transport.get_job._session + assert session1 != session2 + session1 = client1.transport.update_job._session + session2 = client2.transport.update_job._session + assert session1 != session2 + session1 = client1.transport.list_jobs._session + session2 = client2.transport.list_jobs._session + assert session1 != session2 + session1 = client1.transport.aggregated_list_jobs._session + session2 = client2.transport.aggregated_list_jobs._session + assert session1 != session2 + session1 = client1.transport.check_active_jobs._session + session2 = client2.transport.check_active_jobs._session + assert session1 != session2 + session1 = client1.transport.snapshot_job._session + session2 = client2.transport.snapshot_job._session + assert session1 != session2 def test_jobs_v1_beta3_grpc_transport_channel(): @@ -2887,6 +4830,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2904,6 +4848,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git 
a/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index cd91c06..94082a0 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,15 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import ( MessagesV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (MessagesV1Beta3Client, "grpc"), (MessagesV1Beta3AsyncClient, "grpc_asyncio"), + (MessagesV1Beta3Client, "rest"), ], ) def test_messages_v1_beta3_client_from_service_account_info( @@ -110,7 +117,11 @@ def test_messages_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -118,6 +129,7 @@ def test_messages_v1_beta3_client_from_service_account_info( [ (transports.MessagesV1Beta3GrpcTransport, "grpc"), (transports.MessagesV1Beta3GrpcAsyncIOTransport, 
"grpc_asyncio"), + (transports.MessagesV1Beta3RestTransport, "rest"), ], ) def test_messages_v1_beta3_client_service_account_always_use_jwt( @@ -143,6 +155,7 @@ def test_messages_v1_beta3_client_service_account_always_use_jwt( [ (MessagesV1Beta3Client, "grpc"), (MessagesV1Beta3AsyncClient, "grpc_asyncio"), + (MessagesV1Beta3Client, "rest"), ], ) def test_messages_v1_beta3_client_from_service_account_file( @@ -165,13 +178,18 @@ def test_messages_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_messages_v1_beta3_client_get_transport_class(): transport = MessagesV1Beta3Client.get_transport_class() available_transports = [ transports.MessagesV1Beta3GrpcTransport, + transports.MessagesV1Beta3RestTransport, ] assert transport in available_transports @@ -188,6 +206,7 @@ def test_messages_v1_beta3_client_get_transport_class(): transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -343,6 +362,18 @@ def test_messages_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + ( + MessagesV1Beta3Client, + transports.MessagesV1Beta3RestTransport, + "rest", + "true", + ), + ( + MessagesV1Beta3Client, + transports.MessagesV1Beta3RestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -542,6 +573,7 @@ def test_messages_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), ], ) def test_messages_v1_beta3_client_client_options_scopes( @@ -582,6 +614,7 @@ def 
test_messages_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", None), ], ) def test_messages_v1_beta3_client_client_options_credentials_file( @@ -1065,6 +1098,191 @@ async def test_list_job_messages_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + messages.ListJobMessagesRequest, + dict, + ], +) +def test_list_job_messages_rest(request_type): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = messages.ListJobMessagesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = messages.ListJobMessagesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_job_messages(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobMessagesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_messages_rest_interceptors(null_interceptor): + transport = transports.MessagesV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MessagesV1Beta3RestInterceptor(), + ) + client = MessagesV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MessagesV1Beta3RestInterceptor, "post_list_job_messages" + ) as post, mock.patch.object( + transports.MessagesV1Beta3RestInterceptor, "pre_list_job_messages" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = messages.ListJobMessagesRequest.pb( + messages.ListJobMessagesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = messages.ListJobMessagesResponse.to_json( + messages.ListJobMessagesResponse() + ) + + request = messages.ListJobMessagesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = messages.ListJobMessagesResponse() + + client.list_job_messages( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_messages_rest_bad_request( + transport: str = "rest", request_type=messages.ListJobMessagesRequest +): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that 
will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_messages(request) + + +def test_list_job_messages_rest_pager(transport: str = "rest"): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + messages.JobMessage(), + ], + next_page_token="abc", + ), + messages.ListJobMessagesResponse( + job_messages=[], + next_page_token="def", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + ], + next_page_token="ghi", + ), + messages.ListJobMessagesResponse( + job_messages=[ + messages.JobMessage(), + messages.JobMessage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(messages.ListJobMessagesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + 
"project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + } + + pager = client.list_job_messages(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, messages.JobMessage) for i in results) + + pages = list(client.list_job_messages(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MessagesV1Beta3GrpcTransport( @@ -1146,6 +1364,7 @@ def test_transport_get_channel(): [ transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport, + transports.MessagesV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1160,6 +1379,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1302,6 +1522,7 @@ def test_messages_v1_beta3_transport_auth_adc(transport_class): [ transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport, + transports.MessagesV1Beta3RestTransport, ], ) def test_messages_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1404,11 +1625,23 @@ def test_messages_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_ ) +def test_messages_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MessagesV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_messages_v1_beta3_host_no_port(transport_name): 
@@ -1419,7 +1652,11 @@ def test_messages_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1427,6 +1664,7 @@ def test_messages_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_messages_v1_beta3_host_with_port(transport_name): @@ -1437,7 +1675,33 @@ def test_messages_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_messages_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MessagesV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MessagesV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_job_messages._session + session2 = client2.transport.list_job_messages._session + assert session1 != session2 def test_messages_v1_beta3_grpc_transport_channel(): @@ -1708,6 +1972,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1725,6 +1990,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 55fe551..11345e5 100644 --- 
a/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,15 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import ( MetricsV1Beta3AsyncClient, @@ -95,6 +101,7 @@ def test__get_default_mtls_endpoint(): [ (MetricsV1Beta3Client, "grpc"), (MetricsV1Beta3AsyncClient, "grpc_asyncio"), + (MetricsV1Beta3Client, "rest"), ], ) def test_metrics_v1_beta3_client_from_service_account_info( @@ -110,7 +117,11 @@ def test_metrics_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -118,6 +129,7 @@ def test_metrics_v1_beta3_client_from_service_account_info( [ (transports.MetricsV1Beta3GrpcTransport, "grpc"), (transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MetricsV1Beta3RestTransport, "rest"), ], ) def test_metrics_v1_beta3_client_service_account_always_use_jwt( @@ -143,6 +155,7 @@ def 
test_metrics_v1_beta3_client_service_account_always_use_jwt( [ (MetricsV1Beta3Client, "grpc"), (MetricsV1Beta3AsyncClient, "grpc_asyncio"), + (MetricsV1Beta3Client, "rest"), ], ) def test_metrics_v1_beta3_client_from_service_account_file( @@ -165,13 +178,18 @@ def test_metrics_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_metrics_v1_beta3_client_get_transport_class(): transport = MetricsV1Beta3Client.get_transport_class() available_transports = [ transports.MetricsV1Beta3GrpcTransport, + transports.MetricsV1Beta3RestTransport, ] assert transport in available_transports @@ -188,6 +206,7 @@ def test_metrics_v1_beta3_client_get_transport_class(): transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -333,6 +352,8 @@ def test_metrics_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "true"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -532,6 +553,7 @@ def test_metrics_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class) transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), ], ) def test_metrics_v1_beta3_client_client_options_scopes( @@ -572,6 +594,7 @@ def test_metrics_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", None), ], ) def test_metrics_v1_beta3_client_client_options_credentials_file( @@ 
-1569,6 +1592,506 @@ async def test_get_stage_execution_details_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobMetricsRequest, + dict, + ], +) +def test_get_job_metrics_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.JobMetrics() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.JobMetrics.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_metrics(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, metrics.JobMetrics) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_metrics_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_job_metrics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetJobMetricsRequest.pb(metrics.GetJobMetricsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metrics.JobMetrics.to_json(metrics.JobMetrics()) + + request = metrics.GetJobMetricsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.JobMetrics() + + client.get_job_metrics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_metrics_rest_bad_request( + transport: str = "rest", request_type=metrics.GetJobMetricsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_metrics(request) + + +def test_get_job_metrics_rest_error(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetJobExecutionDetailsRequest, + dict, + ], +) +def test_get_job_execution_details_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.JobExecutionDetails( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.JobExecutionDetails.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_execution_details(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.GetJobExecutionDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_execution_details_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_job_execution_details" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_job_execution_details" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetJobExecutionDetailsRequest.pb( + metrics.GetJobExecutionDetailsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = metrics.JobExecutionDetails.to_json( + metrics.JobExecutionDetails() + ) + + request = metrics.GetJobExecutionDetailsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.JobExecutionDetails() + + client.get_job_execution_details( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_execution_details_rest_bad_request( + transport: str = "rest", request_type=metrics.GetJobExecutionDetailsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_execution_details(request) + + +def test_get_job_execution_details_rest_pager(transport: str = "rest"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token="abc", + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token="def", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token="ghi", + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.JobExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + } + + pager = client.get_job_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.StageSummary) for i in results) + + pages = list(client.get_job_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + metrics.GetStageExecutionDetailsRequest, + dict, + ], +) +def test_get_stage_execution_details_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = metrics.StageExecutionDetails( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.StageExecutionDetails.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stage_execution_details(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetStageExecutionDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stage_execution_details_rest_interceptors(null_interceptor): + transport = transports.MetricsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MetricsV1Beta3RestInterceptor(), + ) + client = MetricsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_stage_execution_details" + ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "pre_get_stage_execution_details" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = metrics.GetStageExecutionDetailsRequest.pb( + metrics.GetStageExecutionDetailsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = metrics.StageExecutionDetails.to_json( + metrics.StageExecutionDetails() + ) + + request = metrics.GetStageExecutionDetailsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = metrics.StageExecutionDetails() + + client.get_stage_execution_details( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stage_execution_details_rest_bad_request( + transport: str = "rest", request_type=metrics.GetStageExecutionDetailsRequest +): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stage_execution_details(request) + + +def test_get_stage_execution_details_rest_pager(transport: str = "rest"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + next_page_token="abc", + ), + metrics.StageExecutionDetails( + workers=[], + next_page_token="def", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token="ghi", + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.StageExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project_id": "sample1", + "location": "sample2", + "job_id": "sample3", + "stage_id": "sample4", + } + + pager = client.get_stage_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.WorkerDetails) for i in results) + + pages = list(client.get_stage_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.MetricsV1Beta3GrpcTransport( @@ -1650,6 +2173,7 @@ def test_transport_get_channel(): [ transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1664,6 +2188,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1810,6 +2335,7 @@ def test_metrics_v1_beta3_transport_auth_adc(transport_class): [ transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, ], ) def test_metrics_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1912,11 +2438,23 @@ def test_metrics_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_metrics_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MetricsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_metrics_v1_beta3_host_no_port(transport_name): @@ -1927,7 +2465,11 @@ def test_metrics_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1935,6 +2477,7 @@ def test_metrics_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_metrics_v1_beta3_host_with_port(transport_name): @@ -1945,7 +2488,39 
@@ def test_metrics_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_metrics_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetricsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MetricsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_job_metrics._session + session2 = client2.transport.get_job_metrics._session + assert session1 != session2 + session1 = client1.transport.get_job_execution_details._session + session2 = client2.transport.get_job_execution_details._session + assert session1 != session2 + session1 = client1.transport.get_stage_execution_details._session + session2 = client2.transport.get_stage_execution_details._session + assert session1 != session2 def test_metrics_v1_beta3_grpc_transport_channel(): @@ -2216,6 +2791,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2233,6 +2809,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 57b834c..ab23669 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math 
from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -32,11 +34,15 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import ( SnapshotsV1Beta3AsyncClient, @@ -96,6 +102,7 @@ def test__get_default_mtls_endpoint(): [ (SnapshotsV1Beta3Client, "grpc"), (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), ], ) def test_snapshots_v1_beta3_client_from_service_account_info( @@ -111,7 +118,11 @@ def test_snapshots_v1_beta3_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -119,6 +130,7 @@ def test_snapshots_v1_beta3_client_from_service_account_info( [ (transports.SnapshotsV1Beta3GrpcTransport, "grpc"), (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) def test_snapshots_v1_beta3_client_service_account_always_use_jwt( @@ -144,6 +156,7 @@ def test_snapshots_v1_beta3_client_service_account_always_use_jwt( [ (SnapshotsV1Beta3Client, "grpc"), (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), ], ) def test_snapshots_v1_beta3_client_from_service_account_file( @@ 
-166,13 +179,18 @@ def test_snapshots_v1_beta3_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_snapshots_v1_beta3_client_get_transport_class(): transport = SnapshotsV1Beta3Client.get_transport_class() available_transports = [ transports.SnapshotsV1Beta3GrpcTransport, + transports.SnapshotsV1Beta3RestTransport, ] assert transport in available_transports @@ -189,6 +207,7 @@ def test_snapshots_v1_beta3_client_get_transport_class(): transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) @mock.patch.object( @@ -344,6 +363,18 @@ def test_snapshots_v1_beta3_client_client_options( "grpc_asyncio", "false", ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + "rest", + "true", + ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -543,6 +574,7 @@ def test_snapshots_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_clas transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", ), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), ], ) def test_snapshots_v1_beta3_client_client_options_scopes( @@ -583,6 +615,12 @@ def test_snapshots_v1_beta3_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + SnapshotsV1Beta3Client, + transports.SnapshotsV1Beta3RestTransport, + "rest", + None, + ), ], ) def test_snapshots_v1_beta3_client_client_options_credentials_file( @@ -1172,6 +1210,400 @@ async def test_list_snapshots_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + 
snapshots.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = snapshots.Snapshot( + id="id_value", + project_id="project_id_value", + source_job_id="source_job_id_value", + state=snapshots.SnapshotState.PENDING, + description="description_value", + disk_size_bytes=1611, + region="region_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, snapshots.Snapshot) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.source_job_id == "source_job_id_value" + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == "description_value" + assert response.disk_size_bytes == 1611 + assert response.region == "region_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.GetSnapshotRequest.pb(snapshots.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) + + request = snapshots.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.Snapshot() + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_snapshot_rest_bad_request( + transport: str = "rest", 
request_type=snapshots.GetSnapshotRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_snapshot(request) + + +def test_get_snapshot_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshots.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = snapshots.DeleteSnapshotResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.DeleteSnapshotResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.DeleteSnapshotResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.DeleteSnapshotRequest.pb( + snapshots.DeleteSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.DeleteSnapshotResponse.to_json( + snapshots.DeleteSnapshotResponse() + ) + + request = snapshots.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
snapshots.DeleteSnapshotResponse() + + client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_snapshot_rest_bad_request( + transport: str = "rest", request_type=snapshots.DeleteSnapshotRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "location": "sample2", + "snapshot_id": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_snapshot(request) + + +def test_delete_snapshot_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + snapshots.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = snapshots.ListSnapshotsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.ListSnapshotsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.SnapshotsV1Beta3RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SnapshotsV1Beta3RestInterceptor(), + ) + client = SnapshotsV1Beta3Client(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = snapshots.ListSnapshotsRequest.pb(snapshots.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = snapshots.ListSnapshotsResponse.to_json( + snapshots.ListSnapshotsResponse() + ) + + request = snapshots.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = snapshots.ListSnapshotsResponse() + 
+ client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_snapshots_rest_bad_request( + transport: str = "rest", request_type=snapshots.ListSnapshotsRequest +): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_snapshots(request) + + +def test_list_snapshots_rest_error(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.SnapshotsV1Beta3GrpcTransport( @@ -1253,6 +1685,7 @@ def test_transport_get_channel(): [ transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + transports.SnapshotsV1Beta3RestTransport, ], ) def test_transport_adc(transport_class): @@ -1267,6 +1700,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1413,6 +1847,7 @@ def test_snapshots_v1_beta3_transport_auth_adc(transport_class): [ transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + transports.SnapshotsV1Beta3RestTransport, ], ) def test_snapshots_v1_beta3_transport_auth_gdch_credentials(transport_class): @@ -1515,11 +1950,23 @@ def test_snapshots_v1_beta3_grpc_transport_client_cert_source_for_mtls(transport ) +def test_snapshots_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SnapshotsV1Beta3RestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_snapshots_v1_beta3_host_no_port(transport_name): @@ -1530,7 +1977,11 @@ def test_snapshots_v1_beta3_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1538,6 +1989,7 @@ def test_snapshots_v1_beta3_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def 
test_snapshots_v1_beta3_host_with_port(transport_name): @@ -1548,7 +2000,39 @@ def test_snapshots_v1_beta3_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_snapshots_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SnapshotsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = SnapshotsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_snapshot._session + session2 = client2.transport.get_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_snapshot._session + session2 = client2.transport.delete_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_snapshots._session + session2 = client2.transport.list_snapshots._session + assert session1 != session2 def test_snapshots_v1_beta3_grpc_transport_channel(): @@ -1819,6 +2303,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1836,6 +2321,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index 3cfcde3..c246da3 100644 --- a/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import 
Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,12 +33,16 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore import grpc from grpc.experimental import aio +from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.dataflow_v1beta3.services.templates_service import ( TemplatesServiceAsyncClient, @@ -96,6 +102,7 @@ def test__get_default_mtls_endpoint(): [ (TemplatesServiceClient, "grpc"), (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, "rest"), ], ) def test_templates_service_client_from_service_account_info( @@ -111,7 +118,11 @@ def test_templates_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -119,6 +130,7 @@ def test_templates_service_client_from_service_account_info( [ (transports.TemplatesServiceGrpcTransport, "grpc"), (transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TemplatesServiceRestTransport, "rest"), ], ) def test_templates_service_client_service_account_always_use_jwt( @@ -144,6 +156,7 @@ def test_templates_service_client_service_account_always_use_jwt( [ (TemplatesServiceClient, "grpc"), (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, 
"rest"), ], ) def test_templates_service_client_from_service_account_file( @@ -166,13 +179,18 @@ def test_templates_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) def test_templates_service_client_get_transport_class(): transport = TemplatesServiceClient.get_transport_class() available_transports = [ transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceRestTransport, ] assert transport in available_transports @@ -189,6 +207,7 @@ def test_templates_service_client_get_transport_class(): transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -344,6 +363,18 @@ def test_templates_service_client_client_options( "grpc_asyncio", "false", ), + ( + TemplatesServiceClient, + transports.TemplatesServiceRestTransport, + "rest", + "true", + ), + ( + TemplatesServiceClient, + transports.TemplatesServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -543,6 +574,7 @@ def test_templates_service_client_get_mtls_endpoint_and_cert_source(client_class transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), ], ) def test_templates_service_client_client_options_scopes( @@ -583,6 +615,12 @@ def test_templates_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + TemplatesServiceClient, + transports.TemplatesServiceRestTransport, + "rest", + None, + ), ], ) def test_templates_service_client_client_options_credentials_file( @@ -1210,6 +1248,457 @@ async def test_get_template_field_headers_async(): ) in 
kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + templates.CreateJobFromTemplateRequest, + dict, + ], +) +def test_create_job_from_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + id="id_value", + project_id="project_id_value", + name="name_value", + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location="steps_location_value", + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id="replace_job_id_value", + client_request_id="client_request_id_value", + replaced_by_job_id="replaced_by_job_id_value", + temp_files=["temp_files_value"], + location="location_value", + created_from_snapshot_id="created_from_snapshot_id_value", + satisfies_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job_from_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.name == "name_value" + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == "steps_location_value" + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == "replace_job_id_value" + assert response.client_request_id == "client_request_id_value" + assert response.replaced_by_job_id == "replaced_by_job_id_value" + assert response.temp_files == ["temp_files_value"] + assert response.location == "location_value" + assert response.created_from_snapshot_id == "created_from_snapshot_id_value" + assert response.satisfies_pzs is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_from_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_create_job_from_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_create_job_from_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.CreateJobFromTemplateRequest.pb( + templates.CreateJobFromTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = templates.CreateJobFromTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.create_job_from_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_from_template_rest_bad_request( + transport: str = "rest", request_type=templates.CreateJobFromTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_from_template(request) + + +def test_create_job_from_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + templates.LaunchTemplateRequest, + dict, + ], +) +def test_launch_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request_init["launch_parameters"] = { + "job_name": "job_name_value", + "parameters": {}, + "environment": { + "num_workers": 1212, + "max_workers": 1202, + "zone": "zone_value", + "service_account_email": 
"service_account_email_value", + "temp_location": "temp_location_value", + "bypass_temp_dir_validation": True, + "machine_type": "machine_type_value", + "additional_experiments": [ + "additional_experiments_value1", + "additional_experiments_value2", + ], + "network": "network_value", + "subnetwork": "subnetwork_value", + "additional_user_labels": {}, + "kms_key_name": "kms_key_name_value", + "ip_configuration": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "enable_streaming_engine": True, + }, + "update": True, + "transform_name_mapping": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = templates.LaunchTemplateResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.LaunchTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.launch_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, templates.LaunchTemplateResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_launch_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_launch_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_launch_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.LaunchTemplateRequest.pb( + templates.LaunchTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = templates.LaunchTemplateResponse.to_json( + templates.LaunchTemplateResponse() + ) + + request = templates.LaunchTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = templates.LaunchTemplateResponse() + + client.launch_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_launch_template_rest_bad_request( + transport: str = "rest", request_type=templates.LaunchTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": 
"sample1", "location": "sample2"} + request_init["launch_parameters"] = { + "job_name": "job_name_value", + "parameters": {}, + "environment": { + "num_workers": 1212, + "max_workers": 1202, + "zone": "zone_value", + "service_account_email": "service_account_email_value", + "temp_location": "temp_location_value", + "bypass_temp_dir_validation": True, + "machine_type": "machine_type_value", + "additional_experiments": [ + "additional_experiments_value1", + "additional_experiments_value2", + ], + "network": "network_value", + "subnetwork": "subnetwork_value", + "additional_user_labels": {}, + "kms_key_name": "kms_key_name_value", + "ip_configuration": 1, + "worker_region": "worker_region_value", + "worker_zone": "worker_zone_value", + "enable_streaming_engine": True, + }, + "update": True, + "transform_name_mapping": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.launch_template(request) + + +def test_launch_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + templates.GetTemplateRequest, + dict, + ], +) +def test_get_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = templates.GetTemplateResponse( + template_type=templates.GetTemplateResponse.TemplateType.LEGACY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.GetTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.GetTemplateResponse) + assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_template_rest_interceptors(null_interceptor): + transport = transports.TemplatesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TemplatesServiceRestInterceptor(), + ) + client = TemplatesServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_get_template" + ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "pre_get_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = templates.GetTemplateRequest.pb(templates.GetTemplateRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
templates.GetTemplateResponse.to_json( + templates.GetTemplateResponse() + ) + + request = templates.GetTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = templates.GetTemplateResponse() + + client.get_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_template_rest_bad_request( + transport: str = "rest", request_type=templates.GetTemplateRequest +): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "location": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_template(request) + + +def test_get_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.TemplatesServiceGrpcTransport( @@ -1291,6 +1780,7 @@ def test_transport_get_channel(): [ transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1305,6 +1795,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1451,6 +1942,7 @@ def test_templates_service_transport_auth_adc(transport_class): [ transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, ], ) def test_templates_service_transport_auth_gdch_credentials(transport_class): @@ -1553,11 +2045,23 @@ def test_templates_service_grpc_transport_client_cert_source_for_mtls(transport_ ) +def test_templates_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TemplatesServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_templates_service_host_no_port(transport_name): @@ -1568,7 +2072,11 @@ def test_templates_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:443") + assert client.transport._host == ( + "dataflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com" + ) @pytest.mark.parametrize( @@ -1576,6 +2084,7 @@ def test_templates_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def 
test_templates_service_host_with_port(transport_name): @@ -1586,7 +2095,39 @@ def test_templates_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("dataflow.googleapis.com:8000") + assert client.transport._host == ( + "dataflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_templates_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TemplatesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TemplatesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job_from_template._session + session2 = client2.transport.create_job_from_template._session + assert session1 != session2 + session1 = client1.transport.launch_template._session + session2 = client2.transport.launch_template._session + assert session1 != session2 + session1 = client1.transport.get_template._session + session2 = client2.transport.get_template._session + assert session1 != session2 def test_templates_service_grpc_transport_channel(): @@ -1857,6 +2398,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1874,6 +2416,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: