From 062347d235ab7042dbd3ab1d774473b437f071db Mon Sep 17 00:00:00 2001 From: Tarun Annapareddy Date: Thu, 12 Feb 2026 10:06:17 -0800 Subject: [PATCH 1/5] Add Pause and Paused states --- .../runners/dataflow/dataflow_runner.py | 5 ++- .../runners/dataflow/dataflow_runner_test.py | 41 +++++++++++++++++++ .../dataflow/dataflow_v1b3_messages.py | 12 ++++++ sdks/python/apache_beam/runners/runner.py | 2 + 4 files changed, 59 insertions(+), 1 deletion(-) diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py index d33c33f84fee..a91565e5be2d 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py @@ -156,7 +156,8 @@ def rank_error(msg): state_update_callback(response.currentState) _LOGGER.info('Job %s is in state %s', job_id, response.currentState) last_job_state = response.currentState - if str(response.currentState) != 'JOB_STATE_RUNNING': + if str(response.currentState) not in ( + 'JOB_STATE_RUNNING', 'JOB_STATE_PAUSED', 'JOB_STATE_PAUSING'): # Stop checking for new messages on timeout, explanatory # message received, success, or a terminal job state caused # by the user that therefore doesn't require explanation. @@ -751,6 +752,8 @@ def api_jobstate_to_pipeline_state(api_jobstate): values_enum.JOB_STATE_CANCELLING: PipelineState.CANCELLING, values_enum.JOB_STATE_RESOURCE_CLEANING_UP: PipelineState. 
RESOURCE_CLEANING_UP, + values_enum.JOB_STATE_PAUSING: PipelineState.PAUSING, + values_enum.JOB_STATE_PAUSED: PipelineState.PAUSED, }) return ( diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py index d5d8ba662f06..5300c21feedc 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py @@ -195,6 +195,22 @@ def get_job_side_effect(*args, **kwargs): result = duration_timedout_result.wait_until_finish(5000) self.assertEqual(result, PipelineState.RUNNING) + with mock.patch('time.time', mock.MagicMock(side_effect=[1, 9, 9, 20, 20])): + duration_timedout_runner = MockDataflowRunner( + [values_enum.JOB_STATE_PAUSING]) + duration_timedout_result = DataflowPipelineResult( + duration_timedout_runner.job, duration_timedout_runner, options) + result = duration_timedout_result.wait_until_finish(5000) + self.assertEqual(result, PipelineState.PAUSING) + + with mock.patch('time.time', mock.MagicMock(side_effect=[1, 9, 9, 20, 20])): + duration_timedout_runner = MockDataflowRunner( + [values_enum.JOB_STATE_PAUSED]) + duration_timedout_result = DataflowPipelineResult( + duration_timedout_runner.job, duration_timedout_runner, options) + result = duration_timedout_result.wait_until_finish(5000) + self.assertEqual(result, PipelineState.PAUSED) + with mock.patch('time.time', mock.MagicMock(side_effect=[1, 1, 2, 2, 3])): with self.assertRaisesRegex(DataflowRuntimeException, 'Dataflow pipeline failed. 
State: CANCELLED'): @@ -239,6 +255,31 @@ def __init__(self, state, cancel_result): terminal_runner.job, terminal_runner, options) terminal_result.cancel() + def test_api_jobstate_to_pipeline_state(self): + values_enum = dataflow_api.Job.CurrentStateValueValuesEnum + expected_mappings = [ + (values_enum.JOB_STATE_UNKNOWN, PipelineState.UNKNOWN), + (values_enum.JOB_STATE_STOPPED, PipelineState.STOPPED), + (values_enum.JOB_STATE_RUNNING, PipelineState.RUNNING), + (values_enum.JOB_STATE_DONE, PipelineState.DONE), + (values_enum.JOB_STATE_FAILED, PipelineState.FAILED), + (values_enum.JOB_STATE_CANCELLED, PipelineState.CANCELLED), + (values_enum.JOB_STATE_UPDATED, PipelineState.UPDATED), + (values_enum.JOB_STATE_DRAINING, PipelineState.DRAINING), + (values_enum.JOB_STATE_DRAINED, PipelineState.DRAINED), + (values_enum.JOB_STATE_PENDING, PipelineState.PENDING), + (values_enum.JOB_STATE_CANCELLING, PipelineState.CANCELLING), + (values_enum.JOB_STATE_RESOURCE_CLEANING_UP, + PipelineState.RESOURCE_CLEANING_UP), + (values_enum.JOB_STATE_PAUSING, PipelineState.PAUSING), + (values_enum.JOB_STATE_PAUSED, PipelineState.PAUSED), + ] + + for api_state, pipeline_state in expected_mappings: + self.assertEqual( + DataflowPipelineResult.api_jobstate_to_pipeline_state(api_state), + pipeline_state) + def test_create_runner(self): self.assertTrue(isinstance(create_runner('DataflowRunner'), DataflowRunner)) self.assertTrue( diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py index c0bbfa74ac1e..e3a60c1b0257 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py @@ -2372,6 +2372,8 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum): indicates that the batch job's associated resources are 
currently being cleaned up after a successful run. Currently, this is an opt-in feature, please reach out to Cloud support team if you are interested. + JOB_STATE_PAUSING: `JOB_STATE_PAUSING` is not implemented yet. + JOB_STATE_PAUSED: `JOB_STATE_PAUSED` is not implemented yet. """ JOB_STATE_UNKNOWN = 0 JOB_STATE_STOPPED = 1 @@ -2386,6 +2388,8 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum): JOB_STATE_CANCELLING = 10 JOB_STATE_QUEUED = 11 JOB_STATE_RESOURCE_CLEANING_UP = 12 + JOB_STATE_PAUSING = 13 + JOB_STATE_PAUSED = 14 currentStateTime = _messages.StringField(1) executionStageName = _messages.StringField(2) @@ -3166,6 +3170,8 @@ class CurrentStateValueValuesEnum(_messages.Enum): indicates that the batch job's associated resources are currently being cleaned up after a successful run. Currently, this is an opt-in feature, please reach out to Cloud support team if you are interested. + JOB_STATE_PAUSING: `JOB_STATE_PAUSING` is not implemented yet. + JOB_STATE_PAUSED: `JOB_STATE_PAUSED` is not implemented yet. """ JOB_STATE_UNKNOWN = 0 JOB_STATE_STOPPED = 1 @@ -3180,6 +3186,8 @@ class CurrentStateValueValuesEnum(_messages.Enum): JOB_STATE_CANCELLING = 10 JOB_STATE_QUEUED = 11 JOB_STATE_RESOURCE_CLEANING_UP = 12 + JOB_STATE_PAUSING = 13 + JOB_STATE_PAUSED = 14 class RequestedStateValueValuesEnum(_messages.Enum): r"""The job's requested state. Applies to `UpdateJob` requests. Set @@ -3240,6 +3248,8 @@ class RequestedStateValueValuesEnum(_messages.Enum): indicates that the batch job's associated resources are currently being cleaned up after a successful run. Currently, this is an opt-in feature, please reach out to Cloud support team if you are interested. + JOB_STATE_PAUSING: `JOB_STATE_PAUSING` is not implemented yet. + JOB_STATE_PAUSED: `JOB_STATE_PAUSED` is not implemented yet. 
""" JOB_STATE_UNKNOWN = 0 JOB_STATE_STOPPED = 1 @@ -3254,6 +3264,8 @@ class RequestedStateValueValuesEnum(_messages.Enum): JOB_STATE_CANCELLING = 10 JOB_STATE_QUEUED = 11 JOB_STATE_RESOURCE_CLEANING_UP = 12 + JOB_STATE_PAUSING = 13 + JOB_STATE_PAUSED = 14 class TypeValueValuesEnum(_messages.Enum): r"""Optional. The type of Dataflow job. diff --git a/sdks/python/apache_beam/runners/runner.py b/sdks/python/apache_beam/runners/runner.py index 00ca84bb8e7d..26c5076032a2 100644 --- a/sdks/python/apache_beam/runners/runner.py +++ b/sdks/python/apache_beam/runners/runner.py @@ -253,6 +253,8 @@ class PipelineState(object): # in the process of stopping RESOURCE_CLEANING_UP = 'RESOURCE_CLEANING_UP' # job's resources are being # cleaned up + PAUSING = 'PAUSING' # job is in the process of pausing + PAUSED = 'PAUSED' # job has been paused UNRECOGNIZED = 'UNRECOGNIZED' # the job state reported by a runner cannot be # interpreted by the SDK. From 7e591fdd8e8e95878dabc270b8fe7ebe12a703e1 Mon Sep 17 00:00:00 2001 From: Tarun Annapareddy Date: Thu, 12 Feb 2026 10:21:37 -0800 Subject: [PATCH 2/5] fix formatting --- sdks/python/apache_beam/runners/dataflow/dataflow_runner.py | 5 +++-- .../apache_beam/runners/dataflow/dataflow_runner_test.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py index a91565e5be2d..0c23e6024dc6 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py @@ -156,8 +156,9 @@ def rank_error(msg): state_update_callback(response.currentState) _LOGGER.info('Job %s is in state %s', job_id, response.currentState) last_job_state = response.currentState - if str(response.currentState) not in ( - 'JOB_STATE_RUNNING', 'JOB_STATE_PAUSED', 'JOB_STATE_PAUSING'): + if str(response.currentState) not in ('JOB_STATE_RUNNING', + 'JOB_STATE_PAUSED', + 
'JOB_STATE_PAUSING'): # Stop checking for new messages on timeout, explanatory # message received, success, or a terminal job state caused # by the user that therefore doesn't require explanation. diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py index 5300c21feedc..e1b8be6682f9 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py @@ -269,8 +269,9 @@ def test_api_jobstate_to_pipeline_state(self): (values_enum.JOB_STATE_DRAINED, PipelineState.DRAINED), (values_enum.JOB_STATE_PENDING, PipelineState.PENDING), (values_enum.JOB_STATE_CANCELLING, PipelineState.CANCELLING), - (values_enum.JOB_STATE_RESOURCE_CLEANING_UP, - PipelineState.RESOURCE_CLEANING_UP), + ( + values_enum.JOB_STATE_RESOURCE_CLEANING_UP, + PipelineState.RESOURCE_CLEANING_UP), (values_enum.JOB_STATE_PAUSING, PipelineState.PAUSING), (values_enum.JOB_STATE_PAUSED, PipelineState.PAUSED), ] From e69e30f3c7218170fecba7fd107ea0bdccf81b0b Mon Sep 17 00:00:00 2001 From: Tarun Annapareddy Date: Thu, 12 Feb 2026 13:24:26 -0800 Subject: [PATCH 3/5] use Auto-generat --- .../clients/dataflow/dataflow_v1b3_client.py | 517 ++++++----- .../dataflow/dataflow_v1b3_messages.py | 869 +++++++++++------- 2 files changed, 839 insertions(+), 547 deletions(-) diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py index d89e699512d3..6f62edbae1cf 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py @@ -1,5 +1,8 @@ """Generated client library for dataflow version v1b3.""" # NOTE: This file is autogenerated and should not be edited by hand. 
+ +from __future__ import absolute_import + from apitools.base.py import base_api from . import dataflow_v1b3_messages as messages @@ -12,41 +15,26 @@ class DataflowV1b3(base_api.BaseApiClient): MTLS_BASE_URL = 'https://dataflow.mtls.googleapis.com/' _PACKAGE = 'dataflow' - _SCOPES = [ - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute' - ] + _SCOPES = ['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute'] _VERSION = 'v1b3' - _CLIENT_ID = '1042881264118.apps.googleusercontent.com' - _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' + _CLIENT_ID = 'CLIENT_ID' + _CLIENT_SECRET = 'CLIENT_SECRET' _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b' _CLIENT_CLASS_NAME = 'DataflowV1b3' _URL_VERSION = 'v1b3' _API_KEY = None - def __init__( - self, - url='', - credentials=None, - get_credentials=True, - http=None, - model=None, - log_request=False, - log_response=False, - credentials_args=None, - default_global_params=None, - additional_http_headers=None, - response_encoding=None): + def __init__(self, url='', credentials=None, + get_credentials=True, http=None, model=None, + log_request=False, log_response=False, + credentials_args=None, default_global_params=None, + additional_http_headers=None, response_encoding=None): """Create a new dataflow handle.""" url = url or self.BASE_URL super(DataflowV1b3, self).__init__( - url, - credentials=credentials, - get_credentials=get_credentials, - http=http, - model=model, - log_request=log_request, - log_response=log_response, + url, credentials=credentials, + get_credentials=get_credentials, http=http, model=model, + log_request=log_request, log_response=log_response, credentials_args=credentials_args, default_global_params=default_global_params, additional_http_headers=additional_http_headers, @@ -55,23 +43,15 @@ def __init__( self.projects_jobs_messages = self.ProjectsJobsMessagesService(self) self.projects_jobs_workItems = self.ProjectsJobsWorkItemsService(self) 
self.projects_jobs = self.ProjectsJobsService(self) - self.projects_locations_flexTemplates = self.ProjectsLocationsFlexTemplatesService( - self) - self.projects_locations_jobs_debug = self.ProjectsLocationsJobsDebugService( - self) - self.projects_locations_jobs_messages = self.ProjectsLocationsJobsMessagesService( - self) - self.projects_locations_jobs_snapshots = self.ProjectsLocationsJobsSnapshotsService( - self) - self.projects_locations_jobs_stages = self.ProjectsLocationsJobsStagesService( - self) - self.projects_locations_jobs_workItems = self.ProjectsLocationsJobsWorkItemsService( - self) + self.projects_locations_flexTemplates = self.ProjectsLocationsFlexTemplatesService(self) + self.projects_locations_jobs_debug = self.ProjectsLocationsJobsDebugService(self) + self.projects_locations_jobs_messages = self.ProjectsLocationsJobsMessagesService(self) + self.projects_locations_jobs_snapshots = self.ProjectsLocationsJobsSnapshotsService(self) + self.projects_locations_jobs_stages = self.ProjectsLocationsJobsStagesService(self) + self.projects_locations_jobs_workItems = self.ProjectsLocationsJobsWorkItemsService(self) self.projects_locations_jobs = self.ProjectsLocationsJobsService(self) - self.projects_locations_snapshots = self.ProjectsLocationsSnapshotsService( - self) - self.projects_locations_templates = self.ProjectsLocationsTemplatesService( - self) + self.projects_locations_snapshots = self.ProjectsLocationsSnapshotsService(self) + self.projects_locations_templates = self.ProjectsLocationsTemplatesService(self) self.projects_locations = self.ProjectsLocationsService(self) self.projects_snapshots = self.ProjectsSnapshotsService(self) self.projects_templates = self.ProjectsTemplatesService(self) @@ -84,7 +64,8 @@ class ProjectsJobsDebugService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsJobsDebugService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def GetConfig(self, request, 
global_params=None): r"""Get encoded debug configuration for component. Not cacheable. @@ -96,7 +77,8 @@ def GetConfig(self, request, global_params=None): (GetDebugConfigResponse) The response message. """ config = self.GetMethodConfig('GetConfig') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetConfig.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -108,7 +90,8 @@ def GetConfig(self, request, global_params=None): request_field='getDebugConfigRequest', request_type_name='DataflowProjectsJobsDebugGetConfigRequest', response_type_name='GetDebugConfigResponse', - supports_download=False, ) + supports_download=False, + ) def SendCapture(self, request, global_params=None): r"""Send encoded debug capture data for component. @@ -120,7 +103,8 @@ def SendCapture(self, request, global_params=None): (SendDebugCaptureResponse) The response message. """ config = self.GetMethodConfig('SendCapture') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) SendCapture.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -128,12 +112,12 @@ def SendCapture(self, request, global_params=None): ordered_params=['projectId', 'jobId'], path_params=['jobId', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture', + relative_path='v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture', request_field='sendDebugCaptureRequest', request_type_name='DataflowProjectsJobsDebugSendCaptureRequest', response_type_name='SendDebugCaptureResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsJobsMessagesService(base_api.BaseApiService): """Service class for the projects_jobs_messages resource.""" @@ -142,7 +126,8 @@ class ProjectsJobsMessagesService(base_api.BaseApiService): def __init__(self, 
client): super(DataflowV1b3.ProjectsJobsMessagesService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def List(self, request, global_params=None): r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.messages.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.messages.list` is not recommended, as you can only request the status of jobs that are running in `us-central1`. @@ -154,22 +139,21 @@ def List(self, request, global_params=None): (ListJobMessagesResponse) The response message. """ config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', method_id='dataflow.projects.jobs.messages.list', ordered_params=['projectId', 'jobId'], path_params=['jobId', 'projectId'], - query_params=[ - 'endTime', 'location', 'minimumImportance', 'pageSize', 'pageToken', - 'startTime' - ], + query_params=['endTime', 'location', 'minimumImportance', 'pageSize', 'pageToken', 'startTime'], relative_path='v1b3/projects/{projectId}/jobs/{jobId}/messages', request_field='', request_type_name='DataflowProjectsJobsMessagesListRequest', response_type_name='ListJobMessagesResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsJobsWorkItemsService(base_api.BaseApiService): """Service class for the projects_jobs_workItems resource.""" @@ -178,7 +162,8 @@ class ProjectsJobsWorkItemsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsJobsWorkItemsService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def Lease(self, request, global_params=None): r"""Leases a dataflow WorkItem to run. 
@@ -190,7 +175,8 @@ def Lease(self, request, global_params=None): (LeaseWorkItemResponse) The response message. """ config = self.GetMethodConfig('Lease') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Lease.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -202,7 +188,8 @@ def Lease(self, request, global_params=None): request_field='leaseWorkItemRequest', request_type_name='DataflowProjectsJobsWorkItemsLeaseRequest', response_type_name='LeaseWorkItemResponse', - supports_download=False, ) + supports_download=False, + ) def ReportStatus(self, request, global_params=None): r"""Reports the status of dataflow WorkItems leased by a worker. @@ -214,7 +201,8 @@ def ReportStatus(self, request, global_params=None): (ReportWorkItemStatusResponse) The response message. """ config = self.GetMethodConfig('ReportStatus') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) ReportStatus.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -222,12 +210,12 @@ def ReportStatus(self, request, global_params=None): ordered_params=['projectId', 'jobId'], path_params=['jobId', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus', + relative_path='v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus', request_field='reportWorkItemStatusRequest', request_type_name='DataflowProjectsJobsWorkItemsReportStatusRequest', response_type_name='ReportWorkItemStatusResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsJobsService(base_api.BaseApiService): """Service class for the projects_jobs resource.""" @@ -236,7 +224,8 @@ class ProjectsJobsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsJobsService, self).__init__(client) - 
self._upload_configs = {} + self._upload_configs = { + } def Aggregated(self, request, global_params=None): r"""List the jobs of a project across all regions. **Note:** This method doesn't support filtering the list of jobs by name. @@ -248,23 +237,24 @@ def Aggregated(self, request, global_params=None): (ListJobsResponse) The response message. """ config = self.GetMethodConfig('Aggregated') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Aggregated.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', method_id='dataflow.projects.jobs.aggregated', ordered_params=['projectId'], path_params=['projectId'], - query_params= - ['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'], + query_params=['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'], relative_path='v1b3/projects/{projectId}/jobs:aggregated', request_field='', request_type_name='DataflowProjectsJobsAggregatedRequest', response_type_name='ListJobsResponse', - supports_download=False, ) + supports_download=False, + ) def Create(self, request, global_params=None): - r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API. + r"""Creates a Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API. 
Args: request: (DataflowProjectsJobsCreateRequest) input message @@ -273,7 +263,8 @@ def Create(self, request, global_params=None): (Job) The response message. """ config = self.GetMethodConfig('Create') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -285,7 +276,8 @@ def Create(self, request, global_params=None): request_field='job', request_type_name='DataflowProjectsJobsCreateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def Get(self, request, global_params=None): r"""Gets the state of the specified Cloud Dataflow job. To get the state of a job, we recommend using `projects.locations.jobs.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.get` is not recommended, as you can only get the state of jobs that are running in `us-central1`. @@ -297,7 +289,8 @@ def Get(self, request, global_params=None): (Job) The response message. """ config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -309,7 +302,8 @@ def Get(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsJobsGetRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def GetMetrics(self, request, global_params=None): r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.getMetrics` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). 
Using `projects.jobs.getMetrics` is not recommended, as you can only request the status of jobs that are running in `us-central1`. @@ -321,7 +315,8 @@ def GetMetrics(self, request, global_params=None): (JobMetrics) The response message. """ config = self.GetMethodConfig('GetMetrics') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetMetrics.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -333,7 +328,8 @@ def GetMetrics(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsJobsGetMetricsRequest', response_type_name='JobMetrics', - supports_download=False, ) + supports_download=False, + ) def List(self, request, global_params=None): r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, because you can only get the list of jobs that are running in `us-central1`. `projects.locations.jobs.list` and `projects.jobs.list` support filtering the list of jobs by name. Filtering by name isn't supported by `projects.jobs.aggregated`. @@ -345,20 +341,21 @@ def List(self, request, global_params=None): (ListJobsResponse) The response message. 
""" config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', method_id='dataflow.projects.jobs.list', ordered_params=['projectId'], path_params=['projectId'], - query_params= - ['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'], + query_params=['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'], relative_path='v1b3/projects/{projectId}/jobs', request_field='', request_type_name='DataflowProjectsJobsListRequest', response_type_name='ListJobsResponse', - supports_download=False, ) + supports_download=False, + ) def Snapshot(self, request, global_params=None): r"""Snapshot the state of a streaming job. @@ -370,7 +367,8 @@ def Snapshot(self, request, global_params=None): (Snapshot) The response message. """ config = self.GetMethodConfig('Snapshot') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Snapshot.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -382,7 +380,8 @@ def Snapshot(self, request, global_params=None): request_field='snapshotJobRequest', request_type_name='DataflowProjectsJobsSnapshotRequest', response_type_name='Snapshot', - supports_download=False, ) + supports_download=False, + ) def Update(self, request, global_params=None): r"""Updates the state of an existing Cloud Dataflow job. To update the state of an existing job, we recommend using `projects.locations.jobs.update` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.update` is not recommended, as you can only update the state of jobs that are running in `us-central1`. @@ -394,7 +393,8 @@ def Update(self, request, global_params=None): (Job) The response message. 
""" config = self.GetMethodConfig('Update') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( http_method='PUT', @@ -406,7 +406,8 @@ def Update(self, request, global_params=None): request_field='job', request_type_name='DataflowProjectsJobsUpdateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsFlexTemplatesService(base_api.BaseApiService): """Service class for the projects_locations_flexTemplates resource.""" @@ -414,9 +415,9 @@ class ProjectsLocationsFlexTemplatesService(base_api.BaseApiService): _NAME = 'projects_locations_flexTemplates' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsFlexTemplatesService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsFlexTemplatesService, self).__init__(client) + self._upload_configs = { + } def Launch(self, request, global_params=None): r"""Launch a job with a FlexTemplate. @@ -428,7 +429,8 @@ def Launch(self, request, global_params=None): (LaunchFlexTemplateResponse) The response message. 
""" config = self.GetMethodConfig('Launch') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Launch.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -436,12 +438,12 @@ def Launch(self, request, global_params=None): ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/flexTemplates:launch', + relative_path='v1b3/projects/{projectId}/locations/{location}/flexTemplates:launch', request_field='launchFlexTemplateRequest', request_type_name='DataflowProjectsLocationsFlexTemplatesLaunchRequest', response_type_name='LaunchFlexTemplateResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsJobsDebugService(base_api.BaseApiService): """Service class for the projects_locations_jobs_debug resource.""" @@ -449,9 +451,9 @@ class ProjectsLocationsJobsDebugService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_debug' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsDebugService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsJobsDebugService, self).__init__(client) + self._upload_configs = { + } def GetConfig(self, request, global_params=None): r"""Get encoded debug configuration for component. Not cacheable. @@ -463,7 +465,8 @@ def GetConfig(self, request, global_params=None): (GetDebugConfigResponse) The response message. 
""" config = self.GetMethodConfig('GetConfig') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetConfig.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -471,12 +474,38 @@ def GetConfig(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getConfig', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getConfig', request_field='getDebugConfigRequest', request_type_name='DataflowProjectsLocationsJobsDebugGetConfigRequest', response_type_name='GetDebugConfigResponse', - supports_download=False, ) + supports_download=False, + ) + + def GetWorkerStacktraces(self, request, global_params=None): + r"""Get worker stacktraces from debug capture. + + Args: + request: (DataflowProjectsLocationsJobsDebugGetWorkerStacktracesRequest) input message + global_params: (StandardQueryParameters, default: None) global arguments + Returns: + (GetWorkerStacktracesResponse) The response message. 
+ """ + config = self.GetMethodConfig('GetWorkerStacktraces') + return self._RunMethod( + config, request, global_params=global_params) + + GetWorkerStacktraces.method_config = lambda: base_api.ApiMethodInfo( + http_method='POST', + method_id='dataflow.projects.locations.jobs.debug.getWorkerStacktraces', + ordered_params=['projectId', 'location', 'jobId'], + path_params=['jobId', 'location', 'projectId'], + query_params=[], + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getWorkerStacktraces', + request_field='getWorkerStacktracesRequest', + request_type_name='DataflowProjectsLocationsJobsDebugGetWorkerStacktracesRequest', + response_type_name='GetWorkerStacktracesResponse', + supports_download=False, + ) def SendCapture(self, request, global_params=None): r"""Send encoded debug capture data for component. @@ -488,7 +517,8 @@ def SendCapture(self, request, global_params=None): (SendDebugCaptureResponse) The response message. """ config = self.GetMethodConfig('SendCapture') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) SendCapture.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -496,13 +526,12 @@ def SendCapture(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/sendCapture', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/sendCapture', request_field='sendDebugCaptureRequest', - request_type_name= - 'DataflowProjectsLocationsJobsDebugSendCaptureRequest', + request_type_name='DataflowProjectsLocationsJobsDebugSendCaptureRequest', response_type_name='SendDebugCaptureResponse', - supports_download=False, ) + supports_download=False, + ) class 
ProjectsLocationsJobsMessagesService(base_api.BaseApiService): """Service class for the projects_locations_jobs_messages resource.""" @@ -510,9 +539,9 @@ class ProjectsLocationsJobsMessagesService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_messages' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsMessagesService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client) + self._upload_configs = { + } def List(self, request, global_params=None): r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.messages.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.messages.list` is not recommended, as you can only request the status of jobs that are running in `us-central1`. @@ -524,21 +553,21 @@ def List(self, request, global_params=None): (ListJobMessagesResponse) The response message. 
""" config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', method_id='dataflow.projects.locations.jobs.messages.list', ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], - query_params= - ['endTime', 'minimumImportance', 'pageSize', 'pageToken', 'startTime'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages', + query_params=['endTime', 'minimumImportance', 'pageSize', 'pageToken', 'startTime'], + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages', request_field='', request_type_name='DataflowProjectsLocationsJobsMessagesListRequest', response_type_name='ListJobMessagesResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsJobsSnapshotsService(base_api.BaseApiService): """Service class for the projects_locations_jobs_snapshots resource.""" @@ -546,9 +575,9 @@ class ProjectsLocationsJobsSnapshotsService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_snapshots' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsSnapshotsService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsJobsSnapshotsService, self).__init__(client) + self._upload_configs = { + } def List(self, request, global_params=None): r"""Lists snapshots. @@ -560,7 +589,8 @@ def List(self, request, global_params=None): (ListSnapshotsResponse) The response message. 
""" config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -568,12 +598,12 @@ def List(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/snapshots', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/snapshots', request_field='', request_type_name='DataflowProjectsLocationsJobsSnapshotsListRequest', response_type_name='ListSnapshotsResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsJobsStagesService(base_api.BaseApiService): """Service class for the projects_locations_jobs_stages resource.""" @@ -581,9 +611,9 @@ class ProjectsLocationsJobsStagesService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_stages' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsStagesService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsJobsStagesService, self).__init__(client) + self._upload_configs = { + } def GetExecutionDetails(self, request, global_params=None): r"""Request detailed information about the execution status of a stage of the job. EXPERIMENTAL. This API is subject to change or removal without notice. @@ -595,7 +625,8 @@ def GetExecutionDetails(self, request, global_params=None): (StageExecutionDetails) The response message. 
""" config = self.GetMethodConfig('GetExecutionDetails') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetExecutionDetails.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -603,13 +634,12 @@ def GetExecutionDetails(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId', 'stageId'], path_params=['jobId', 'location', 'projectId', 'stageId'], query_params=['endTime', 'pageSize', 'pageToken', 'startTime'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/stages/{stageId}/executionDetails', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/stages/{stageId}/executionDetails', request_field='', - request_type_name= - 'DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest', + request_type_name='DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest', response_type_name='StageExecutionDetails', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService): """Service class for the projects_locations_jobs_workItems resource.""" @@ -617,9 +647,9 @@ class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_workItems' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, self).__init__(client) + self._upload_configs = { + } def Lease(self, request, global_params=None): r"""Leases a dataflow WorkItem to run. @@ -631,7 +661,8 @@ def Lease(self, request, global_params=None): (LeaseWorkItemResponse) The response message. 
""" config = self.GetMethodConfig('Lease') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Lease.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -639,12 +670,12 @@ def Lease(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease', request_field='leaseWorkItemRequest', request_type_name='DataflowProjectsLocationsJobsWorkItemsLeaseRequest', response_type_name='LeaseWorkItemResponse', - supports_download=False, ) + supports_download=False, + ) def ReportStatus(self, request, global_params=None): r"""Reports the status of dataflow WorkItems leased by a worker. @@ -656,7 +687,8 @@ def ReportStatus(self, request, global_params=None): (ReportWorkItemStatusResponse) The response message. 
""" config = self.GetMethodConfig('ReportStatus') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) ReportStatus.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -664,13 +696,12 @@ def ReportStatus(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus', request_field='reportWorkItemStatusRequest', - request_type_name= - 'DataflowProjectsLocationsJobsWorkItemsReportStatusRequest', + request_type_name='DataflowProjectsLocationsJobsWorkItemsReportStatusRequest', response_type_name='ReportWorkItemStatusResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsJobsService(base_api.BaseApiService): """Service class for the projects_locations_jobs resource.""" @@ -679,10 +710,11 @@ class ProjectsLocationsJobsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsLocationsJobsService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def Create(self, request, global_params=None): - r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API. + r"""Creates a Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). 
Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API. Args: request: (DataflowProjectsLocationsJobsCreateRequest) input message @@ -691,7 +723,8 @@ def Create(self, request, global_params=None): (Job) The response message. """ config = self.GetMethodConfig('Create') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -703,7 +736,8 @@ def Create(self, request, global_params=None): request_field='job', request_type_name='DataflowProjectsLocationsJobsCreateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def Get(self, request, global_params=None): r"""Gets the state of the specified Cloud Dataflow job. To get the state of a job, we recommend using `projects.locations.jobs.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.get` is not recommended, as you can only get the state of jobs that are running in `us-central1`. @@ -715,7 +749,8 @@ def Get(self, request, global_params=None): (Job) The response message. 
""" config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -723,12 +758,12 @@ def Get(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=['view'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}', request_field='', request_type_name='DataflowProjectsLocationsJobsGetRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def GetExecutionDetails(self, request, global_params=None): r"""Request detailed information about the execution status of the job. EXPERIMENTAL. This API is subject to change or removal without notice. @@ -740,7 +775,8 @@ def GetExecutionDetails(self, request, global_params=None): (JobExecutionDetails) The response message. 
""" config = self.GetMethodConfig('GetExecutionDetails') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetExecutionDetails.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -748,13 +784,12 @@ def GetExecutionDetails(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=['pageSize', 'pageToken'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/executionDetails', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/executionDetails', request_field='', - request_type_name= - 'DataflowProjectsLocationsJobsGetExecutionDetailsRequest', + request_type_name='DataflowProjectsLocationsJobsGetExecutionDetailsRequest', response_type_name='JobExecutionDetails', - supports_download=False, ) + supports_download=False, + ) def GetMetrics(self, request, global_params=None): r"""Request the job status. To request the status of a job, we recommend using `projects.locations.jobs.getMetrics` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.getMetrics` is not recommended, as you can only request the status of jobs that are running in `us-central1`. @@ -766,7 +801,8 @@ def GetMetrics(self, request, global_params=None): (JobMetrics) The response message. 
""" config = self.GetMethodConfig('GetMetrics') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) GetMetrics.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -774,12 +810,12 @@ def GetMetrics(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=['startTime'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics', request_field='', request_type_name='DataflowProjectsLocationsJobsGetMetricsRequest', response_type_name='JobMetrics', - supports_download=False, ) + supports_download=False, + ) def List(self, request, global_params=None): r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, because you can only get the list of jobs that are running in `us-central1`. `projects.locations.jobs.list` and `projects.jobs.list` support filtering the list of jobs by name. Filtering by name isn't supported by `projects.jobs.aggregated`. @@ -791,7 +827,8 @@ def List(self, request, global_params=None): (ListJobsResponse) The response message. 
""" config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -803,7 +840,8 @@ def List(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsLocationsJobsListRequest', response_type_name='ListJobsResponse', - supports_download=False, ) + supports_download=False, + ) def Snapshot(self, request, global_params=None): r"""Snapshot the state of a streaming job. @@ -815,7 +853,8 @@ def Snapshot(self, request, global_params=None): (Snapshot) The response message. """ config = self.GetMethodConfig('Snapshot') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Snapshot.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -823,12 +862,12 @@ def Snapshot(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}:snapshot', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}:snapshot', request_field='snapshotJobRequest', request_type_name='DataflowProjectsLocationsJobsSnapshotRequest', response_type_name='Snapshot', - supports_download=False, ) + supports_download=False, + ) def Update(self, request, global_params=None): r"""Updates the state of an existing Cloud Dataflow job. To update the state of an existing job, we recommend using `projects.locations.jobs.update` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.update` is not recommended, as you can only update the state of jobs that are running in `us-central1`. 
@@ -840,7 +879,8 @@ def Update(self, request, global_params=None): (Job) The response message. """ config = self.GetMethodConfig('Update') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( http_method='PUT', @@ -848,12 +888,12 @@ def Update(self, request, global_params=None): ordered_params=['projectId', 'location', 'jobId'], path_params=['jobId', 'location', 'projectId'], query_params=['updateMask'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}', + relative_path='v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}', request_field='job', request_type_name='DataflowProjectsLocationsJobsUpdateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsSnapshotsService(base_api.BaseApiService): """Service class for the projects_locations_snapshots resource.""" @@ -861,9 +901,9 @@ class ProjectsLocationsSnapshotsService(base_api.BaseApiService): _NAME = 'projects_locations_snapshots' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsSnapshotsService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsSnapshotsService, self).__init__(client) + self._upload_configs = { + } def Delete(self, request, global_params=None): r"""Deletes a snapshot. @@ -875,7 +915,8 @@ def Delete(self, request, global_params=None): (DeleteSnapshotResponse) The response message. 
""" config = self.GetMethodConfig('Delete') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( http_method='DELETE', @@ -883,12 +924,12 @@ def Delete(self, request, global_params=None): ordered_params=['projectId', 'location', 'snapshotId'], path_params=['location', 'projectId', 'snapshotId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}', + relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}', request_field='', request_type_name='DataflowProjectsLocationsSnapshotsDeleteRequest', response_type_name='DeleteSnapshotResponse', - supports_download=False, ) + supports_download=False, + ) def Get(self, request, global_params=None): r"""Gets information about a snapshot. @@ -900,7 +941,8 @@ def Get(self, request, global_params=None): (Snapshot) The response message. """ config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -908,12 +950,12 @@ def Get(self, request, global_params=None): ordered_params=['projectId', 'location', 'snapshotId'], path_params=['location', 'projectId', 'snapshotId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}', + relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots/{snapshotId}', request_field='', request_type_name='DataflowProjectsLocationsSnapshotsGetRequest', response_type_name='Snapshot', - supports_download=False, ) + supports_download=False, + ) def List(self, request, global_params=None): r"""Lists snapshots. @@ -925,7 +967,8 @@ def List(self, request, global_params=None): (ListSnapshotsResponse) The response message. 
""" config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -933,12 +976,12 @@ def List(self, request, global_params=None): ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], query_params=['jobId'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/snapshots', + relative_path='v1b3/projects/{projectId}/locations/{location}/snapshots', request_field='', request_type_name='DataflowProjectsLocationsSnapshotsListRequest', response_type_name='ListSnapshotsResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsTemplatesService(base_api.BaseApiService): """Service class for the projects_locations_templates resource.""" @@ -946,9 +989,9 @@ class ProjectsLocationsTemplatesService(base_api.BaseApiService): _NAME = 'projects_locations_templates' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsTemplatesService, - self).__init__(client) - self._upload_configs = {} + super(DataflowV1b3.ProjectsLocationsTemplatesService, self).__init__(client) + self._upload_configs = { + } def Create(self, request, global_params=None): r"""Creates a Cloud Dataflow job from a template. Do not enter confidential information when you supply string values using the API. To create a job, we recommend using `projects.locations.templates.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.create` is not recommended, because your job will always start in `us-central1`. @@ -960,7 +1003,8 @@ def Create(self, request, global_params=None): (Job) The response message. 
""" config = self.GetMethodConfig('Create') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -968,12 +1012,12 @@ def Create(self, request, global_params=None): ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/templates', + relative_path='v1b3/projects/{projectId}/locations/{location}/templates', request_field='createJobFromTemplateRequest', request_type_name='DataflowProjectsLocationsTemplatesCreateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def Get(self, request, global_params=None): r"""Get the template associated with a template. To get the template, we recommend using `projects.locations.templates.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.get` is not recommended, because only templates that are running in `us-central1` are retrieved. @@ -985,7 +1029,8 @@ def Get(self, request, global_params=None): (GetTemplateResponse) The response message. 
""" config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -993,12 +1038,12 @@ def Get(self, request, global_params=None): ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], query_params=['gcsPath', 'view'], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/templates:get', + relative_path='v1b3/projects/{projectId}/locations/{location}/templates:get', request_field='', request_type_name='DataflowProjectsLocationsTemplatesGetRequest', response_type_name='GetTemplateResponse', - supports_download=False, ) + supports_download=False, + ) def Launch(self, request, global_params=None): r"""Launches a template. To launch a template, we recommend using `projects.locations.templates.launch` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.launch` is not recommended, because jobs launched from the template will always start in `us-central1`. @@ -1010,23 +1055,21 @@ def Launch(self, request, global_params=None): (LaunchTemplateResponse) The response message. 
""" config = self.GetMethodConfig('Launch') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Launch.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', method_id='dataflow.projects.locations.templates.launch', ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], - query_params=[ - 'dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', - 'gcsPath', 'validateOnly' - ], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/templates:launch', + query_params=['dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', 'gcsPath', 'validateOnly'], + relative_path='v1b3/projects/{projectId}/locations/{location}/templates:launch', request_field='launchTemplateParameters', request_type_name='DataflowProjectsLocationsTemplatesLaunchRequest', response_type_name='LaunchTemplateResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsLocationsService(base_api.BaseApiService): """Service class for the projects_locations resource.""" @@ -1035,7 +1078,8 @@ class ProjectsLocationsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsLocationsService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def WorkerMessages(self, request, global_params=None): r"""Send a worker_message to the service. @@ -1047,7 +1091,8 @@ def WorkerMessages(self, request, global_params=None): (SendWorkerMessagesResponse) The response message. 
""" config = self.GetMethodConfig('WorkerMessages') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) WorkerMessages.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -1055,12 +1100,12 @@ def WorkerMessages(self, request, global_params=None): ordered_params=['projectId', 'location'], path_params=['location', 'projectId'], query_params=[], - relative_path= - 'v1b3/projects/{projectId}/locations/{location}/WorkerMessages', + relative_path='v1b3/projects/{projectId}/locations/{location}/WorkerMessages', request_field='sendWorkerMessagesRequest', request_type_name='DataflowProjectsLocationsWorkerMessagesRequest', response_type_name='SendWorkerMessagesResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsSnapshotsService(base_api.BaseApiService): """Service class for the projects_snapshots resource.""" @@ -1069,7 +1114,8 @@ class ProjectsSnapshotsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsSnapshotsService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def Get(self, request, global_params=None): r"""Gets information about a snapshot. @@ -1081,7 +1127,8 @@ def Get(self, request, global_params=None): (Snapshot) The response message. """ config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -1093,7 +1140,8 @@ def Get(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsSnapshotsGetRequest', response_type_name='Snapshot', - supports_download=False, ) + supports_download=False, + ) def List(self, request, global_params=None): r"""Lists snapshots. 
@@ -1105,7 +1153,8 @@ def List(self, request, global_params=None): (ListSnapshotsResponse) The response message. """ config = self.GetMethodConfig('List') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -1117,7 +1166,8 @@ def List(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsSnapshotsListRequest', response_type_name='ListSnapshotsResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsTemplatesService(base_api.BaseApiService): """Service class for the projects_templates resource.""" @@ -1126,7 +1176,8 @@ class ProjectsTemplatesService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsTemplatesService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def Create(self, request, global_params=None): r"""Creates a Cloud Dataflow job from a template. Do not enter confidential information when you supply string values using the API. To create a job, we recommend using `projects.locations.templates.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.create` is not recommended, because your job will always start in `us-central1`. @@ -1138,7 +1189,8 @@ def Create(self, request, global_params=None): (Job) The response message. 
""" config = self.GetMethodConfig('Create') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -1150,7 +1202,8 @@ def Create(self, request, global_params=None): request_field='createJobFromTemplateRequest', request_type_name='DataflowProjectsTemplatesCreateRequest', response_type_name='Job', - supports_download=False, ) + supports_download=False, + ) def Get(self, request, global_params=None): r"""Get the template associated with a template. To get the template, we recommend using `projects.locations.templates.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.get` is not recommended, because only templates that are running in `us-central1` are retrieved. @@ -1162,7 +1215,8 @@ def Get(self, request, global_params=None): (GetTemplateResponse) The response message. """ config = self.GetMethodConfig('Get') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( http_method='GET', @@ -1174,7 +1228,8 @@ def Get(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsTemplatesGetRequest', response_type_name='GetTemplateResponse', - supports_download=False, ) + supports_download=False, + ) def Launch(self, request, global_params=None): r"""Launches a template. To launch a template, we recommend using `projects.locations.templates.launch` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.launch` is not recommended, because jobs launched from the template will always start in `us-central1`. 
@@ -1186,22 +1241,21 @@ def Launch(self, request, global_params=None): (LaunchTemplateResponse) The response message. """ config = self.GetMethodConfig('Launch') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) Launch.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', method_id='dataflow.projects.templates.launch', ordered_params=['projectId'], path_params=['projectId'], - query_params=[ - 'dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', - 'gcsPath', 'location', 'validateOnly' - ], + query_params=['dynamicTemplate_gcsPath', 'dynamicTemplate_stagingLocation', 'gcsPath', 'location', 'validateOnly'], relative_path='v1b3/projects/{projectId}/templates:launch', request_field='launchTemplateParameters', request_type_name='DataflowProjectsTemplatesLaunchRequest', response_type_name='LaunchTemplateResponse', - supports_download=False, ) + supports_download=False, + ) class ProjectsService(base_api.BaseApiService): """Service class for the projects resource.""" @@ -1210,7 +1264,8 @@ class ProjectsService(base_api.BaseApiService): def __init__(self, client): super(DataflowV1b3.ProjectsService, self).__init__(client) - self._upload_configs = {} + self._upload_configs = { + } def DeleteSnapshots(self, request, global_params=None): r"""Deletes a snapshot. @@ -1222,7 +1277,8 @@ def DeleteSnapshots(self, request, global_params=None): (DeleteSnapshotResponse) The response message. 
""" config = self.GetMethodConfig('DeleteSnapshots') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) DeleteSnapshots.method_config = lambda: base_api.ApiMethodInfo( http_method='DELETE', @@ -1234,7 +1290,8 @@ def DeleteSnapshots(self, request, global_params=None): request_field='', request_type_name='DataflowProjectsDeleteSnapshotsRequest', response_type_name='DeleteSnapshotResponse', - supports_download=False, ) + supports_download=False, + ) def WorkerMessages(self, request, global_params=None): r"""Send a worker_message to the service. @@ -1246,7 +1303,8 @@ def WorkerMessages(self, request, global_params=None): (SendWorkerMessagesResponse) The response message. """ config = self.GetMethodConfig('WorkerMessages') - return self._RunMethod(config, request, global_params=global_params) + return self._RunMethod( + config, request, global_params=global_params) WorkerMessages.method_config = lambda: base_api.ApiMethodInfo( http_method='POST', @@ -1258,4 +1316,5 @@ def WorkerMessages(self, request, global_params=None): request_field='sendWorkerMessagesRequest', request_type_name='DataflowProjectsWorkerMessagesRequest', response_type_name='SendWorkerMessagesResponse', - supports_download=False, ) + supports_download=False, + ) diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py index e3a60c1b0257..06d7a8fd738b 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py @@ -4,10 +4,13 @@ """ # NOTE: This file is autogenerated and should not be edited by hand. 
+from __future__ import absolute_import + from apitools.base.protorpclite import messages as _messages from apitools.base.py import encoding from apitools.base.py import extra_types + package = 'dataflow' @@ -105,6 +108,7 @@ class AutoscalingEvent(_messages.Message): workerPool: A short and friendly name for the worker pool this event refers to. """ + class EventTypeValueValuesEnum(_messages.Enum): r"""The type of autoscaling event to report. @@ -150,6 +154,7 @@ class AutoscalingSettings(_messages.Message): algorithm: The algorithm to use for autoscaling. maxNumWorkers: The maximum number of workers to cap scaling at. """ + class AlgorithmValueValuesEnum(_messages.Enum): r"""The algorithm to use for autoscaling. @@ -213,6 +218,61 @@ class BigTableIODetails(_messages.Message): tableId = _messages.StringField(3) +class BoundedTrie(_messages.Message): + r"""The message type used for encoding metrics of type bounded trie. + + Fields: + bound: The maximum number of elements to store before truncation. + root: A compact representation of all the elements in this trie. + singleton: A more efficient representation for metrics consisting of a + single value. + """ + + bound = _messages.IntegerField(1, variant=_messages.Variant.INT32) + root = _messages.MessageField('BoundedTrieNode', 2) + singleton = _messages.StringField(3, repeated=True) + + +class BoundedTrieNode(_messages.Message): + r"""A single node in a BoundedTrie. + + Messages: + ChildrenValue: Children of this node. Must be empty if truncated is true. + + Fields: + children: Children of this node. Must be empty if truncated is true. + truncated: Whether this node has been truncated. A truncated leaf + represents possibly many children with the same prefix. + """ + + @encoding.MapUnrecognizedFields('additionalProperties') + class ChildrenValue(_messages.Message): + r"""Children of this node. Must be empty if truncated is true. + + Messages: + AdditionalProperty: An additional property for a ChildrenValue object. 
+ + Fields: + additionalProperties: Additional properties of type ChildrenValue + """ + + class AdditionalProperty(_messages.Message): + r"""An additional property for a ChildrenValue object. + + Fields: + key: Name of the additional property. + value: A BoundedTrieNode attribute. + """ + + key = _messages.StringField(1) + value = _messages.MessageField('BoundedTrieNode', 2) + + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) + + children = _messages.MessageField('ChildrenValue', 1) + truncated = _messages.BooleanField(2) + + class BucketOptions(_messages.Message): r"""`BucketOptions` describes the bucket boundaries used in the histogram. @@ -325,8 +385,7 @@ class ContainerSpec(_messages.Message): sdkInfo: Required. SDK info of the Flex Template. """ - defaultEnvironment = _messages.MessageField( - 'FlexTemplateRuntimeEnvironment', 1) + defaultEnvironment = _messages.MessageField('FlexTemplateRuntimeEnvironment', 1) image = _messages.StringField(2) imageRepositoryCertPath = _messages.StringField(3) imageRepositoryPasswordSecretId = _messages.StringField(4) @@ -349,6 +408,7 @@ class CounterMetadata(_messages.Message): otherUnits: A string referring to the unit type. standardUnits: System defined Units, see above enum. """ + class KindValueValuesEnum(_messages.Enum): r"""Counter aggregation kind. @@ -436,6 +496,7 @@ class CounterStructuredName(_messages.Message): portion: Portion of this counter, either key or value. workerId: ID of a particular worker. """ + class OriginValueValuesEnum(_messages.Enum): r"""One of the standard Origins defined above. @@ -484,10 +545,11 @@ class CounterStructuredNameAndMetadata(_messages.Message): class CounterUpdate(_messages.Message): - r"""An update to a Counter sent from a worker. + r"""An update to a Counter sent from a worker. Next ID: 17 Fields: boolean: Boolean value for And, Or. 
+ boundedTrie: Bounded trie data cumulative: True if this counter is reported as the total cumulative aggregate value accumulated since the worker started working on this WorkItem. By default this is false, indicating that this counter is @@ -511,21 +573,21 @@ class CounterUpdate(_messages.Message): """ boolean = _messages.BooleanField(1) - cumulative = _messages.BooleanField(2) - distribution = _messages.MessageField('DistributionUpdate', 3) - floatingPoint = _messages.FloatField(4) - floatingPointList = _messages.MessageField('FloatingPointList', 5) - floatingPointMean = _messages.MessageField('FloatingPointMean', 6) - integer = _messages.MessageField('SplitInt64', 7) - integerGauge = _messages.MessageField('IntegerGauge', 8) - integerList = _messages.MessageField('IntegerList', 9) - integerMean = _messages.MessageField('IntegerMean', 10) - internal = _messages.MessageField('extra_types.JsonValue', 11) - nameAndKind = _messages.MessageField('NameAndKind', 12) - shortId = _messages.IntegerField(13) - stringList = _messages.MessageField('StringList', 14) - structuredNameAndMetadata = _messages.MessageField( - 'CounterStructuredNameAndMetadata', 15) + boundedTrie = _messages.MessageField('BoundedTrie', 2) + cumulative = _messages.BooleanField(3) + distribution = _messages.MessageField('DistributionUpdate', 4) + floatingPoint = _messages.FloatField(5) + floatingPointList = _messages.MessageField('FloatingPointList', 6) + floatingPointMean = _messages.MessageField('FloatingPointMean', 7) + integer = _messages.MessageField('SplitInt64', 8) + integerGauge = _messages.MessageField('IntegerGauge', 9) + integerList = _messages.MessageField('IntegerList', 10) + integerMean = _messages.MessageField('IntegerMean', 11) + internal = _messages.MessageField('extra_types.JsonValue', 12) + nameAndKind = _messages.MessageField('NameAndKind', 13) + shortId = _messages.IntegerField(14) + stringList = _messages.MessageField('StringList', 15) + structuredNameAndMetadata = 
_messages.MessageField('CounterStructuredNameAndMetadata', 16) class CreateJobFromTemplateRequest(_messages.Message): @@ -545,6 +607,7 @@ class CreateJobFromTemplateRequest(_messages.Message): which to direct the request. parameters: The runtime parameters to pass to the job. """ + @encoding.MapUnrecognizedFields('additionalProperties') class ParametersValue(_messages.Message): r"""The runtime parameters to pass to the job. @@ -555,6 +618,7 @@ class ParametersValue(_messages.Message): Fields: additionalProperties: Additional properties of type ParametersValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ParametersValue object. @@ -566,8 +630,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) environment = _messages.MessageField('RuntimeEnvironment', 1) gcsPath = _messages.StringField(2) @@ -617,6 +680,7 @@ class DataSamplingConfig(_messages.Message): exception sampling. If DISABLED is in the list, then sampling will be disabled and ignore the other given behaviors. Ordering does not matter. """ + class BehaviorsValueListEntryValuesEnum(_messages.Enum): r"""BehaviorsValueListEntryValuesEnum enum type. @@ -634,8 +698,7 @@ class BehaviorsValueListEntryValuesEnum(_messages.Enum): ALWAYS_ON = 2 EXCEPTIONS = 3 - behaviors = _messages.EnumField( - 'BehaviorsValueListEntryValuesEnum', 1, repeated=True) + behaviors = _messages.EnumField('BehaviorsValueListEntryValuesEnum', 1, repeated=True) class DataSamplingReport(_messages.Message): @@ -667,6 +730,18 @@ class DataSamplingReport(_messages.Message): translationErrorsCount = _messages.IntegerField(7) +class DataflowGaugeValue(_messages.Message): + r"""The gauge value of a metric. + + Fields: + measuredTime: The timestamp when the gauge was recorded. 
+ value: The value of the gauge. + """ + + measuredTime = _messages.StringField(1) + value = _messages.IntegerField(2) + + class DataflowHistogramValue(_messages.Message): r"""Summary statistics for a population of values. HistogramValue contains a sequence of buckets and gives a count of values that fall into each bucket. @@ -729,6 +804,7 @@ class DataflowProjectsJobsAggregatedRequest(_messages.Message): view: Deprecated. ListJobs always returns summaries now. Use GetJob for other JobViews. """ + class FilterValueValuesEnum(_messages.Enum): r"""The kind of filter to use. @@ -797,6 +873,7 @@ class DataflowProjectsJobsCreateRequest(_messages.Message): replaceJobId: Deprecated. This field is now in the Job message. view: The level of information requested in response. """ + class ViewValueValuesEnum(_messages.Enum): r"""The level of information requested in response. @@ -891,6 +968,7 @@ class DataflowProjectsJobsGetRequest(_messages.Message): projectId: The ID of the Cloud Platform project that the job belongs to. view: The level of information requested in response. """ + class ViewValueValuesEnum(_messages.Enum): r"""The level of information requested in response. @@ -944,6 +1022,7 @@ class DataflowProjectsJobsListRequest(_messages.Message): view: Deprecated. ListJobs always returns summaries now. Use GetJob for other JobViews. """ + class FilterValueValuesEnum(_messages.Enum): r"""The kind of filter to use. @@ -1023,6 +1102,7 @@ class DataflowProjectsJobsMessagesListRequest(_messages.Message): start_time. The default is the job creation time (i.e. beginning of messages). 
""" + class MinimumImportanceValueValuesEnum(_messages.Enum): r"""Filter to only get messages with importance >= level @@ -1136,8 +1216,7 @@ class DataflowProjectsJobsWorkItemsReportStatusRequest(_messages.Message): jobId = _messages.StringField(1, required=True) projectId = _messages.StringField(2, required=True) - reportWorkItemStatusRequest = _messages.MessageField( - 'ReportWorkItemStatusRequest', 3) + reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 3) class DataflowProjectsLocationsFlexTemplatesLaunchRequest(_messages.Message): @@ -1153,8 +1232,7 @@ class DataflowProjectsLocationsFlexTemplatesLaunchRequest(_messages.Message): belongs to. """ - launchFlexTemplateRequest = _messages.MessageField( - 'LaunchFlexTemplateRequest', 1) + launchFlexTemplateRequest = _messages.MessageField('LaunchFlexTemplateRequest', 1) location = _messages.StringField(2, required=True) projectId = _messages.StringField(3, required=True) @@ -1174,6 +1252,7 @@ class DataflowProjectsLocationsJobsCreateRequest(_messages.Message): replaceJobId: Deprecated. This field is now in the Job message. view: The level of information requested in response. """ + class ViewValueValuesEnum(_messages.Enum): r"""The level of information requested in response. @@ -1224,6 +1303,25 @@ class DataflowProjectsLocationsJobsDebugGetConfigRequest(_messages.Message): projectId = _messages.StringField(4, required=True) +class DataflowProjectsLocationsJobsDebugGetWorkerStacktracesRequest(_messages.Message): + r"""A DataflowProjectsLocationsJobsDebugGetWorkerStacktracesRequest object. + + Fields: + getWorkerStacktracesRequest: A GetWorkerStacktracesRequest resource to be + passed as the request body. + jobId: The job for which to get stacktraces. + location: The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + projectId: The project id. 
+ """ + + getWorkerStacktracesRequest = _messages.MessageField('GetWorkerStacktracesRequest', 1) + jobId = _messages.StringField(2, required=True) + location = _messages.StringField(3, required=True) + projectId = _messages.StringField(4, required=True) + + class DataflowProjectsLocationsJobsDebugSendCaptureRequest(_messages.Message): r"""A DataflowProjectsLocationsJobsDebugSendCaptureRequest object. @@ -1243,8 +1341,7 @@ class DataflowProjectsLocationsJobsDebugSendCaptureRequest(_messages.Message): sendDebugCaptureRequest = _messages.MessageField('SendDebugCaptureRequest', 4) -class DataflowProjectsLocationsJobsGetExecutionDetailsRequest(_messages.Message - ): +class DataflowProjectsLocationsJobsGetExecutionDetailsRequest(_messages.Message): r"""A DataflowProjectsLocationsJobsGetExecutionDetailsRequest object. Fields: @@ -1301,6 +1398,7 @@ class DataflowProjectsLocationsJobsGetRequest(_messages.Message): projectId: The ID of the Cloud Platform project that the job belongs to. view: The level of information requested in response. """ + class ViewValueValuesEnum(_messages.Enum): r"""The level of information requested in response. @@ -1354,6 +1452,7 @@ class DataflowProjectsLocationsJobsListRequest(_messages.Message): view: Deprecated. ListJobs always returns summaries now. Use GetJob for other JobViews. """ + class FilterValueValuesEnum(_messages.Enum): r"""The kind of filter to use. @@ -1433,6 +1532,7 @@ class DataflowProjectsLocationsJobsMessagesListRequest(_messages.Message): start_time. The default is the job creation time (i.e. beginning of messages). 
""" + class MinimumImportanceValueValuesEnum(_messages.Enum): r"""Filter to only get messages with importance >= level @@ -1510,8 +1610,7 @@ class DataflowProjectsLocationsJobsSnapshotsListRequest(_messages.Message): projectId = _messages.StringField(3, required=True) -class DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest( - _messages.Message): +class DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest(_messages.Message): r"""A DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest object. Fields: @@ -1585,8 +1684,7 @@ class DataflowProjectsLocationsJobsWorkItemsLeaseRequest(_messages.Message): projectId = _messages.StringField(4, required=True) -class DataflowProjectsLocationsJobsWorkItemsReportStatusRequest( - _messages.Message): +class DataflowProjectsLocationsJobsWorkItemsReportStatusRequest(_messages.Message): r"""A DataflowProjectsLocationsJobsWorkItemsReportStatusRequest object. Fields: @@ -1602,8 +1700,7 @@ class DataflowProjectsLocationsJobsWorkItemsReportStatusRequest( jobId = _messages.StringField(1, required=True) location = _messages.StringField(2, required=True) projectId = _messages.StringField(3, required=True) - reportWorkItemStatusRequest = _messages.MessageField( - 'ReportWorkItemStatusRequest', 4) + reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 4) class DataflowProjectsLocationsSnapshotsDeleteRequest(_messages.Message): @@ -1663,8 +1760,7 @@ class DataflowProjectsLocationsTemplatesCreateRequest(_messages.Message): belongs to. """ - createJobFromTemplateRequest = _messages.MessageField( - 'CreateJobFromTemplateRequest', 1) + createJobFromTemplateRequest = _messages.MessageField('CreateJobFromTemplateRequest', 1) location = _messages.StringField(2, required=True) projectId = _messages.StringField(3, required=True) @@ -1685,6 +1781,7 @@ class DataflowProjectsLocationsTemplatesGetRequest(_messages.Message): belongs to. view: The view to retrieve. Defaults to METADATA_ONLY. 
""" + class ViewValueValuesEnum(_messages.Enum): r"""The view to retrieve. Defaults to METADATA_ONLY. @@ -1725,8 +1822,7 @@ class DataflowProjectsLocationsTemplatesLaunchRequest(_messages.Message): dynamicTemplate_gcsPath = _messages.StringField(1) dynamicTemplate_stagingLocation = _messages.StringField(2) gcsPath = _messages.StringField(3) - launchTemplateParameters = _messages.MessageField( - 'LaunchTemplateParameters', 4) + launchTemplateParameters = _messages.MessageField('LaunchTemplateParameters', 4) location = _messages.StringField(5, required=True) projectId = _messages.StringField(6, required=True) validateOnly = _messages.BooleanField(7) @@ -1746,8 +1842,7 @@ class DataflowProjectsLocationsWorkerMessagesRequest(_messages.Message): location = _messages.StringField(1, required=True) projectId = _messages.StringField(2, required=True) - sendWorkerMessagesRequest = _messages.MessageField( - 'SendWorkerMessagesRequest', 3) + sendWorkerMessagesRequest = _messages.MessageField('SendWorkerMessagesRequest', 3) class DataflowProjectsSnapshotsGetRequest(_messages.Message): @@ -1789,8 +1884,7 @@ class DataflowProjectsTemplatesCreateRequest(_messages.Message): belongs to. """ - createJobFromTemplateRequest = _messages.MessageField( - 'CreateJobFromTemplateRequest', 1) + createJobFromTemplateRequest = _messages.MessageField('CreateJobFromTemplateRequest', 1) projectId = _messages.StringField(2, required=True) @@ -1810,6 +1904,7 @@ class DataflowProjectsTemplatesGetRequest(_messages.Message): belongs to. view: The view to retrieve. Defaults to METADATA_ONLY. """ + class ViewValueValuesEnum(_messages.Enum): r"""The view to retrieve. Defaults to METADATA_ONLY. 
@@ -1850,8 +1945,7 @@ class DataflowProjectsTemplatesLaunchRequest(_messages.Message): dynamicTemplate_gcsPath = _messages.StringField(1) dynamicTemplate_stagingLocation = _messages.StringField(2) gcsPath = _messages.StringField(3) - launchTemplateParameters = _messages.MessageField( - 'LaunchTemplateParameters', 4) + launchTemplateParameters = _messages.MessageField('LaunchTemplateParameters', 4) location = _messages.StringField(5) projectId = _messages.StringField(6, required=True) validateOnly = _messages.BooleanField(7) @@ -1867,8 +1961,7 @@ class DataflowProjectsWorkerMessagesRequest(_messages.Message): """ projectId = _messages.StringField(1, required=True) - sendWorkerMessagesRequest = _messages.MessageField( - 'SendWorkerMessagesRequest', 2) + sendWorkerMessagesRequest = _messages.MessageField('SendWorkerMessagesRequest', 2) class DatastoreIODetails(_messages.Message): @@ -1915,6 +2008,7 @@ class DerivedSource(_messages.Message): derivationMode: What source to base the produced source on (if any). source: Specification of the source. """ + class DerivationModeValueValuesEnum(_messages.Enum): r"""What source to base the produced source on (if any). @@ -2065,7 +2159,8 @@ class Environment(_messages.Message): by the user. These options are passed through the service and are used to recreate the SDK pipeline options on the worker in a language agnostic and platform independent way. - UserAgentValue: A description of the process that generated the request. + UserAgentValue: Optional. A description of the process that generated the + request. VersionValue: A structure describing which components and their versions of the service are required in order to run the job. @@ -2115,9 +2210,12 @@ class Environment(_messages.Message): The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object} + usePublicIps: Optional. True when any worker pool that uses public IPs is + present. 
useStreamingEngineResourceBasedBilling: Output only. Whether the job uses the Streaming Engine resource-based billing model. - userAgent: A description of the process that generated the request. + userAgent: Optional. A description of the process that generated the + request. version: A structure describing which components and their versions of the service are required in order to run the job. workerPools: The worker pools. At least one "harness" worker pool must be @@ -2134,6 +2232,7 @@ class Environment(_messages.Message): is specified, a zone in the control plane's region is chosen based on available capacity. """ + class FlexResourceSchedulingGoalValueValuesEnum(_messages.Enum): r"""Optional. Which Flexible Resource Scheduling mode to run in. @@ -2193,6 +2292,7 @@ class InternalExperimentsValue(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a InternalExperimentsValue object. @@ -2204,8 +2304,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class SdkPipelineOptionsValue(_messages.Message): @@ -2221,6 +2320,7 @@ class SdkPipelineOptionsValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a SdkPipelineOptionsValue object. 
@@ -2232,12 +2332,11 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class UserAgentValue(_messages.Message): - r"""A description of the process that generated the request. + r"""Optional. A description of the process that generated the request. Messages: AdditionalProperty: An additional property for a UserAgentValue object. @@ -2245,6 +2344,7 @@ class UserAgentValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserAgentValue object. @@ -2256,8 +2356,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class VersionValue(_messages.Message): @@ -2270,6 +2369,7 @@ class VersionValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a VersionValue object. 
@@ -2281,15 +2381,13 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) clusterManagerApiService = _messages.StringField(1) dataset = _messages.StringField(2) debugOptions = _messages.MessageField('DebugOptions', 3) experiments = _messages.StringField(4, repeated=True) - flexResourceSchedulingGoal = _messages.EnumField( - 'FlexResourceSchedulingGoalValueValuesEnum', 5) + flexResourceSchedulingGoal = _messages.EnumField('FlexResourceSchedulingGoalValueValuesEnum', 5) internalExperiments = _messages.MessageField('InternalExperimentsValue', 6) sdkPipelineOptions = _messages.MessageField('SdkPipelineOptionsValue', 7) serviceAccountEmail = _messages.StringField(8) @@ -2298,12 +2396,13 @@ class AdditionalProperty(_messages.Message): shuffleMode = _messages.EnumField('ShuffleModeValueValuesEnum', 11) streamingMode = _messages.EnumField('StreamingModeValueValuesEnum', 12) tempStoragePrefix = _messages.StringField(13) - useStreamingEngineResourceBasedBilling = _messages.BooleanField(14) - userAgent = _messages.MessageField('UserAgentValue', 15) - version = _messages.MessageField('VersionValue', 16) - workerPools = _messages.MessageField('WorkerPool', 17, repeated=True) - workerRegion = _messages.StringField(18) - workerZone = _messages.StringField(19) + usePublicIps = _messages.BooleanField(14) + useStreamingEngineResourceBasedBilling = _messages.BooleanField(15) + userAgent = _messages.MessageField('UserAgentValue', 16) + version = _messages.MessageField('VersionValue', 17) + workerPools = _messages.MessageField('WorkerPool', 18, repeated=True) + workerRegion = _messages.StringField(19) + workerZone = _messages.StringField(20) class ExecutionStageState(_messages.Message): @@ -2319,6 +2418,7 @@ class 
ExecutionStageState(_messages.Message): executionStageState: Executions stage states allow the same set of values as JobState. """ + class ExecutionStageStateValueValuesEnum(_messages.Enum): r"""Executions stage states allow the same set of values as JobState. @@ -2393,8 +2493,7 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum): currentStateTime = _messages.StringField(1) executionStageName = _messages.StringField(2) - executionStageState = _messages.EnumField( - 'ExecutionStageStateValueValuesEnum', 3) + executionStageState = _messages.EnumField('ExecutionStageStateValueValuesEnum', 3) class ExecutionStageSummary(_messages.Message): @@ -2417,6 +2516,7 @@ class ExecutionStageSummary(_messages.Message): prerequisiteStage: Other stages that must complete before this stage can run. """ + class KindValueValuesEnum(_messages.Enum): r"""Type of transform this stage is executing. @@ -2443,8 +2543,7 @@ class KindValueValuesEnum(_messages.Enum): SHUFFLE_KIND = 8 componentSource = _messages.MessageField('ComponentSource', 1, repeated=True) - componentTransform = _messages.MessageField( - 'ComponentTransform', 2, repeated=True) + componentTransform = _messages.MessageField('ComponentTransform', 2, repeated=True) id = _messages.StringField(3) inputSource = _messages.MessageField('StageSource', 4, repeated=True) kind = _messages.EnumField('KindValueValuesEnum', 5) @@ -2490,7 +2589,6 @@ class FlattenInstruction(_messages.Message): class FlexTemplateRuntimeEnvironment(_messages.Message): r"""The environment values to be set at runtime for flex template. - LINT.IfChange Enums: AutoscalingAlgorithmValueValuesEnum: The algorithm to use for autoscaling @@ -2514,6 +2612,8 @@ class FlexTemplateRuntimeEnvironment(_messages.Message): Fields: additionalExperiments: Additional experiment flags for the job. + additionalPipelineOptions: Optional. Additional pipeline option flags for + the job. additionalUserLabels: Additional user labels to be specified for the job. 
Keys and values must follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling- @@ -2588,6 +2688,7 @@ class FlexTemplateRuntimeEnvironment(_messages.Message): for launching worker instances to run your pipeline. In the future, worker_zone will take precedence. """ + class AutoscalingAlgorithmValueValuesEnum(_messages.Enum): r"""The algorithm to use for autoscaling @@ -2665,6 +2766,7 @@ class AdditionalUserLabelsValue(_messages.Message): additionalProperties: Additional properties of type AdditionalUserLabelsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a AdditionalUserLabelsValue object. @@ -2676,35 +2778,34 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) additionalExperiments = _messages.StringField(1, repeated=True) - additionalUserLabels = _messages.MessageField('AdditionalUserLabelsValue', 2) - autoscalingAlgorithm = _messages.EnumField( - 'AutoscalingAlgorithmValueValuesEnum', 3) - diskSizeGb = _messages.IntegerField(4, variant=_messages.Variant.INT32) - dumpHeapOnOom = _messages.BooleanField(5) - enableLauncherVmSerialPortLogging = _messages.BooleanField(6) - enableStreamingEngine = _messages.BooleanField(7) - flexrsGoal = _messages.EnumField('FlexrsGoalValueValuesEnum', 8) - ipConfiguration = _messages.EnumField('IpConfigurationValueValuesEnum', 9) - kmsKeyName = _messages.StringField(10) - launcherMachineType = _messages.StringField(11) - machineType = _messages.StringField(12) - maxWorkers = _messages.IntegerField(13, variant=_messages.Variant.INT32) - network = _messages.StringField(14) - numWorkers = _messages.IntegerField(15, variant=_messages.Variant.INT32) - saveHeapDumpsToGcsPath = _messages.StringField(16) - 
sdkContainerImage = _messages.StringField(17) - serviceAccountEmail = _messages.StringField(18) - stagingLocation = _messages.StringField(19) - streamingMode = _messages.EnumField('StreamingModeValueValuesEnum', 20) - subnetwork = _messages.StringField(21) - tempLocation = _messages.StringField(22) - workerRegion = _messages.StringField(23) - workerZone = _messages.StringField(24) - zone = _messages.StringField(25) + additionalPipelineOptions = _messages.StringField(2, repeated=True) + additionalUserLabels = _messages.MessageField('AdditionalUserLabelsValue', 3) + autoscalingAlgorithm = _messages.EnumField('AutoscalingAlgorithmValueValuesEnum', 4) + diskSizeGb = _messages.IntegerField(5, variant=_messages.Variant.INT32) + dumpHeapOnOom = _messages.BooleanField(6) + enableLauncherVmSerialPortLogging = _messages.BooleanField(7) + enableStreamingEngine = _messages.BooleanField(8) + flexrsGoal = _messages.EnumField('FlexrsGoalValueValuesEnum', 9) + ipConfiguration = _messages.EnumField('IpConfigurationValueValuesEnum', 10) + kmsKeyName = _messages.StringField(11) + launcherMachineType = _messages.StringField(12) + machineType = _messages.StringField(13) + maxWorkers = _messages.IntegerField(14, variant=_messages.Variant.INT32) + network = _messages.StringField(15) + numWorkers = _messages.IntegerField(16, variant=_messages.Variant.INT32) + saveHeapDumpsToGcsPath = _messages.StringField(17) + sdkContainerImage = _messages.StringField(18) + serviceAccountEmail = _messages.StringField(19) + stagingLocation = _messages.StringField(20) + streamingMode = _messages.EnumField('StreamingModeValueValuesEnum', 21) + subnetwork = _messages.StringField(22) + tempLocation = _messages.StringField(23) + workerRegion = _messages.StringField(24) + workerZone = _messages.StringField(25) + zone = _messages.StringField(26) class FloatingPointList(_messages.Message): @@ -2729,6 +2830,29 @@ class FloatingPointMean(_messages.Message): sum = _messages.FloatField(2) +class 
GPUUsage(_messages.Message): + r"""Information about the GPU usage on the worker. + + Fields: + timestamp: Required. Timestamp of the measurement. + utilization: Required. Utilization info about the GPU. + """ + + timestamp = _messages.StringField(1) + utilization = _messages.MessageField('GPUUtilization', 2) + + +class GPUUtilization(_messages.Message): + r"""Utilization details about the GPU. + + Fields: + rate: Required. GPU utilization rate of any kernel over the last sample + period in the range of [0, 1]. + """ + + rate = _messages.FloatField(1) + + class GetDebugConfigRequest(_messages.Message): r"""Request to get updated debug configuration for component. @@ -2771,6 +2895,7 @@ class GetTemplateResponse(_messages.Message): request will be indicated in the error_details. templateType: Template Type. """ + class TemplateTypeValueValuesEnum(_messages.Enum): r"""Template Type. @@ -2789,6 +2914,30 @@ class TemplateTypeValueValuesEnum(_messages.Enum): templateType = _messages.EnumField('TemplateTypeValueValuesEnum', 4) +class GetWorkerStacktracesRequest(_messages.Message): + r"""Request to get worker stacktraces from debug capture. + + Fields: + endTime: The end time for the stacktrace query. The returned stacktraces + will be a recent stack trace at or shortly before this time. + workerId: The worker for which to get stacktraces. The returned + stacktraces will be for the SDK harness running on this worker. + """ + + endTime = _messages.StringField(1) + workerId = _messages.StringField(2) + + +class GetWorkerStacktracesResponse(_messages.Message): + r"""Response to get worker stacktraces from debug capture. + + Fields: + sdks: Repeated as unified worker may have multiple SDK processes. + """ + + sdks = _messages.MessageField('Sdk', 1, repeated=True) + + class Histogram(_messages.Message): r"""Histogram of value counts for a distribution. 
Buckets have an inclusive lower bound and exclusive upper bound and use "1,2,5 bucketing": The first @@ -2821,6 +2970,7 @@ class HotKeyDebuggingInfo(_messages.Message): detectedHotKeys: Debugging information for each detected hot key. Keyed by a hash of the key. """ + @encoding.MapUnrecognizedFields('additionalProperties') class DetectedHotKeysValue(_messages.Message): r"""Debugging information for each detected hot key. Keyed by a hash of @@ -2833,6 +2983,7 @@ class DetectedHotKeysValue(_messages.Message): Fields: additionalProperties: Additional properties of type DetectedHotKeysValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a DetectedHotKeysValue object. @@ -2844,8 +2995,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('HotKeyInfo', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) detectedHotKeys = _messages.MessageField('DetectedHotKeysValue', 1) @@ -2898,8 +3048,7 @@ class InstructionInput(_messages.Message): """ outputNum = _messages.IntegerField(1, variant=_messages.Variant.INT32) - producerInstructionIndex = _messages.IntegerField( - 2, variant=_messages.Variant.INT32) + producerInstructionIndex = _messages.IntegerField(2, variant=_messages.Variant.INT32) class InstructionOutput(_messages.Message): @@ -2921,6 +3070,7 @@ class InstructionOutput(_messages.Message): systemName: System-defined name of this output. Unique across the workflow. """ + @encoding.MapUnrecognizedFields('additionalProperties') class CodecValue(_messages.Message): r"""The codec to use to encode data being written via this output. @@ -2931,6 +3081,7 @@ class CodecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a CodecValue object. 
@@ -2942,8 +3093,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) codec = _messages.MessageField('CodecValue', 1) name = _messages.StringField(2) @@ -3112,6 +3262,7 @@ class Job(_messages.Message): job to be replaced to the corresponding name prefixes of the new job. type: Optional. The type of Dataflow job. """ + class CurrentStateValueValuesEnum(_messages.Enum): r"""The current state of the job. Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise specified. A job in the @@ -3296,6 +3447,7 @@ class LabelsValue(_messages.Message): Fields: additionalProperties: Additional properties of type LabelsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a LabelsValue object. @@ -3307,8 +3459,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class TransformNameMappingValue(_messages.Message): @@ -3323,6 +3474,7 @@ class TransformNameMappingValue(_messages.Message): additionalProperties: Additional properties of type TransformNameMappingValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a TransformNameMappingValue object. 
@@ -3334,8 +3486,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) clientRequestId = _messages.StringField(1) createTime = _messages.StringField(2) @@ -3392,6 +3543,7 @@ class JobExecutionInfo(_messages.Message): Fields: stages: A mapping from each stage to the information about that stage. """ + @encoding.MapUnrecognizedFields('additionalProperties') class StagesValue(_messages.Message): r"""A mapping from each stage to the information about that stage. @@ -3402,6 +3554,7 @@ class StagesValue(_messages.Message): Fields: additionalProperties: Additional properties of type StagesValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a StagesValue object. @@ -3413,8 +3566,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('JobExecutionStageInfo', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) stages = _messages.MessageField('StagesValue', 1) @@ -3444,6 +3596,7 @@ class JobMessage(_messages.Message): messageText: The text of the message. time: The timestamp of the message. """ + class MessageImportanceValueValuesEnum(_messages.Enum): r"""Importance level of the message. @@ -3509,6 +3662,7 @@ class JobMetadata(_messages.Message): job. userDisplayProperties: List of display properties to help UI filter jobs. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UserDisplayPropertiesValue(_messages.Message): r"""List of display properties to help UI filter jobs. 
@@ -3521,6 +3675,7 @@ class UserDisplayPropertiesValue(_messages.Message): additionalProperties: Additional properties of type UserDisplayPropertiesValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserDisplayPropertiesValue object. @@ -3532,21 +3687,16 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) - bigTableDetails = _messages.MessageField( - 'BigTableIODetails', 1, repeated=True) - bigqueryDetails = _messages.MessageField( - 'BigQueryIODetails', 2, repeated=True) - datastoreDetails = _messages.MessageField( - 'DatastoreIODetails', 3, repeated=True) + bigTableDetails = _messages.MessageField('BigTableIODetails', 1, repeated=True) + bigqueryDetails = _messages.MessageField('BigQueryIODetails', 2, repeated=True) + datastoreDetails = _messages.MessageField('DatastoreIODetails', 3, repeated=True) fileDetails = _messages.MessageField('FileIODetails', 4, repeated=True) pubsubDetails = _messages.MessageField('PubSubIODetails', 5, repeated=True) sdkVersion = _messages.MessageField('SdkVersion', 6) spannerDetails = _messages.MessageField('SpannerIODetails', 7, repeated=True) - userDisplayProperties = _messages.MessageField( - 'UserDisplayPropertiesValue', 8) + userDisplayProperties = _messages.MessageField('UserDisplayPropertiesValue', 8) class JobMetrics(_messages.Message): @@ -3640,6 +3790,7 @@ class LaunchFlexTemplateParameter(_messages.Message): streaming job. When set, the job name should be the same as the running job. """ + @encoding.MapUnrecognizedFields('additionalProperties') class LaunchOptionsValue(_messages.Message): r"""Launch options for this flex template job. 
This is a common set of @@ -3653,6 +3804,7 @@ class LaunchOptionsValue(_messages.Message): Fields: additionalProperties: Additional properties of type LaunchOptionsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a LaunchOptionsValue object. @@ -3664,8 +3816,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class ParametersValue(_messages.Message): @@ -3677,6 +3828,7 @@ class ParametersValue(_messages.Message): Fields: additionalProperties: Additional properties of type ParametersValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ParametersValue object. @@ -3688,8 +3840,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class TransformNameMappingsValue(_messages.Message): @@ -3704,6 +3855,7 @@ class TransformNameMappingsValue(_messages.Message): additionalProperties: Additional properties of type TransformNameMappingsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a TransformNameMappingsValue object. 
@@ -3715,8 +3867,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) containerSpec = _messages.MessageField('ContainerSpec', 1) containerSpecGcsPath = _messages.StringField(2) @@ -3724,8 +3875,7 @@ class AdditionalProperty(_messages.Message): jobName = _messages.StringField(4) launchOptions = _messages.MessageField('LaunchOptionsValue', 5) parameters = _messages.MessageField('ParametersValue', 6) - transformNameMappings = _messages.MessageField( - 'TransformNameMappingsValue', 7) + transformNameMappings = _messages.MessageField('TransformNameMappingsValue', 7) update = _messages.BooleanField(8) @@ -3776,6 +3926,7 @@ class LaunchTemplateParameters(_messages.Message): update: If set, replace the existing pipeline with the name specified by jobName with this pipeline, preserving state. """ + @encoding.MapUnrecognizedFields('additionalProperties') class ParametersValue(_messages.Message): r"""The runtime parameters to pass to the job. @@ -3786,6 +3937,7 @@ class ParametersValue(_messages.Message): Fields: additionalProperties: Additional properties of type ParametersValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ParametersValue object. 
@@ -3797,8 +3949,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class TransformNameMappingValue(_messages.Message): @@ -3814,6 +3965,7 @@ class TransformNameMappingValue(_messages.Message): additionalProperties: Additional properties of type TransformNameMappingValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a TransformNameMappingValue object. @@ -3825,8 +3977,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) environment = _messages.MessageField('RuntimeEnvironment', 1) jobName = _messages.StringField(2) @@ -3858,6 +4009,8 @@ class LeaseWorkItemRequest(_messages.Message): location: The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains the WorkItem's job. + projectNumber: Optional. The project number of the project this worker + belongs to. requestedLeaseDuration: The initial lease period. unifiedWorkerRequest: Untranslated bag-of-bytes WorkRequest from UnifiedWorker. @@ -3867,6 +4020,7 @@ class LeaseWorkItemRequest(_messages.Message): workerId: Identifies the worker leasing work -- typically the ID of the virtual machine running the worker. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UnifiedWorkerRequestValue(_messages.Message): r"""Untranslated bag-of-bytes WorkRequest from UnifiedWorker. @@ -3879,6 +4033,7 @@ class UnifiedWorkerRequestValue(_messages.Message): additionalProperties: Properties of the object. 
Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UnifiedWorkerRequestValue object. @@ -3890,16 +4045,16 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) currentWorkerTime = _messages.StringField(1) location = _messages.StringField(2) - requestedLeaseDuration = _messages.StringField(3) - unifiedWorkerRequest = _messages.MessageField('UnifiedWorkerRequestValue', 4) - workItemTypes = _messages.StringField(5, repeated=True) - workerCapabilities = _messages.StringField(6, repeated=True) - workerId = _messages.StringField(7) + projectNumber = _messages.IntegerField(3) + requestedLeaseDuration = _messages.StringField(4) + unifiedWorkerRequest = _messages.MessageField('UnifiedWorkerRequestValue', 5) + workItemTypes = _messages.StringField(6, repeated=True) + workerCapabilities = _messages.StringField(7, repeated=True) + workerId = _messages.StringField(8) class LeaseWorkItemResponse(_messages.Message): @@ -3914,6 +4069,7 @@ class LeaseWorkItemResponse(_messages.Message): UnifiedWorker. workItems: A list of the leased WorkItems. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UnifiedWorkerResponseValue(_messages.Message): r"""Untranslated bag-of-bytes WorkResponse for UnifiedWorker. @@ -3926,6 +4082,7 @@ class UnifiedWorkerResponseValue(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UnifiedWorkerResponseValue object. 
@@ -3937,11 +4094,9 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) - unifiedWorkerResponse = _messages.MessageField( - 'UnifiedWorkerResponseValue', 1) + unifiedWorkerResponse = _messages.MessageField('UnifiedWorkerResponseValue', 1) workItems = _messages.MessageField('WorkItem', 2, repeated=True) @@ -3970,8 +4125,7 @@ class ListJobMessagesResponse(_messages.Message): more. """ - autoscalingEvents = _messages.MessageField( - 'AutoscalingEvent', 1, repeated=True) + autoscalingEvents = _messages.MessageField('AutoscalingEvent', 1, repeated=True) jobMessages = _messages.MessageField('JobMessage', 2, repeated=True) nextPageToken = _messages.StringField(3) @@ -4041,8 +4195,7 @@ class MemInfo(_messages.Message): totalGbMs: Total memory (RSS) usage since start up in GB * ms. """ - currentLimitBytes = _messages.IntegerField( - 1, variant=_messages.Variant.UINT64) + currentLimitBytes = _messages.IntegerField(1, variant=_messages.Variant.UINT64) currentOoms = _messages.IntegerField(2) currentRssBytes = _messages.IntegerField(3, variant=_messages.Variant.UINT64) timestamp = _messages.StringField(4) @@ -4085,6 +4238,7 @@ class MetricStructuredName(_messages.Message): metrics; will be "dataflow" for metrics defined by the Dataflow service or SDK. """ + @encoding.MapUnrecognizedFields('additionalProperties') class ContextValue(_messages.Message): r"""Zero or more labeled fields which identify the part of the job this @@ -4099,6 +4253,7 @@ class ContextValue(_messages.Message): Fields: additionalProperties: Additional properties of type ContextValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ContextValue object. 
@@ -4110,8 +4265,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) context = _messages.MessageField('ContextValue', 1) name = _messages.StringField(2) @@ -4122,6 +4276,10 @@ class MetricUpdate(_messages.Message): r"""Describes the state of a metric. Fields: + boundedTrie: Worker-computed aggregate value for the "Trie" aggregation + kind. The only possible value type is a BoundedTrieNode. Introduced this + field to avoid breaking older SDKs when Dataflow service starts to + populate the `bounded_trie` field. cumulative: True if this metric is reported as the total cumulative aggregate value accumulated since the worker started working on this WorkItem. By default this is false, indicating that this metric is @@ -4151,24 +4309,28 @@ class MetricUpdate(_messages.Message): Double, and Boolean. set: Worker-computed aggregate value for the "Set" aggregation kind. The only possible value type is a list of Values whose type can be Long, - Double, or String, according to the metric's type. All Values in the - list must be of the same type. + Double, String, or BoundedTrie according to the metric's type. All + Values in the list must be of the same type. + trie: Worker-computed aggregate value for the "Trie" aggregation kind. The + only possible value type is a BoundedTrieNode. updateTime: Timestamp associated with the metric value. Optional when workers are reporting work progress; it will be filled in responses from the metrics API. 
""" - cumulative = _messages.BooleanField(1) - distribution = _messages.MessageField('extra_types.JsonValue', 2) - gauge = _messages.MessageField('extra_types.JsonValue', 3) - internal = _messages.MessageField('extra_types.JsonValue', 4) - kind = _messages.StringField(5) - meanCount = _messages.MessageField('extra_types.JsonValue', 6) - meanSum = _messages.MessageField('extra_types.JsonValue', 7) - name = _messages.MessageField('MetricStructuredName', 8) - scalar = _messages.MessageField('extra_types.JsonValue', 9) - set = _messages.MessageField('extra_types.JsonValue', 10) - updateTime = _messages.StringField(11) + boundedTrie = _messages.MessageField('extra_types.JsonValue', 1) + cumulative = _messages.BooleanField(2) + distribution = _messages.MessageField('extra_types.JsonValue', 3) + gauge = _messages.MessageField('extra_types.JsonValue', 4) + internal = _messages.MessageField('extra_types.JsonValue', 5) + kind = _messages.StringField(6) + meanCount = _messages.MessageField('extra_types.JsonValue', 7) + meanSum = _messages.MessageField('extra_types.JsonValue', 8) + name = _messages.MessageField('MetricStructuredName', 9) + scalar = _messages.MessageField('extra_types.JsonValue', 10) + set = _messages.MessageField('extra_types.JsonValue', 11) + trie = _messages.MessageField('extra_types.JsonValue', 12) + updateTime = _messages.StringField(13) class MetricValue(_messages.Message): @@ -4180,9 +4342,11 @@ class MetricValue(_messages.Message): Fields: metric: Base name for this metric. metricLabels: Optional. Set of metric labels for this metric. + valueGauge64: Non-cumulative int64 value of this metric. valueHistogram: Histogram value of this metric. valueInt64: Integer value of this metric. """ + @encoding.MapUnrecognizedFields('additionalProperties') class MetricLabelsValue(_messages.Message): r"""Optional. Set of metric labels for this metric. 
@@ -4194,6 +4358,7 @@ class MetricLabelsValue(_messages.Message): Fields: additionalProperties: Additional properties of type MetricLabelsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a MetricLabelsValue object. @@ -4205,13 +4370,13 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) metric = _messages.StringField(1) metricLabels = _messages.MessageField('MetricLabelsValue', 2) - valueHistogram = _messages.MessageField('DataflowHistogramValue', 3) - valueInt64 = _messages.IntegerField(4) + valueGauge64 = _messages.MessageField('DataflowGaugeValue', 3) + valueHistogram = _messages.MessageField('DataflowHistogramValue', 4) + valueInt64 = _messages.IntegerField(5) class MountedDataDisk(_messages.Message): @@ -4247,6 +4412,7 @@ class NameAndKind(_messages.Message): kind: Counter aggregation kind. name: Name of the counter. """ + class KindValueValuesEnum(_messages.Enum): r"""Counter aggregation kind. @@ -4332,6 +4498,7 @@ class ParDoInstruction(_messages.Message): sideInputs: Zero or more side inputs. userFn: The user function to invoke. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UserFnValue(_messages.Message): r"""The user function to invoke. @@ -4342,6 +4509,7 @@ class UserFnValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserFnValue object. 
@@ -4353,8 +4521,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) input = _messages.MessageField('InstructionInput', 1) multiOutputInfos = _messages.MessageField('MultiOutputInfo', 2, repeated=True) @@ -4446,6 +4613,7 @@ class ParameterMetadata(_messages.Message): been provided). regexes: Optional. Regexes that the parameter must match. """ + class ParamTypeValueValuesEnum(_messages.Enum): r"""Optional. The type of the parameter. Used for selecting input picker. @@ -4521,6 +4689,7 @@ class CustomMetadataValue(_messages.Message): Fields: additionalProperties: Additional properties of type CustomMetadataValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a CustomMetadataValue object. @@ -4532,13 +4701,11 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) customMetadata = _messages.MessageField('CustomMetadataValue', 1) defaultValue = _messages.StringField(2) - enumOptions = _messages.MessageField( - 'ParameterMetadataEnumOption', 3, repeated=True) + enumOptions = _messages.MessageField('ParameterMetadataEnumOption', 3, repeated=True) groupName = _messages.StringField(4) helpText = _messages.StringField(5) hiddenUi = _messages.BooleanField(6) @@ -4587,6 +4754,7 @@ class PartialGroupByKeyInstruction(_messages.Message): sideInputs: Zero or more side inputs. valueCombiningFn: The value combining function to invoke. 
""" + @encoding.MapUnrecognizedFields('additionalProperties') class InputElementCodecValue(_messages.Message): r"""The codec to use for interpreting an element in the input PTable. @@ -4598,6 +4766,7 @@ class InputElementCodecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a InputElementCodecValue object. @@ -4609,8 +4778,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class ValueCombiningFnValue(_messages.Message): @@ -4623,6 +4791,7 @@ class ValueCombiningFnValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ValueCombiningFnValue object. @@ -4634,8 +4803,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) input = _messages.MessageField('InstructionInput', 1) inputElementCodec = _messages.MessageField('InputElementCodecValue', 2) @@ -4671,8 +4839,7 @@ class PerWorkerMetrics(_messages.Message): and namespace. 
""" - perStepNamespaceMetrics = _messages.MessageField( - 'PerStepNamespaceMetrics', 1, repeated=True) + perStepNamespaceMetrics = _messages.MessageField('PerStepNamespaceMetrics', 1, repeated=True) class PipelineDescription(_messages.Message): @@ -4691,10 +4858,8 @@ class PipelineDescription(_messages.Message): """ displayData = _messages.MessageField('DisplayData', 1, repeated=True) - executionPipelineStage = _messages.MessageField( - 'ExecutionStageSummary', 2, repeated=True) - originalPipelineTransform = _messages.MessageField( - 'TransformSummary', 3, repeated=True) + executionPipelineStage = _messages.MessageField('ExecutionStageSummary', 2, repeated=True) + originalPipelineTransform = _messages.MessageField('TransformSummary', 3, repeated=True) stepNamesHash = _messages.StringField(4) @@ -4828,6 +4993,8 @@ class ReportWorkItemStatusRequest(_messages.Message): location: The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains the WorkItem's job. + projectNumber: Optional. The project number of the project which owns the + WorkItem's job. unifiedWorkerRequest: Untranslated bag-of-bytes WorkProgressUpdateRequest from UnifiedWorker. workItemStatuses: The order is unimportant, except that the order of the @@ -4838,6 +5005,7 @@ class ReportWorkItemStatusRequest(_messages.Message): currently has the lease on the WorkItem, the report will be dropped (with an error response). """ + @encoding.MapUnrecognizedFields('additionalProperties') class UnifiedWorkerRequestValue(_messages.Message): r"""Untranslated bag-of-bytes WorkProgressUpdateRequest from @@ -4851,6 +5019,7 @@ class UnifiedWorkerRequestValue(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UnifiedWorkerRequestValue object. 
@@ -4862,14 +5031,14 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) currentWorkerTime = _messages.StringField(1) location = _messages.StringField(2) - unifiedWorkerRequest = _messages.MessageField('UnifiedWorkerRequestValue', 3) - workItemStatuses = _messages.MessageField('WorkItemStatus', 4, repeated=True) - workerId = _messages.StringField(5) + projectNumber = _messages.IntegerField(3) + unifiedWorkerRequest = _messages.MessageField('UnifiedWorkerRequestValue', 4) + workItemStatuses = _messages.MessageField('WorkItemStatus', 5, repeated=True) + workerId = _messages.StringField(6) class ReportWorkItemStatusResponse(_messages.Message): @@ -4887,6 +5056,7 @@ class ReportWorkItemStatusResponse(_messages.Message): WorkItemStatus messages in the ReportWorkItemStatusRequest which resulting in this response. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UnifiedWorkerResponseValue(_messages.Message): r"""Untranslated bag-of-bytes WorkProgressUpdateResponse for @@ -4900,6 +5070,7 @@ class UnifiedWorkerResponseValue(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UnifiedWorkerResponseValue object. 
@@ -4911,13 +5082,10 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) - unifiedWorkerResponse = _messages.MessageField( - 'UnifiedWorkerResponseValue', 1) - workItemServiceStates = _messages.MessageField( - 'WorkItemServiceState', 2, repeated=True) + unifiedWorkerResponse = _messages.MessageField('UnifiedWorkerResponseValue', 1) + workItemServiceStates = _messages.MessageField('WorkItemServiceState', 2, repeated=True) class ReportedParallelism(_messages.Message): @@ -4948,8 +5116,10 @@ class ResourceUtilizationReport(_messages.Message): Fields: containers: Per container information. Key: container name. cpuTime: CPU utilization samples. + gpuUsage: Optional. GPU usage samples. memoryInfo: Memory utilization samples. """ + @encoding.MapUnrecognizedFields('additionalProperties') class ContainersValue(_messages.Message): r"""Per container information. Key: container name. @@ -4960,6 +5130,7 @@ class ContainersValue(_messages.Message): Fields: additionalProperties: Additional properties of type ContainersValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ContainersValue object. 
@@ -4971,12 +5142,12 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('ResourceUtilizationReport', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) containers = _messages.MessageField('ContainersValue', 1) cpuTime = _messages.MessageField('CPUTime', 2, repeated=True) - memoryInfo = _messages.MessageField('MemInfo', 3, repeated=True) + gpuUsage = _messages.MessageField('GPUUsage', 3, repeated=True) + memoryInfo = _messages.MessageField('MemInfo', 4, repeated=True) class ResourceUtilizationReportResponse(_messages.Message): @@ -4984,8 +5155,9 @@ class ResourceUtilizationReportResponse(_messages.Message): """ + class RuntimeEnvironment(_messages.Message): - r"""The environment values to set at runtime. LINT.IfChange + r"""The environment values to set at runtime. Enums: IpConfigurationValueValuesEnum: Optional. Configuration for VM IPs. @@ -5008,6 +5180,8 @@ class RuntimeEnvironment(_messages.Message): Fields: additionalExperiments: Optional. Additional experiment flags for the job, specified with the `--experiments` option. + additionalPipelineOptions: Optional. Additional pipeline option flags for + the job. additionalUserLabels: Optional. Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the [labeling restrictions](https://cloud.google.com/compute/docs/labeling- @@ -5065,6 +5239,7 @@ class RuntimeEnvironment(_messages.Message): for launching worker instances to run your pipeline. In the future, worker_zone will take precedence. """ + class IpConfigurationValueValuesEnum(_messages.Enum): r"""Optional. Configuration for VM IPs. 
@@ -5116,6 +5291,7 @@ class AdditionalUserLabelsValue(_messages.Message): additionalProperties: Additional properties of type AdditionalUserLabelsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a AdditionalUserLabelsValue object. @@ -5127,27 +5303,27 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) additionalExperiments = _messages.StringField(1, repeated=True) - additionalUserLabels = _messages.MessageField('AdditionalUserLabelsValue', 2) - bypassTempDirValidation = _messages.BooleanField(3) - diskSizeGb = _messages.IntegerField(4, variant=_messages.Variant.INT32) - enableStreamingEngine = _messages.BooleanField(5) - ipConfiguration = _messages.EnumField('IpConfigurationValueValuesEnum', 6) - kmsKeyName = _messages.StringField(7) - machineType = _messages.StringField(8) - maxWorkers = _messages.IntegerField(9, variant=_messages.Variant.INT32) - network = _messages.StringField(10) - numWorkers = _messages.IntegerField(11, variant=_messages.Variant.INT32) - serviceAccountEmail = _messages.StringField(12) - streamingMode = _messages.EnumField('StreamingModeValueValuesEnum', 13) - subnetwork = _messages.StringField(14) - tempLocation = _messages.StringField(15) - workerRegion = _messages.StringField(16) - workerZone = _messages.StringField(17) - zone = _messages.StringField(18) + additionalPipelineOptions = _messages.StringField(2, repeated=True) + additionalUserLabels = _messages.MessageField('AdditionalUserLabelsValue', 3) + bypassTempDirValidation = _messages.BooleanField(4) + diskSizeGb = _messages.IntegerField(5, variant=_messages.Variant.INT32) + enableStreamingEngine = _messages.BooleanField(6) + ipConfiguration = _messages.EnumField('IpConfigurationValueValuesEnum', 7) + kmsKeyName = 
_messages.StringField(8) + machineType = _messages.StringField(9) + maxWorkers = _messages.IntegerField(10, variant=_messages.Variant.INT32) + network = _messages.StringField(11) + numWorkers = _messages.IntegerField(12, variant=_messages.Variant.INT32) + serviceAccountEmail = _messages.StringField(13) + streamingMode = _messages.EnumField('StreamingModeValueValuesEnum', 14) + subnetwork = _messages.StringField(15) + tempLocation = _messages.StringField(16) + workerRegion = _messages.StringField(17) + workerZone = _messages.StringField(18) + zone = _messages.StringField(19) class RuntimeMetadata(_messages.Message): @@ -5168,6 +5344,8 @@ class RuntimeUpdatableParams(_messages.Message): during job creation. Fields: + acceptableBacklogDuration: Optional. The backlog threshold duration in + seconds for autoscaling. Value must be non-negative. maxNumWorkers: The maximum number of workers to cap autoscaling at. This field is currently only supported for Streaming Engine jobs. minNumWorkers: The minimum number of workers to scale down to. This field @@ -5180,9 +5358,10 @@ class RuntimeUpdatableParams(_messages.Message): pipeline). """ - maxNumWorkers = _messages.IntegerField(1, variant=_messages.Variant.INT32) - minNumWorkers = _messages.IntegerField(2, variant=_messages.Variant.INT32) - workerUtilizationHint = _messages.FloatField(3) + acceptableBacklogDuration = _messages.StringField(1) + maxNumWorkers = _messages.IntegerField(2, variant=_messages.Variant.INT32) + minNumWorkers = _messages.IntegerField(3, variant=_messages.Variant.INT32) + workerUtilizationHint = _messages.FloatField(4) class SDKInfo(_messages.Message): @@ -5195,6 +5374,7 @@ class SDKInfo(_messages.Message): language: Required. The SDK Language. version: Optional. The SDK version. """ + class LanguageValueValuesEnum(_messages.Enum): r"""Required. The SDK Language. @@ -5203,16 +5383,30 @@ class LanguageValueValuesEnum(_messages.Enum): JAVA: Java. PYTHON: Python. GO: Go. + YAML: YAML. 
""" UNKNOWN = 0 JAVA = 1 PYTHON = 2 GO = 3 + YAML = 4 language = _messages.EnumField('LanguageValueValuesEnum', 1) version = _messages.StringField(2) +class Sdk(_messages.Message): + r"""A structured representation of an SDK. + + Fields: + sdkId: The SDK harness id. + stacks: The stacktraces for the processes running on the SDK harness. + """ + + sdkId = _messages.StringField(1) + stacks = _messages.MessageField('Stack', 2, repeated=True) + + class SdkBug(_messages.Message): r"""A bug found in the Dataflow SDK. @@ -5225,6 +5419,7 @@ class SdkBug(_messages.Message): type: Output only. Describes the impact of this SDK bug. uri: Output only. Link to more information on the bug. """ + class SeverityValueValuesEnum(_messages.Enum): r"""Output only. How severe the SDK bug is. @@ -5300,6 +5495,7 @@ class SdkVersion(_messages.Message): version: The version of the SDK used to run the job. versionDisplayName: A readable string describing the version of the SDK. """ + class SdkSupportStatusValueValuesEnum(_messages.Enum): r"""The support status for this SDK version. @@ -5341,6 +5537,7 @@ class SendDebugCaptureRequest(_messages.Message): that contains the job specified by job_id. workerId: The worker id, i.e., VM hostname. """ + class DataFormatValueValuesEnum(_messages.Enum): r"""Format for the data field above (id=5). @@ -5391,8 +5588,7 @@ class SendWorkerMessagesResponse(_messages.Message): workerMessageResponses: The servers response to the worker messages. """ - workerMessageResponses = _messages.MessageField( - 'WorkerMessageResponse', 1, repeated=True) + workerMessageResponses = _messages.MessageField('WorkerMessageResponse', 1, repeated=True) class SeqMapTask(_messages.Message): @@ -5411,6 +5607,7 @@ class SeqMapTask(_messages.Message): workflow. userFn: The user function to invoke. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UserFnValue(_messages.Message): r"""The user function to invoke. 
@@ -5421,6 +5618,7 @@ class UserFnValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserFnValue object. @@ -5432,8 +5630,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) inputs = _messages.MessageField('SideInputInfo', 1, repeated=True) name = _messages.StringField(2) @@ -5493,6 +5690,7 @@ class SideInputInfo(_messages.Message): tag: The id of the tag the user code will access this side input by; this should correspond to the tag of some MultiOutputInfo. """ + @encoding.MapUnrecognizedFields('additionalProperties') class KindValue(_messages.Message): r"""How to interpret the source element(s) as a side input value. @@ -5503,6 +5701,7 @@ class KindValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a KindValue object. @@ -5514,8 +5713,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) kind = _messages.MessageField('KindValue', 1) sources = _messages.MessageField('Source', 2, repeated=True) @@ -5533,6 +5731,7 @@ class Sink(_messages.Message): codec: The codec to use to encode data written to the sink. spec: The sink to write to, plus its parameters. """ + @encoding.MapUnrecognizedFields('additionalProperties') class CodecValue(_messages.Message): r"""The codec to use to encode data written to the sink. 
@@ -5543,6 +5742,7 @@ class CodecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a CodecValue object. @@ -5554,8 +5754,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class SpecValue(_messages.Message): @@ -5567,6 +5766,7 @@ class SpecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a SpecValue object. @@ -5578,8 +5778,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) codec = _messages.MessageField('CodecValue', 1) spec = _messages.MessageField('SpecValue', 2) @@ -5604,6 +5803,7 @@ class Snapshot(_messages.Message): state: State of the snapshot. ttl: The time after which this snapshot will be automatically deleted. """ + class StateValueValuesEnum(_messages.Enum): r"""State of the snapshot. 
@@ -5628,8 +5828,7 @@ class StateValueValuesEnum(_messages.Enum): diskSizeBytes = _messages.IntegerField(3) id = _messages.StringField(4) projectId = _messages.StringField(5) - pubsubMetadata = _messages.MessageField( - 'PubsubSnapshotMetadata', 6, repeated=True) + pubsubMetadata = _messages.MessageField('PubsubSnapshotMetadata', 6, repeated=True) region = _messages.StringField(7) sourceJobId = _messages.StringField(8) state = _messages.EnumField('StateValueValuesEnum', 9) @@ -5690,6 +5889,7 @@ class Source(_messages.Message): field populated. spec: The source to read from, plus its parameters. """ + @encoding.MapUnrecognizedFields('additionalProperties') class BaseSpecsValueListEntry(_messages.Message): r"""A BaseSpecsValueListEntry object. @@ -5701,6 +5901,7 @@ class BaseSpecsValueListEntry(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a BaseSpecsValueListEntry object. @@ -5712,8 +5913,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class CodecValue(_messages.Message): @@ -5725,6 +5925,7 @@ class CodecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a CodecValue object. 
@@ -5736,8 +5937,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class SpecValue(_messages.Message): @@ -5749,6 +5949,7 @@ class SpecValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a SpecValue object. @@ -5760,11 +5961,9 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) - baseSpecs = _messages.MessageField( - 'BaseSpecsValueListEntry', 1, repeated=True) + baseSpecs = _messages.MessageField('BaseSpecsValueListEntry', 1, repeated=True) codec = _messages.MessageField('CodecValue', 2) doesNotNeedSplitting = _messages.BooleanField(3) metadata = _messages.MessageField('SourceMetadata', 4) @@ -5921,6 +6120,7 @@ class SourceSplitResponse(_messages.Message): bundles into which the source was split. shards: DEPRECATED in favor of bundles. """ + class OutcomeValueValuesEnum(_messages.Enum): r"""Indicates whether splitting happened and produced a list of bundles. 
If this is USE_CURRENT_SOURCE_AS_IS, the current source should be @@ -5955,6 +6155,7 @@ class SourceSplitShard(_messages.Message): derivationMode: DEPRECATED source: DEPRECATED """ + class DerivationModeValueValuesEnum(_messages.Enum): r"""DEPRECATED @@ -6004,6 +6205,26 @@ class SplitInt64(_messages.Message): lowBits = _messages.IntegerField(2, variant=_messages.Variant.UINT32) +class Stack(_messages.Message): + r"""A structured stacktrace for a process running on the worker. + Fields: + stackContent: The raw stack trace. + threadCount: With java thread dumps we may get collapsed stacks e.g., N + threads in stack "". Instead of having to copy over the same stack trace + N times, this int field captures this. + threadName: Thread name. For example, "CommitThread-0,10,main" + threadState: The state of the thread. For example, "WAITING". + timestamp: Timestamp at which the stack was captured. + """ + + stackContent = _messages.StringField(1) + threadCount = _messages.IntegerField(2, variant=_messages.Variant.INT32) + threadName = _messages.StringField(3) + threadState = _messages.StringField(4) + timestamp = _messages.StringField(5) + + class StageExecutionDetails(_messages.Message): r"""Information about the workers and work items within a stage. @@ -6053,6 +6274,7 @@ class StageSummary(_messages.Message): state: State of this stage. stragglerSummary: Straggler summary for this stage. """ + class StateValueValuesEnum(_messages.Enum): r"""State of this stage.
""" + @encoding.MapUnrecognizedFields('additionalProperties') class DetailsValueListEntry(_messages.Message): r"""A DetailsValueListEntry object. @@ -6185,6 +6409,7 @@ class DetailsValueListEntry(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a DetailsValueListEntry object. @@ -6196,8 +6421,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) code = _messages.IntegerField(1, variant=_messages.Variant.INT32) details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True) @@ -6232,6 +6456,7 @@ class Step(_messages.Message): predefined step has its own required set of properties. Must be provided on Create. Only retrieved with JOB_VIEW_ALL. """ + @encoding.MapUnrecognizedFields('additionalProperties') class PropertiesValue(_messages.Message): r"""Named properties associated with the step. Each kind of predefined @@ -6244,6 +6469,7 @@ class PropertiesValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a PropertiesValue object. @@ -6255,8 +6481,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) kind = _messages.StringField(1) name = _messages.StringField(2) @@ -6302,6 +6527,7 @@ class StragglerInfo(_messages.Message): each straggler cause. 
startTime: The time when the work item attempt became a straggler. """ + @encoding.MapUnrecognizedFields('additionalProperties') class CausesValue(_messages.Message): r"""The straggler causes, keyed by the string representation of the @@ -6314,6 +6540,7 @@ class CausesValue(_messages.Message): Fields: additionalProperties: Additional properties of type CausesValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a CausesValue object. @@ -6325,8 +6552,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('StragglerDebuggingInfo', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) causes = _messages.MessageField('CausesValue', 1) startTime = _messages.StringField(2) @@ -6345,6 +6571,7 @@ class StragglerSummary(_messages.Message): string representation of the StragglerCause enum. totalStragglerCount: The total count of stragglers. """ + @encoding.MapUnrecognizedFields('additionalProperties') class StragglerCauseCountValue(_messages.Message): r"""Aggregated counts of straggler causes, keyed by the string @@ -6358,6 +6585,7 @@ class StragglerCauseCountValue(_messages.Message): additionalProperties: Additional properties of type StragglerCauseCountValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a StragglerCauseCountValue object. 
@@ -6369,8 +6597,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.IntegerField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) recentStragglers = _messages.MessageField('Straggler', 1, repeated=True) stragglerCauseCount = _messages.MessageField('StragglerCauseCountValue', 2) @@ -6424,6 +6651,7 @@ class StreamingComputationConfig(_messages.Message): transformUserNameToStateFamily: Map from user name of stateful transforms in this stage to their state family. """ + @encoding.MapUnrecognizedFields('additionalProperties') class TransformUserNameToStateFamilyValue(_messages.Message): r"""Map from user name of stateful transforms in this stage to their state @@ -6437,6 +6665,7 @@ class TransformUserNameToStateFamilyValue(_messages.Message): additionalProperties: Additional properties of type TransformUserNameToStateFamilyValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a TransformUserNameToStateFamilyValue object. 
@@ -6449,15 +6678,13 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) computationId = _messages.StringField(1) instructions = _messages.MessageField('ParallelInstruction', 2, repeated=True) stageName = _messages.StringField(3) systemName = _messages.StringField(4) - transformUserNameToStateFamily = _messages.MessageField( - 'TransformUserNameToStateFamilyValue', 5) + transformUserNameToStateFamily = _messages.MessageField('TransformUserNameToStateFamilyValue', 5) class StreamingComputationRanges(_messages.Message): @@ -6470,8 +6697,7 @@ class StreamingComputationRanges(_messages.Message): """ computationId = _messages.StringField(1) - rangeAssignments = _messages.MessageField( - 'KeyRangeDataDiskAssignment', 2, repeated=True) + rangeAssignments = _messages.MessageField('KeyRangeDataDiskAssignment', 2, repeated=True) class StreamingComputationTask(_messages.Message): @@ -6487,6 +6713,7 @@ class StreamingComputationTask(_messages.Message): dataDisks: Describes the set of data disks this task should apply to. taskType: A type of streaming computation task. """ + class TaskTypeValueValuesEnum(_messages.Enum): r"""A type of streaming computation task. 
@@ -6502,8 +6729,7 @@ class TaskTypeValueValuesEnum(_messages.Enum): STREAMING_COMPUTATION_TASK_STOP = 1 STREAMING_COMPUTATION_TASK_START = 2 - computationRanges = _messages.MessageField( - 'StreamingComputationRanges', 1, repeated=True) + computationRanges = _messages.MessageField('StreamingComputationRanges', 1, repeated=True) dataDisks = _messages.MessageField('MountedDataDisk', 2, repeated=True) taskType = _messages.EnumField('TaskTypeValueValuesEnum', 3) @@ -6526,7 +6752,13 @@ class StreamingConfigTask(_messages.Message): operationalLimits: Operational limits for the streaming job. Can be used by the worker to validate outputs sent to the backend. streamingComputationConfigs: Set of computation configuration information. + streamingEngineStateTagEncodingVersion: Optional. The state tag encoding + format version for streaming engine jobs. userStepToStateFamilyNameMap: Map from user step names to state families. + userWorkerRunnerV1Settings: Binary encoded proto to control runtime + behavior of the java runner v1 user worker. + userWorkerRunnerV2Settings: Binary encoded proto to control runtime + behavior of the runner v2 user worker. windmillServiceEndpoint: If present, the worker must use this endpoint to communicate with Windmill Service dispatchers, otherwise the worker must continue to use whatever endpoint it had been using. @@ -6534,6 +6766,7 @@ class StreamingConfigTask(_messages.Message): communicate with Windmill Service dispatchers. Only applicable when windmill_service_endpoint is specified. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UserStepToStateFamilyNameMapValue(_messages.Message): r"""Map from user step names to state families. @@ -6546,6 +6779,7 @@ class UserStepToStateFamilyNameMapValue(_messages.Message): additionalProperties: Additional properties of type UserStepToStateFamilyNameMapValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserStepToStateFamilyNameMapValue object. 
@@ -6558,19 +6792,19 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) commitStreamChunkSizeBytes = _messages.IntegerField(1) getDataStreamChunkSizeBytes = _messages.IntegerField(2) maxWorkItemCommitBytes = _messages.IntegerField(3) operationalLimits = _messages.MessageField('StreamingOperationalLimits', 4) - streamingComputationConfigs = _messages.MessageField( - 'StreamingComputationConfig', 5, repeated=True) - userStepToStateFamilyNameMap = _messages.MessageField( - 'UserStepToStateFamilyNameMapValue', 6) - windmillServiceEndpoint = _messages.StringField(7) - windmillServicePort = _messages.IntegerField(8) + streamingComputationConfigs = _messages.MessageField('StreamingComputationConfig', 5, repeated=True) + streamingEngineStateTagEncodingVersion = _messages.IntegerField(6, variant=_messages.Variant.INT32) + userStepToStateFamilyNameMap = _messages.MessageField('UserStepToStateFamilyNameMapValue', 7) + userWorkerRunnerV1Settings = _messages.BytesField(8) + userWorkerRunnerV2Settings = _messages.BytesField(9) + windmillServiceEndpoint = _messages.StringField(10) + windmillServicePort = _messages.IntegerField(11) class StreamingOperationalLimits(_messages.Message): @@ -6615,17 +6849,13 @@ class StreamingScalingReport(_messages.Message): activeBundleCount = _messages.IntegerField(1, variant=_messages.Variant.INT32) activeThreadCount = _messages.IntegerField(2, variant=_messages.Variant.INT32) - maximumBundleCount = _messages.IntegerField( - 3, variant=_messages.Variant.INT32) + maximumBundleCount = _messages.IntegerField(3, variant=_messages.Variant.INT32) maximumBytes = _messages.IntegerField(4) maximumBytesCount = _messages.IntegerField(5, variant=_messages.Variant.INT32) - maximumThreadCount = _messages.IntegerField( - 6, 
variant=_messages.Variant.INT32) - outstandingBundleCount = _messages.IntegerField( - 7, variant=_messages.Variant.INT32) + maximumThreadCount = _messages.IntegerField(6, variant=_messages.Variant.INT32) + outstandingBundleCount = _messages.IntegerField(7, variant=_messages.Variant.INT32) outstandingBytes = _messages.IntegerField(8) - outstandingBytesCount = _messages.IntegerField( - 9, variant=_messages.Variant.INT32) + outstandingBytesCount = _messages.IntegerField(9, variant=_messages.Variant.INT32) class StreamingScalingReportResponse(_messages.Message): @@ -6636,8 +6866,7 @@ class StreamingScalingReportResponse(_messages.Message): maximumThreadCount: Maximum thread count limit; """ - maximumThreadCount = _messages.IntegerField( - 1, variant=_messages.Variant.INT32) + maximumThreadCount = _messages.IntegerField(1, variant=_messages.Variant.INT32) class StreamingSetupTask(_messages.Message): @@ -6812,6 +7041,7 @@ class TemplateMetadata(_messages.Message): supports at least once mode. supportsExactlyOnce: Optional. Indicates if the streaming template supports exactly once mode. + yamlDefinition: Optional. For future use. """ defaultStreamingMode = _messages.StringField(1) @@ -6821,6 +7051,7 @@ class TemplateMetadata(_messages.Message): streaming = _messages.BooleanField(5) supportsAtLeastOnce = _messages.BooleanField(6) supportsExactlyOnce = _messages.BooleanField(7) + yamlDefinition = _messages.StringField(8) class TopologyConfig(_messages.Message): @@ -6840,6 +7071,7 @@ class TopologyConfig(_messages.Message): userStageToComputationNameMap: Maps user stage names to stable computation names. """ + @encoding.MapUnrecognizedFields('additionalProperties') class UserStageToComputationNameMapValue(_messages.Message): r"""Maps user stage names to stable computation names. 
@@ -6852,6 +7084,7 @@ class UserStageToComputationNameMapValue(_messages.Message): additionalProperties: Additional properties of type UserStageToComputationNameMapValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a UserStageToComputationNameMapValue object. @@ -6864,17 +7097,13 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) computations = _messages.MessageField('ComputationTopology', 1, repeated=True) - dataDiskAssignments = _messages.MessageField( - 'DataDiskAssignment', 2, repeated=True) + dataDiskAssignments = _messages.MessageField('DataDiskAssignment', 2, repeated=True) forwardingKeyBits = _messages.IntegerField(3, variant=_messages.Variant.INT32) - persistentStateVersion = _messages.IntegerField( - 4, variant=_messages.Variant.INT32) - userStageToComputationNameMap = _messages.MessageField( - 'UserStageToComputationNameMapValue', 5) + persistentStateVersion = _messages.IntegerField(4, variant=_messages.Variant.INT32) + userStageToComputationNameMap = _messages.MessageField('UserStageToComputationNameMapValue', 5) class TransformSummary(_messages.Message): @@ -6893,6 +7122,7 @@ class TransformSummary(_messages.Message): outputCollectionName: User names for all collection outputs to this transform. """ + class KindValueValuesEnum(_messages.Enum): r"""Type of transform. 
@@ -6966,8 +7196,7 @@ class WorkItem(_messages.Message): seqMapTask = _messages.MessageField('SeqMapTask', 10) shellTask = _messages.MessageField('ShellTask', 11) sourceOperationTask = _messages.MessageField('SourceOperationRequest', 12) - streamingComputationTask = _messages.MessageField( - 'StreamingComputationTask', 13) + streamingComputationTask = _messages.MessageField('StreamingComputationTask', 13) streamingConfigTask = _messages.MessageField('StreamingConfigTask', 14) streamingSetupTask = _messages.MessageField('StreamingSetupTask', 15) @@ -6990,6 +7219,7 @@ class WorkItemDetails(_messages.Message): stragglerInfo: Information about straggler detections for this work item. taskId: Name of this work item. """ + class StateValueValuesEnum(_messages.Enum): r"""State of this work item. @@ -7052,6 +7282,7 @@ class WorkItemServiceState(_messages.Message): suggestedStopPoint: DEPRECATED in favor of split_request. suggestedStopPosition: Obsolete, always empty. """ + @encoding.MapUnrecognizedFields('additionalProperties') class HarnessDataValue(_messages.Message): r"""Other data returned by the service, specific to the particular worker @@ -7064,6 +7295,7 @@ class HarnessDataValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a HarnessDataValue object. 
@@ -7075,8 +7307,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) completeWorkStatus = _messages.MessageField('Status', 1) harnessData = _messages.MessageField('HarnessDataValue', 2) @@ -7158,8 +7389,7 @@ class WorkItemStatus(_messages.Message): reportedProgress = _messages.MessageField('ApproximateReportedProgress', 8) requestedLeaseDuration = _messages.StringField(9) sourceFork = _messages.MessageField('SourceFork', 10) - sourceOperationResponse = _messages.MessageField( - 'SourceOperationResponse', 11) + sourceOperationResponse = _messages.MessageField('SourceOperationResponse', 11) stopPosition = _messages.MessageField('Position', 12) totalThrottlerWaitTimeSeconds = _messages.FloatField(13) workItemId = _messages.StringField(14) @@ -7202,6 +7432,7 @@ class WorkerHealthReport(_messages.Message): vmIsHealthy: Whether the VM is currently healthy. vmStartupTime: The time the VM was booted. """ + @encoding.MapUnrecognizedFields('additionalProperties') class PodsValueListEntry(_messages.Message): r"""A PodsValueListEntry object. @@ -7213,6 +7444,7 @@ class PodsValueListEntry(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a PodsValueListEntry object. 
@@ -7224,8 +7456,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) msg = _messages.StringField(1) pods = _messages.MessageField('PodsValueListEntry', 2, repeated=True) @@ -7271,6 +7502,7 @@ class WorkerLifecycleEvent(_messages.Message): metadata: Other stats that can accompany an event. E.g. { "downloaded_bytes" : "123456" } """ + class EventValueValuesEnum(_messages.Enum): r"""The event being reported. @@ -7306,6 +7538,7 @@ class MetadataValue(_messages.Message): Fields: additionalProperties: Additional properties of type MetadataValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a MetadataValue object. @@ -7317,8 +7550,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) containerStartTime = _messages.StringField(1) event = _messages.EnumField('EventValueValuesEnum', 2) @@ -7358,6 +7590,7 @@ class WorkerMessage(_messages.Message): workerShutdownNotice: Shutdown notice by workers. workerThreadScalingReport: Thread scaling information reported by workers. """ + @encoding.MapUnrecognizedFields('additionalProperties') class LabelsValue(_messages.Message): r"""Labels are used to group WorkerMessages. For example, a worker_message @@ -7373,6 +7606,7 @@ class LabelsValue(_messages.Message): Fields: additionalProperties: Additional properties of type LabelsValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a LabelsValue object. 
@@ -7384,8 +7618,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) dataSamplingReport = _messages.MessageField('DataSamplingReport', 1) labels = _messages.MessageField('LabelsValue', 2) @@ -7397,8 +7630,7 @@ class AdditionalProperty(_messages.Message): workerMessageCode = _messages.MessageField('WorkerMessageCode', 8) workerMetrics = _messages.MessageField('ResourceUtilizationReport', 9) workerShutdownNotice = _messages.MessageField('WorkerShutdownNotice', 10) - workerThreadScalingReport = _messages.MessageField( - 'WorkerThreadScalingReport', 11) + workerThreadScalingReport = _messages.MessageField('WorkerThreadScalingReport', 11) class WorkerMessageCode(_messages.Message): @@ -7446,6 +7678,7 @@ class WorkerMessageCode(_messages.Message): worker identifiers should almost always be passed as labels since they will be included on most messages. """ + @encoding.MapUnrecognizedFields('additionalProperties') class ParametersValue(_messages.Message): r"""Parameters contains specific information about the code. This is a @@ -7467,6 +7700,7 @@ class ParametersValue(_messages.Message): Fields: additionalProperties: Properties of the object. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a ParametersValue object. 
@@ -7478,8 +7712,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) code = _messages.StringField(1) parameters = _messages.MessageField('ParametersValue', 2) @@ -7502,16 +7735,11 @@ class WorkerMessageResponse(_messages.Message): for workers. """ - streamingScalingReportResponse = _messages.MessageField( - 'StreamingScalingReportResponse', 1) - workerHealthReportResponse = _messages.MessageField( - 'WorkerHealthReportResponse', 2) - workerMetricsResponse = _messages.MessageField( - 'ResourceUtilizationReportResponse', 3) - workerShutdownNoticeResponse = _messages.MessageField( - 'WorkerShutdownNoticeResponse', 4) - workerThreadScalingReportResponse = _messages.MessageField( - 'WorkerThreadScalingReportResponse', 5) + streamingScalingReportResponse = _messages.MessageField('StreamingScalingReportResponse', 1) + workerHealthReportResponse = _messages.MessageField('WorkerHealthReportResponse', 2) + workerMetricsResponse = _messages.MessageField('ResourceUtilizationReportResponse', 3) + workerShutdownNoticeResponse = _messages.MessageField('WorkerShutdownNoticeResponse', 4) + workerThreadScalingReportResponse = _messages.MessageField('WorkerThreadScalingReportResponse', 5) class WorkerPool(_messages.Message): @@ -7600,6 +7828,7 @@ class WorkerPool(_messages.Message): zone: Zone to run the worker pools in. If empty or unspecified, the service will attempt to choose a reasonable default. """ + class DefaultPackageSetValueValuesEnum(_messages.Enum): r"""The default package set to install. 
This allows the service to select a default set of packages which are useful to worker harnesses written in @@ -7669,6 +7898,7 @@ class MetadataValue(_messages.Message): Fields: additionalProperties: Additional properties of type MetadataValue """ + class AdditionalProperty(_messages.Message): r"""An additional property for a MetadataValue object. @@ -7680,8 +7910,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.StringField(2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) @encoding.MapUnrecognizedFields('additionalProperties') class PoolArgsValue(_messages.Message): @@ -7694,6 +7923,7 @@ class PoolArgsValue(_messages.Message): additionalProperties: Properties of the object. Contains field @type with type URL. """ + class AdditionalProperty(_messages.Message): r"""An additional property for a PoolArgsValue object. 
@@ -7705,8 +7935,7 @@ class AdditionalProperty(_messages.Message): key = _messages.StringField(1) value = _messages.MessageField('extra_types.JsonValue', 2) - additionalProperties = _messages.MessageField( - 'AdditionalProperty', 1, repeated=True) + additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) autoscalingSettings = _messages.MessageField('AutoscalingSettings', 1) dataDisks = _messages.MessageField('Disk', 2, repeated=True) @@ -7719,14 +7948,12 @@ class AdditionalProperty(_messages.Message): machineType = _messages.StringField(9) metadata = _messages.MessageField('MetadataValue', 10) network = _messages.StringField(11) - numThreadsPerWorker = _messages.IntegerField( - 12, variant=_messages.Variant.INT32) + numThreadsPerWorker = _messages.IntegerField(12, variant=_messages.Variant.INT32) numWorkers = _messages.IntegerField(13, variant=_messages.Variant.INT32) onHostMaintenance = _messages.StringField(14) packages = _messages.MessageField('Package', 15, repeated=True) poolArgs = _messages.MessageField('PoolArgsValue', 16) - sdkHarnessContainerImages = _messages.MessageField( - 'SdkHarnessContainerImage', 17, repeated=True) + sdkHarnessContainerImages = _messages.MessageField('SdkHarnessContainerImage', 17, repeated=True) subnetwork = _messages.StringField(18) taskrunnerSettings = _messages.MessageField('TaskRunnerSettings', 19) teardownPolicy = _messages.EnumField('TeardownPolicyValueValuesEnum', 20) @@ -7789,8 +8016,7 @@ class WorkerThreadScalingReport(_messages.Message): currentThreadCount: Current number of active threads in a worker. """ - currentThreadCount = _messages.IntegerField( - 1, variant=_messages.Variant.INT32) + currentThreadCount = _messages.IntegerField(1, variant=_messages.Variant.INT32) class WorkerThreadScalingReportResponse(_messages.Message): @@ -7801,8 +8027,7 @@ class WorkerThreadScalingReportResponse(_messages.Message): recommendedThreadCount: Recommended number of threads for a worker. 
""" - recommendedThreadCount = _messages.IntegerField( - 1, variant=_messages.Variant.INT32) + recommendedThreadCount = _messages.IntegerField(1, variant=_messages.Variant.INT32) class WriteInstruction(_messages.Message): @@ -7824,3 +8049,11 @@ class WriteInstruction(_messages.Message): StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1') encoding.AddCustomJsonEnumMapping( StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2') +encoding.AddCustomJsonFieldMapping( + DataflowProjectsLocationsTemplatesLaunchRequest, 'dynamicTemplate_gcsPath', 'dynamicTemplate.gcsPath') +encoding.AddCustomJsonFieldMapping( + DataflowProjectsLocationsTemplatesLaunchRequest, 'dynamicTemplate_stagingLocation', 'dynamicTemplate.stagingLocation') +encoding.AddCustomJsonFieldMapping( + DataflowProjectsTemplatesLaunchRequest, 'dynamicTemplate_gcsPath', 'dynamicTemplate.gcsPath') +encoding.AddCustomJsonFieldMapping( + DataflowProjectsTemplatesLaunchRequest, 'dynamicTemplate_stagingLocation', 'dynamicTemplate.stagingLocation') From 491e3ac958c7906bb5ed471f97ac30d95deee068 Mon Sep 17 00:00:00 2001 From: Tarun Annapareddy Date: Fri, 13 Feb 2026 13:23:40 -0800 Subject: [PATCH 4/5] fix import --- .../dataflow/internal/clients/dataflow/dataflow_v1b3_client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py index 6f62edbae1cf..179e51eb95e8 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py @@ -1,8 +1,6 @@ """Generated client library for dataflow version v1b3.""" # NOTE: This file is autogenerated and should not be edited by hand. -from __future__ import absolute_import - from apitools.base.py import base_api from . 
import dataflow_v1b3_messages as messages From db302f69b237222ad1f0b2bd06ab555cbc2ed57b Mon Sep 17 00:00:00 2001 From: Tarun Annapareddy Date: Fri, 13 Feb 2026 13:27:05 -0800 Subject: [PATCH 5/5] fix import --- .../internal/clients/dataflow/dataflow_v1b3_messages.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py index 06d7a8fd738b..0c096e73c1ac 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py @@ -4,8 +4,6 @@ """ # NOTE: This file is autogenerated and should not be edited by hand. -from __future__ import absolute_import - from apitools.base.protorpclite import messages as _messages from apitools.base.py import encoding from apitools.base.py import extra_types