docs/dyn/dataflow_v1b3.projects.locations.flexTemplates.html (6 additions, 5 deletions)
@@ -229,7 +229,7 @@ <h3>Method Details</h3>
 "clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
 "createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
 "createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
-"currentState": "A String", # The current state of the job. Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise specified. A job in the `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After a job has reached a terminal state, no further state updates may be made. This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
+"currentState": "A String", # The current state of the job. Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise specified. A job in the `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After a job has reached a terminal state, no further state updates may be made. This field might be mutated by the Dataflow service; callers cannot mutate it.
 "currentStateTime": "A String", # The timestamp associated with the current state.
 "environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
 "clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
@@ -360,7 +360,7 @@ <h3>Method Details</h3>
 },
 },
 },
-"id": "A String", # The unique ID of this job. This field is set by the Cloud Dataflow service when the Job is created, and is immutable for the life of the job.
+"id": "A String", # The unique ID of this job. This field is set by the Dataflow service when the job is created, and is immutable for the life of the job.
 "jobMetadata": { # Metadata available primarily for filtering jobs. Will be included in the ListJob response and Job SUMMARY view. # This field is populated by the Dataflow service to support filtering jobs by the metadata values provided here. Populated for ListJobs and all GetJob views SUMMARY and higher.
 "bigTableDetails": [ # Identification of a Cloud Bigtable source used in the Dataflow job.
 { # Metadata for a Cloud Bigtable connector used by the job.
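Because `currentState` can move asynchronously to a terminal state, a common pattern is to poll `projects.locations.jobs.get` with the `id` returned by the launch call above. A rough sketch; the terminal-state set and poll interval here are illustrative choices, not prescribed by the API:

```python
import time

from googleapiclient.discovery import build

dataflow = build("dataflow", "v1b3")

# Terminal states per the Job documentation; treat this set as illustrative.
TERMINAL_STATES = {
    "JOB_STATE_DONE", "JOB_STATE_FAILED", "JOB_STATE_CANCELLED",
    "JOB_STATE_UPDATED", "JOB_STATE_DRAINED",
}

def wait_for_job(project_id: str, location: str, job_id: str, poll_seconds: int = 30) -> dict:
    """Polls the job until `currentState` reaches a terminal state, then returns it."""
    while True:
        job = (
            dataflow.projects()
            .locations()
            .jobs()
            .get(projectId=project_id, location=location, jobId=job_id)
            .execute()
        )
        if job.get("currentState") in TERMINAL_STATES:
            return job
        time.sleep(poll_seconds)
```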
@@ -421,7 +421,7 @@ <h3>Method Details</h3>
 "a_key": "A String",
 },
 "location": "A String", # The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains this job.
-"name": "A String", # The user-specified Cloud Dataflow job name. Only one Job with a given name can exist in a project within one region at any given time. Jobs in different regions can have the same name. If a caller attempts to create a Job with the same name as an already-existing Job, the attempt returns the existing Job. The name must match the regular expression `[a-z]([-a-z0-9]{0,1022}[a-z0-9])?`
+"name": "A String", # The user-specified Dataflow job name. Only one active job with a given name can exist in a project within one region at any given time. Jobs in different regions can have the same name. If a caller attempts to create a job with the same name as an active job that already exists, the attempt returns the existing job. The name must match the regular expression `[a-z]([-a-z0-9]{0,1022}[a-z0-9])?`
 "pipelineDescription": { # A descriptive representation of submitted pipeline as well as the executed form. This data is provided by the Dataflow service for ease of visualizing the pipeline and interpreting Dataflow provided metrics. # Preliminary field: The format of this data may change at any time. A description of the user pipeline and stages through which it is executed. Created by Cloud Dataflow service. Only retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL.
 { # Data provided with a pipeline or transform to provide descriptive info.
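The `name` constraint in the hunk above is a plain regular expression, so it can be checked client-side before launching. A small sketch; the helper name and example job names are illustrative:

```python
import re

# Regex copied from the `name` field documentation above.
JOB_NAME_RE = re.compile(r"[a-z]([-a-z0-9]{0,1022}[a-z0-9])?")

def is_valid_job_name(name: str) -> bool:
    """Returns True if `name` matches the documented Dataflow job-name pattern."""
    return JOB_NAME_RE.fullmatch(name) is not None

assert is_valid_job_name("example-flex-job")
assert not is_valid_job_name("Example_Job")  # uppercase letters and underscores are not allowed
```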
@@ -510,13 +510,14 @@ <h3>Method Details</h3>
 ],
 "stepNamesHash": "A String", # A hash value of the submitted pipeline portable graph step names if exists.
 },
-"projectId": "A String", # The ID of the Cloud Platform project that the job belongs to.
+"projectId": "A String", # The ID of the Google Cloud project that the job belongs to.
 "replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
 "replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
 "requestedState": "A String", # The job's requested state. Applies to `UpdateJob` requests. Set `requested_state` with `UpdateJob` requests to switch between the states `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING`. You can also use `UpdateJob` requests to change a job's state from `JOB_STATE_RUNNING` to `JOB_STATE_CANCELLED`, `JOB_STATE_DONE`, or `JOB_STATE_DRAINED`. These states irrevocably terminate the job if it hasn't already reached a terminal state. This field has no effect on `CreateJob` requests.
 "runtimeUpdatableParams": { # Additional job parameters that can only be updated during runtime using the projects.jobs.update method. These fields have no effect when specified during job creation. # This field may ONLY be modified at runtime using the projects.jobs.update method to adjust job behavior. This field has no effect when specified at job creation.
 "maxNumWorkers": 42, # The maximum number of workers to cap autoscaling at. This field is currently only supported for Streaming Engine jobs.
 "minNumWorkers": 42, # The minimum number of workers to scale down to. This field is currently only supported for Streaming Engine jobs.
+"workerUtilizationHint": 3.14, # Target worker utilization, compared against the aggregate utilization of the worker pool by autoscaler, to determine upscaling and downscaling when absent other constraints such as backlog.
 },
 "satisfiesPzi": True or False, # Output only. Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
 "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
@@ -544,7 +545,7 @@ <h3>Method Details</h3>
 "transformNameMapping": { # The map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job.
 "a_key": "A String",
 },
-"type": "A String", # The type of Cloud Dataflow job.
+"type": "A String", # The type of Dataflow job.