From e44a234cdd8603e50c70561c433c3081eebb7dbe Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Thu, 29 Jun 2023 15:49:16 +0200 Subject: [PATCH] Add `full_refresh` attribute to the `pipeline_task` in `databricks_job` This allows forcing a full refresh of the pipeline from the job. This fixes #2362 --- docs/resources/job.md | 1 + jobs/resource_job.go | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/resources/job.md b/docs/resources/job.md index bad4fc2613..40726032dd 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -226,6 +226,7 @@ You can invoke Spark submit tasks only on new clusters. **In the `new_cluster` s ### pipeline_task Configuration Block * `pipeline_id` - (Required) The pipeline's unique ID. +* `full_refresh` - (Optional) (Bool) Specifies if there should be a full refresh of the pipeline. -> **Note** The following configuration blocks are only supported inside a `task` block diff --git a/jobs/resource_job.go b/jobs/resource_job.go index d69f0e6243..77866997b3 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -57,7 +57,8 @@ type PythonWheelTask struct { // PipelineTask contains the information for pipeline jobs type PipelineTask struct { - PipelineID string `json:"pipeline_id"` + PipelineID string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` } type SqlQueryTask struct {