Update address comments
tt810 committed Oct 11, 2019
1 parent c13013b commit 50057cc
Showing 5 changed files with 22 additions and 17 deletions.
2 changes: 1 addition & 1 deletion datadog/resource_datadog_logs_integration_pipeline.go
@@ -24,7 +24,7 @@ func resourceDatadogLogsIntegrationPipeline() *schema.Resource {
 }
 
 func resourceDatadogLogsIntegrationPipelineCreate(d *schema.ResourceData, meta interface{}) error {
-	return fmt.Errorf("cannot create an integration pipeline, try import it")
+	return fmt.Errorf("cannot create an integration pipeline, please import it first to make changes")
 }
 
 func resourceDatadogLogsIntegrationPipelineRead(d *schema.ResourceData, meta interface{}) error {
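The updated error message steers users toward Terraform's import workflow: an integration pipeline already exists on the Datadog side, so the provider refuses to create one and expects it to be imported into state before changes are made. As a minimal sketch of that workflow (the resource label `redis`, the pipeline ID placeholder, and the `is_enabled` attribute are illustrative assumptions, not taken from this commit):

```hcl
# Hypothetical usage sketch: the pipeline must already exist in Datadog.
# Bring it under Terraform management first:
#
#   terraform import datadog_logs_integration_pipeline.redis <pipeline-id>
#
# then manage its settings from configuration.
resource "datadog_logs_integration_pipeline" "redis" {
  is_enabled = true
}
```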
32 changes: 16 additions & 16 deletions datadog/resource_datadog_logs_pipeline_order_test.go
@@ -6,14 +6,14 @@ import (
 )
 
 const pipelinesConfig = `
-resource "datadog_logs_customer_pipeline" "pipeline_1" {
+resource "datadog_logs_custom_pipeline" "pipeline_1" {
     name = "my first pipeline"
     is_enabled = true
     filter {
         query = "source:redis"
     }
 }
-resource "datadog_logs_customer_pipeline" "pipeline_2" {
+resource "datadog_logs_custom_pipeline" "pipeline_2" {
     name = "my second pipeline"
     is_enabled = true
     filter {
@@ -23,26 +23,26 @@ resource "datadog_logs_customer_pipeline" "pipeline_2" {
 resource "datadog_logs_pipeline_order" "pipelines" {
     depends_on = [
-        "datadog_logs_customer_pipeline.pipeline_1",
-        "datadog_logs_customer_pipeline.pipeline_2"
+        "datadog_logs_custom_pipeline.pipeline_1",
+        "datadog_logs_custom_pipeline.pipeline_2"
     ]
     name = "pipelines"
     pipelines = [
-        "${datadog_logs_customer_pipeline.pipeline_1.id}",
-        "${datadog_logs_customer_pipeline.pipeline_2.id}"
+        "${datadog_logs_custom_pipeline.pipeline_1.id}",
+        "${datadog_logs_custom_pipeline.pipeline_2.id}"
     ]
 }
 `
 
 const orderUpdateConfig = `
-resource "datadog_logs_customer_pipeline" "pipeline_1" {
+resource "datadog_logs_custom_pipeline" "pipeline_1" {
     name = "my first pipeline"
     is_enabled = true
     filter {
         query = "source:redis"
     }
 }
-resource "datadog_logs_customer_pipeline" "pipeline_2" {
+resource "datadog_logs_custom_pipeline" "pipeline_2" {
     name = "my second pipeline"
     is_enabled = true
     filter {
@@ -52,13 +52,13 @@ resource "datadog_logs_customer_pipeline" "pipeline_2" {
 resource "datadog_logs_pipeline_order" "pipelines" {
     depends_on = [
-        "datadog_logs_customer_pipeline.pipeline_1",
-        "datadog_logs_customer_pipeline.pipeline_2"
+        "datadog_logs_custom_pipeline.pipeline_1",
+        "datadog_logs_custom_pipeline.pipeline_2"
     ]
     name = "pipelines"
     pipelines = [
-        "${datadog_logs_customer_pipeline.pipeline_2.id}",
-        "${datadog_logs_customer_pipeline.pipeline_1.id}"
+        "${datadog_logs_custom_pipeline.pipeline_2.id}",
+        "${datadog_logs_custom_pipeline.pipeline_1.id}"
     ]
 }
 `
@@ -72,8 +72,8 @@ func TestAccDatadogLogsPipelineOrder_basic(t *testing.T) {
 		{
 			Config: pipelinesConfig,
 			Check: resource.ComposeTestCheckFunc(
-				testAccCheckPipelineExists("datadog_logs_customer_pipeline.pipeline_1"),
-				testAccCheckPipelineExists("datadog_logs_customer_pipeline.pipeline_2"),
+				testAccCheckPipelineExists("datadog_logs_custom_pipeline.pipeline_1"),
+				testAccCheckPipelineExists("datadog_logs_custom_pipeline.pipeline_2"),
 				resource.TestCheckResourceAttr(
 					"datadog_logs_pipeline_order.pipelines", "name", "pipelines"),
 				resource.TestCheckResourceAttr(
@@ -83,8 +83,8 @@ func TestAccDatadogLogsPipelineOrder_basic(t *testing.T) {
 		{
 			Config: orderUpdateConfig,
 			Check: resource.ComposeTestCheckFunc(
-				testAccCheckPipelineExists("datadog_logs_customer_pipeline.pipeline_2"),
-				testAccCheckPipelineExists("datadog_logs_customer_pipeline.pipeline_1"),
+				testAccCheckPipelineExists("datadog_logs_custom_pipeline.pipeline_2"),
+				testAccCheckPipelineExists("datadog_logs_custom_pipeline.pipeline_1"),
 				resource.TestCheckResourceAttr(
 					"datadog_logs_pipeline_order.pipelines", "name", "pipelines"),
 				resource.TestCheckResourceAttr(
2 changes: 2 additions & 0 deletions go.mod
@@ -1,5 +1,7 @@
 module github.com/terraform-providers/terraform-provider-datadog
 
+replace git.apache.org/thrift.git => github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999
+
 require (
 	github.com/cenkalti/backoff v2.1.1+incompatible // indirect
 	github.com/golang/protobuf v1.3.2 // indirect
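For readers unfamiliar with `replace` directives: the added line tells the Go toolchain to fetch any requirement on `git.apache.org/thrift.git` from the GitHub mirror at the pinned pseudo-version instead, a common workaround when the original host is unavailable. A minimal sketch of the pattern in go.mod syntax, using a placeholder module path rather than this repository's full go.mod:

```
// Sketch only; "example.com/myprovider" is a placeholder module path.
module example.com/myprovider

// Redirect the (possibly transitive) dependency on git.apache.org/thrift.git
// to the GitHub mirror, pinned to a specific pseudo-version.
replace git.apache.org/thrift.git => github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999
```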
1 change: 1 addition & 0 deletions go.sum
@@ -35,6 +35,7 @@ github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible/go.mod h1:LDQHRZy
 github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
 github.com/antchfx/xpath v0.0.0-20190129040759-c8489ed3251e/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
 github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M=
+github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/apparentlymart/go-cidr v1.0.0 h1:lGDvXx8Lv9QHjrAVP7jyzleG4F9+FkRhJcEsDFxeb8w=
 github.com/apparentlymart/go-cidr v1.0.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc=
 github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA=
2 changes: 2 additions & 0 deletions website/docs/r/logs_custom_pipeline.html.markdown
@@ -13,6 +13,8 @@ Provides a Datadog [Logs Pipeline API](https://docs.datadoghq.com/api/?lang=pyth
 
 ## Example Usage
 
+Create a Datadog logs pipeline:
+
 ```hcl
 resource "datadog_logs_custom_pipeline" "sample_pipeline" {
   filter {
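The documentation's example is cut off above. As a separate minimal sketch, assembled from the test configuration earlier in this commit rather than from the documentation's full example (which is not reproduced here), a custom pipeline can look like this:

```hcl
resource "datadog_logs_custom_pipeline" "sample_pipeline" {
  name       = "sample pipeline"
  is_enabled = true

  # Only logs matching this query enter the pipeline.
  filter {
    query = "source:redis"
  }
}
```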
