Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add data source for default scraper configuration #35280

Merged
merged 16 commits into from
Oct 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changelog/35280.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:new-data-source
aws_prometheus_default_scraper_configuration
```
3 changes: 3 additions & 0 deletions internal/provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"context"
"errors"
"fmt"
"log"
"os"
"strings"
"time"
Expand All @@ -33,6 +34,8 @@ import (
// New returns a new, initialized Terraform Plugin SDK v2-style provider instance.
// The provider instance is fully configured once the `ConfigureContextFunc` has been called.
func New(ctx context.Context) (*schema.Provider, error) {
log.Printf("Initializing Terraform AWS Provider...")

provider := &schema.Provider{
// This schema must match exactly the Terraform Protocol v6 (Terraform Plugin Framework) provider's schema.
// Notably the attributes can have no Default values.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package amp

import (
"context"

"github.com/aws/aws-sdk-go-v2/service/amp"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-provider-aws/internal/framework"
fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex"
"github.com/hashicorp/terraform-provider-aws/internal/tfresource"
"github.com/hashicorp/terraform-provider-aws/names"
)

// @FrameworkDataSource(aws_prometheus_default_scraper_configuration, name="Default Scraper Configuration")
func newDefaultScraperConfigurationDataSource(context.Context) (datasource.DataSourceWithConfigure, error) {
	// Stateless data source; nothing to initialize beyond the zero value.
	d := &defaultScraperConfigurationDataSource{}

	return d, nil
}

// defaultScraperConfigurationDataSource implements the
// aws_prometheus_default_scraper_configuration data source. It embeds the
// shared framework plumbing (Configure, Meta, etc.) and carries no state of
// its own.
type defaultScraperConfigurationDataSource struct {
	framework.DataSourceWithConfigure
}

// Metadata sets the data source's Terraform type name.
func (*defaultScraperConfigurationDataSource) Metadata(_ context.Context, request datasource.MetadataRequest, response *datasource.MetadataResponse) { // nosemgrep:ci.meta-in-func-name
	response.TypeName = "aws_prometheus_default_scraper_configuration"
}

// Schema declares the data source's single computed attribute: the default
// scraper configuration document returned by the AMP service.
func (d *defaultScraperConfigurationDataSource) Schema(ctx context.Context, request datasource.SchemaRequest, response *datasource.SchemaResponse) {
	attributes := map[string]schema.Attribute{
		names.AttrConfiguration: schema.StringAttribute{
			Computed: true,
		},
	}

	response.Schema = schema.Schema{Attributes: attributes}
}

// Read fetches the service-provided default scraper configuration and writes
// it into Terraform state as a string attribute.
func (d *defaultScraperConfigurationDataSource) Read(ctx context.Context, request datasource.ReadRequest, response *datasource.ReadResponse) {
	var data defaultScraperConfigurationDataSourceModel
	response.Diagnostics.Append(request.Config.Get(ctx, &data)...)
	if response.Diagnostics.HasError() {
		return
	}

	cfg, err := findDefaultScraperConfiguration(ctx, d.Meta().AMPClient(ctx))
	if err != nil {
		response.Diagnostics.AddError("reading Prometheus Default Scraper Configuration", err.Error())

		return
	}

	// The API returns the configuration as raw bytes; expose it as a string.
	data.Configuration = fwflex.StringValueToFramework(ctx, string(cfg))

	response.Diagnostics.Append(response.State.Set(ctx, &data)...)
}

// findDefaultScraperConfiguration calls the AMP GetDefaultScraperConfiguration
// API and returns the raw configuration document.
//
// It returns an empty-result error when the API responds without a payload so
// callers can distinguish "no configuration" from a transport failure.
func findDefaultScraperConfiguration(ctx context.Context, conn *amp.Client) ([]byte, error) {
	input := &amp.GetDefaultScraperConfigurationInput{}

	output, err := conn.GetDefaultScraperConfiguration(ctx, input)
	if err != nil {
		return nil, err
	}

	if output == nil || output.Configuration == nil {
		return nil, tfresource.NewEmptyResultError(input)
	}

	// err is necessarily nil on this path; return the literal nil rather than
	// the stale variable so the success case reads unambiguously.
	return output.Configuration, nil
}

// defaultScraperConfigurationDataSourceModel maps the data source's schema to
// a Plugin Framework model.
type defaultScraperConfigurationDataSourceModel struct {
	// Configuration holds the default scraper configuration document
	// (computed; populated from the GetDefaultScraperConfiguration API).
	Configuration types.String `tfsdk:"configuration"`
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package amp_test

import (
"testing"

"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-provider-aws/internal/acctest"
"github.com/hashicorp/terraform-provider-aws/names"
)

// TestAccAMPDefaultScraperConfigurationDataSource_basic verifies that the data
// source populates its computed configuration attribute.
func TestAccAMPDefaultScraperConfigurationDataSource_basic(t *testing.T) {
	ctx := acctest.Context(t)
	const name = "data.aws_prometheus_default_scraper_configuration.test"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck: func() {
			acctest.PreCheck(ctx, t)
			acctest.PreCheckPartitionHasService(t, names.AMPEndpointID)
		},
		ErrorCheck:               acctest.ErrorCheck(t, names.AMPServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		Steps: []resource.TestStep{
			{
				Config: testAccDefaultScraperConfigurationDataSourceConfig_basic(),
				Check: resource.ComposeTestCheckFunc(
					// The document's contents are service-defined; only assert presence.
					resource.TestCheckResourceAttrSet(name, names.AttrConfiguration),
				),
			},
		},
	})
}

// testAccDefaultScraperConfigurationDataSourceConfig_basic returns minimal HCL
// declaring the data source; it takes no arguments.
func testAccDefaultScraperConfigurationDataSourceConfig_basic() string {
	return `
data "aws_prometheus_default_scraper_configuration" "test" {}
`
}
7 changes: 6 additions & 1 deletion internal/service/amp/service_package_gen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 10 additions & 4 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,25 @@ import (
"context"
"flag"
"log"
"runtime/debug"

"github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server"
"github.com/hashicorp/terraform-provider-aws/internal/provider"
"github.com/hashicorp/terraform-provider-aws/version"
)

func main() {
debugFlag := flag.Bool("debug", false, "Start provider in debug mode.")
flag.Parse()

logFlags := log.Flags()
logFlags = logFlags &^ (log.Ldate | log.Ltime)
log.SetFlags(logFlags)

if buildInfo, ok := debug.ReadBuildInfo(); ok {
log.Printf("Starting %s@%s (%s)...", buildInfo.Main.Path, version.ProviderVersion, buildInfo.GoVersion)
}

serverFactory, _, err := provider.ProtoV5ProviderServerFactory(context.Background())

if err != nil {
Expand All @@ -28,10 +38,6 @@ func main() {
serveOpts = append(serveOpts, tf5server.WithManagedDebug())
}

logFlags := log.Flags()
logFlags = logFlags &^ (log.Ldate | log.Ltime)
log.SetFlags(logFlags)

err = tf5server.Serve(
"registry.terraform.io/hashicorp/aws",
serverFactory,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
---
subcategory: "AMP (Managed Prometheus)"
layout: "aws"
page_title: "AWS: aws_prometheus_default_scraper_configuration"
description: |-
Returns the default scraper configuration used when Amazon EKS creates a scraper for you.
---


# Data Source: aws_prometheus_default_scraper_configuration

Returns the default scraper configuration used when Amazon EKS creates a scraper for you.

## Example Usage

```terraform
data "aws_prometheus_default_scraper_configuration" "example" {}
```

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `configuration` - The configuration file.
30 changes: 30 additions & 0 deletions website/docs/r/prometheus_scraper.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,36 @@ EOT
}
```

### Use default EKS scraper configuration

You can use the `aws_prometheus_default_scraper_configuration` data source to use a
service-managed scrape configuration.

-> **Note:** If the configuration is updated, this will trigger a replacement
of your scraper.

```terraform
data "aws_prometheus_default_scraper_configuration" "example" {}

resource "aws_prometheus_scraper" "example" {

destination {
amp {
workspace_arn = aws_prometheus_workspace.example.arn
}
}

  scrape_configuration = data.aws_prometheus_default_scraper_configuration.example.configuration

source {
eks {
cluster_arn = data.aws_eks_cluster.example.arn
subnet_ids = data.aws_eks_cluster.example.vpc_config[0].subnet_ids
}
}
}
```

### Ignoring changes to Prometheus Workspace destination

A managed scraper will add a `AMPAgentlessScraper` tag to its Prometheus workspace
Expand Down
Loading