feat(influx): extend pkg export all cmd to allow for filtering of output
Currently, output can be filtered by labelName and/or by resourceKind.

closes: #17029
jsteenb2 committed Mar 7, 2020
1 parent 8bfe05e commit 0305354
Showing 5 changed files with 238 additions and 19 deletions.
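
For orientation, a hedged example of how the extended command might be invoked once this change lands; the org ID and output file below are placeholders, not values taken from the commit:

    influx pkg export all --org-id=<org-id> -f pkg.yml --filter=labelName=foo --filter=resourceKind=Dashboard

The --filter flag is repeatable, and each value is a key=value pair whose key must be either labelName or resourceKind, as shown in the flag registration and tests below.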
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -5,6 +5,7 @@
1. [17085](https://github.com/influxdata/influxdb/pull/17085): Clicking on bucket name takes user to Data Explorer with bucket selected
1. [17095](https://github.com/influxdata/influxdb/pull/17095): Extend pkger dashboards with table view support
1. [17114](https://github.com/influxdata/influxdb/pull/17114): Allow for retention to be provided to influx setup command as a duration
1. [17138](https://github.com/influxdata/influxdb/pull/17138): Extend pkger export all capabilities to support filtering by label name and resource type

### Bug Fixes

30 changes: 29 additions & 1 deletion cmd/influx/pkg.go
@@ -41,6 +41,7 @@ type cmdPkgBuilder struct {
encoding string
file string
files []string
filters []string
disableColor bool
disableTableBorders bool
org organization
@@ -53,6 +54,7 @@ type cmdPkgBuilder struct {
force string
secrets []string
}

exportOpts struct {
resourceType string
buckets string
@@ -260,6 +262,7 @@ func (b *cmdPkgBuilder) cmdPkgExportAll() *cobra.Command {
cmd.Short = "Export all existing resources for an organization as a package"

cmd.Flags().StringVarP(&b.file, "file", "f", "", "output file for created pkg; defaults to std out if no file provided; the extension of provided file (.yml/.json) will dictate encoding")
cmd.Flags().StringArrayVar(&b.filters, "filter", nil, "Filter exported resources by labelName or resourceKind (format: --filter=labelName=example)")

b.org.register(cmd, false)

@@ -277,8 +280,33 @@ func (b *cmdPkgBuilder) pkgExportAllRunEFn(cmd *cobra.Command, args []string) er
return err
}

var (
labelNames []string
resourceKinds []pkger.Kind
)
for _, filter := range b.filters {
pair := strings.SplitN(filter, "=", 2)
if len(pair) < 2 {
continue
}
switch key, val := pair[0], pair[1]; key {
case "labelName":
labelNames = append(labelNames, val)
case "resourceKind":
k := pkger.Kind(val)
if err := k.OK(); err != nil {
return err
}
resourceKinds = append(resourceKinds, k)
default:
return fmt.Errorf("invalid filter provided %q; filter must be 1 in [labelName, resourceKind]", filter)
}
}

orgOpt := pkger.CreateWithAllOrgResources(pkger.CreateByOrgIDOpt{
OrgID: orgID,
OrgID: orgID,
LabelNames: labelNames,
ResourceKinds: resourceKinds,
})
return b.writePkg(cmd.OutOrStdout(), pkgSVC, b.file, orgOpt)
}
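
As a reading aid, here is a minimal, self-contained Go sketch of the same parsing logic in isolation; parseExportFilters is a hypothetical name, and the kind validation done by pkger.Kind.OK() in the real command is only noted in a comment rather than reproduced:

package main

import (
	"fmt"
	"strings"
)

// parseExportFilters splits each --filter value into a key=value pair and
// buckets the values by key, mirroring the loop in pkgExportAllRunEFn above.
func parseExportFilters(filters []string) (labelNames, resourceKinds []string, err error) {
	for _, filter := range filters {
		pair := strings.SplitN(filter, "=", 2)
		if len(pair) < 2 {
			continue // malformed entries are skipped, as in the command code
		}
		switch key, val := pair[0], pair[1]; key {
		case "labelName":
			labelNames = append(labelNames, val)
		case "resourceKind":
			// the real command additionally validates val via pkger.Kind(val).OK()
			resourceKinds = append(resourceKinds, val)
		default:
			return nil, nil, fmt.Errorf("invalid filter provided %q; filter must be 1 in [labelName, resourceKind]", filter)
		}
	}
	return labelNames, resourceKinds, nil
}

func main() {
	labels, kinds, err := parseExportFilters([]string{"labelName=foo", "resourceKind=Dashboard"})
	fmt.Println(labels, kinds, err) // prints: [foo] [Dashboard] <nil>
}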
146 changes: 139 additions & 7 deletions cmd/influx/pkg_test.go
@@ -35,10 +35,19 @@ func TestCmdPkg(t *testing.T) {
}

t.Run("export all", func(t *testing.T) {
defaultAssertFn := func(t *testing.T, pkg *pkger.Pkg) {
t.Helper()
sum := pkg.Summary()

require.Len(t, sum.Buckets, 1)
assert.Equal(t, "bucket1", sum.Buckets[0].Name)
}

expectedOrgID := influxdb.ID(9000)

tests := []struct {
pkgFileArgs
assertFn func(t *testing.T, pkg *pkger.Pkg)
}{
{
pkgFileArgs: pkgFileArgs{
@@ -72,6 +81,106 @@
envVars: map[string]string{"INFLUX_ORG_ID": expectedOrgID.String()},
},
},
{
pkgFileArgs: pkgFileArgs{
name: "with labelName filter",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
args: []string{
"--org-id=" + expectedOrgID.String(),
"--filter=labelName=foo",
},
},
assertFn: func(t *testing.T, pkg *pkger.Pkg) {
defaultAssertFn(t, pkg)

sum := pkg.Summary()

require.Len(t, sum.Labels, 1)
assert.Equal(t, "foo", sum.Labels[0].Name)
},
},
{
pkgFileArgs: pkgFileArgs{
name: "with multiple labelName filters",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
args: []string{
"--org-id=" + expectedOrgID.String(),
"--filter=labelName=foo",
"--filter=labelName=bar",
},
},
assertFn: func(t *testing.T, pkg *pkger.Pkg) {
defaultAssertFn(t, pkg)

sum := pkg.Summary()

require.Len(t, sum.Labels, 2)
assert.Equal(t, "bar", sum.Labels[0].Name)
assert.Equal(t, "foo", sum.Labels[1].Name)
},
},
{
pkgFileArgs: pkgFileArgs{
name: "with resourceKind filter",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
args: []string{
"--org-id=" + expectedOrgID.String(),
"--filter=resourceKind=Dashboard",
},
},
assertFn: func(t *testing.T, pkg *pkger.Pkg) {
sum := pkg.Summary()

require.Len(t, sum.Dashboards, 1)
assert.Equal(t, "Dashboard", sum.Dashboards[0].Name)
},
},
{
pkgFileArgs: pkgFileArgs{
name: "with multiple resourceKind filter",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
args: []string{
"--org-id=" + expectedOrgID.String(),
"--filter=resourceKind=Dashboard",
"--filter=resourceKind=Bucket",
},
},
assertFn: func(t *testing.T, pkg *pkger.Pkg) {
sum := pkg.Summary()

require.Len(t, sum.Buckets, 1)
assert.Equal(t, "Bucket", sum.Buckets[0].Name)
require.Len(t, sum.Dashboards, 1)
assert.Equal(t, "Dashboard", sum.Dashboards[0].Name)
},
},
{
pkgFileArgs: pkgFileArgs{
name: "with mixed resourceKind and labelName filters",
encoding: pkger.EncodingYAML,
filename: "pkg_0.yml",
args: []string{
"--org-id=" + expectedOrgID.String(),
"--filter=labelName=foo",
"--filter=resourceKind=Dashboard",
"--filter=resourceKind=Bucket",
},
},
assertFn: func(t *testing.T, pkg *pkger.Pkg) {
sum := pkg.Summary()

require.Len(t, sum.Labels, 1)
assert.Equal(t, "foo", sum.Labels[0].Name)
require.Len(t, sum.Buckets, 1)
assert.Equal(t, "Bucket", sum.Buckets[0].Name)
require.Len(t, sum.Dashboards, 1)
assert.Equal(t, "Dashboard", sum.Dashboards[0].Name)
},
},
}

cmdFn := func(_ *globalFlags, opt genericCLIOpts) *cobra.Command {
@@ -83,11 +192,34 @@ func TestCmdPkg(t *testing.T) {
return nil, err
}
}
if opt.OrgIDs[0].OrgID != expectedOrgID {

orgIDOpt := opt.OrgIDs[0]
if orgIDOpt.OrgID != expectedOrgID {
return nil, errors.New("did not provide expected orgID")
}

var pkg pkger.Pkg
for _, labelName := range orgIDOpt.LabelNames {
pkg.Objects = append(pkg.Objects, pkger.Object{
APIVersion: pkger.APIVersion,
Type: pkger.KindLabel,
Metadata: pkger.Resource{"name": labelName},
})
}
if len(orgIDOpt.ResourceKinds) > 0 {
for _, k := range orgIDOpt.ResourceKinds {
pkg.Objects = append(pkg.Objects, pkger.Object{
APIVersion: pkger.APIVersion,
Type: k,
Metadata: pkger.Resource{
"name": k.String(),
},
})
}
// return early so we don't get the default bucket
return &pkg, nil
}

pkg.Objects = append(pkg.Objects, pkger.Object{
APIVersion: pkger.APIVersion,
Type: pkger.KindBucket,
@@ -98,14 +230,14 @@ func TestCmdPkg(t *testing.T) {
}
return newCmdPkgBuilder(fakeSVCFn(pkgSVC), opt).cmd()
}

for _, tt := range tests {
tt.pkgFileArgs.args = append([]string{"pkg", "export", "all"}, tt.pkgFileArgs.args...)
testPkgWrites(t, cmdFn, tt.pkgFileArgs, func(t *testing.T, pkg *pkger.Pkg) {
sum := pkg.Summary()

require.Len(t, sum.Buckets, 1)
assert.Equal(t, "bucket1", sum.Buckets[0].Name)
})
assertFn := defaultAssertFn
if tt.assertFn != nil {
assertFn = tt.assertFn
}
testPkgWrites(t, cmdFn, tt.pkgFileArgs, assertFn)
}
})

22 changes: 22 additions & 0 deletions cmd/influxd/launcher/pkger_test.go
@@ -415,6 +415,28 @@
assert.Empty(t, newSum.Dashboards)
assert.Empty(t, newSum.NotificationEndpoints)
assert.Empty(t, newSum.NotificationRules)
assert.Empty(t, newSum.TelegrafConfigs)
assert.Empty(t, newSum.Variables)
})

t.Run("filtered by label resource type", func(t *testing.T) {
newPkg, err := svc.CreatePkg(timedCtx(2*time.Second), pkger.CreateWithAllOrgResources(
pkger.CreateByOrgIDOpt{
OrgID: l.Org.ID,
ResourceKinds: []pkger.Kind{pkger.KindLabel},
},
))
require.NoError(t, err)

newSum := newPkg.Summary()
assert.NotEmpty(t, newSum.Labels)
assert.Empty(t, newSum.Buckets)
assert.Empty(t, newSum.Checks)
assert.Empty(t, newSum.Dashboards)
assert.Empty(t, newSum.NotificationEndpoints)
assert.Empty(t, newSum.NotificationRules)
assert.Empty(t, newSum.Tasks)
assert.Empty(t, newSum.TelegrafConfigs)
assert.Empty(t, newSum.Variables)
})
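
Building on the option fields exercised above, the following is a hedged sketch of combining both filters in a single CreateByOrgIDOpt. The helper name exportFiltered, the concrete service type, and the import paths are assumptions based on the repository layout at the time of this commit, not part of the diff:

package pkgexport

import (
	"context"
	"time"

	"github.com/influxdata/influxdb"
	"github.com/influxdata/influxdb/pkger"
)

// exportFiltered asks the pkger service for all org resources, restricted to
// Label and Bucket kinds and to resources associated with the "foo" label.
func exportFiltered(svc *pkger.Service, orgID influxdb.ID) (*pkger.Pkg, error) {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	return svc.CreatePkg(ctx, pkger.CreateWithAllOrgResources(pkger.CreateByOrgIDOpt{
		OrgID:         orgID,
		LabelNames:    []string{"foo"},
		ResourceKinds: []pkger.Kind{pkger.KindLabel, pkger.KindBucket},
	}))
}

Judging from resourceCloneAssociationsGen in the service changes below, label-name filters also trim label associations on the cloned resources, so only associations to the named labels should survive the export.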

58 changes: 47 additions & 11 deletions pkger/service.go
@@ -234,7 +234,10 @@ func (s *Service) CreatePkg(ctx context.Context, setters ...CreatePkgSetFn) (*Pk
}

for _, orgIDOpt := range opt.OrgIDs {
cloneAssFn := s.resourceCloneAssociationsGen(orgIDOpt.LabelNames...)
cloneAssFn, err := s.resourceCloneAssociationsGen(ctx, orgIDOpt.LabelNames...)
if err != nil {
return nil, err
}
resourcesToClone, err := s.cloneOrgResources(ctx, orgIDOpt.OrgID, orgIDOpt.ResourceKinds)
if err != nil {
return nil, internalErr(err)
@@ -248,7 +251,10 @@
}
}

cloneAssFn := s.resourceCloneAssociationsGen()
cloneAssFn, err := s.resourceCloneAssociationsGen(ctx)
if err != nil {
return nil, err
}
for _, r := range uniqResourcesToClone(opt.Resources) {
newKinds, err := s.resourceCloneToKind(ctx, r, cloneAssFn)
if err != nil {
@@ -666,10 +672,15 @@ type (
cloneAssociationsFn func(context.Context, ResourceToClone) (associations associations, skipResource bool, err error)
)

func (s *Service) resourceCloneAssociationsGen(labelNames ...string) cloneAssociationsFn {
func (s *Service) resourceCloneAssociationsGen(ctx context.Context, labelNames ...string) (cloneAssociationsFn, error) {
mLabelNames := make(map[string]bool)
for _, labelname := range labelNames {
mLabelNames[labelname] = true
for _, labelName := range labelNames {
mLabelNames[labelName] = true
}

mLabelIDs, err := getLabelIDMap(ctx, s.labelSVC, labelNames)
if err != nil {
return nil, err
}

type key struct {
@@ -678,9 +689,14 @@ func (s *Service) resourceCloneAssociationsGen(labelNames ...string) cloneAssoci
}
// memoize the labels so we don't create duplicates
m := make(map[key]bool)
return func(ctx context.Context, r ResourceToClone) (associations, bool, error) {
if r.Kind.is(KindUnknown, KindLabel) {
return associations{}, false, nil
cloneFn := func(ctx context.Context, r ResourceToClone) (associations, bool, error) {
if r.Kind.is(KindUnknown) {
return associations{}, true, nil
}
if r.Kind.is(KindLabel) {
// when label filters are provided, skip label resources whose ID does not resolve to one of the requested label names
shouldSkip := len(mLabelIDs) > 0 && !mLabelIDs[r.ID]
return associations{}, shouldSkip, nil
}

labels, err := s.labelSVC.FindResourceLabels(ctx, influxdb.LabelMappingFilter{
@@ -694,7 +710,7 @@ func (s *Service) resourceCloneAssociationsGen(labelNames ...string) cloneAssoci
if len(mLabelNames) > 0 {
shouldSkip := true
for _, l := range labels {
if mLabelNames[l.Name] {
if _, ok := mLabelNames[l.Name]; ok {
shouldSkip = false
break
}
@@ -706,8 +722,10 @@ func (s *Service) resourceCloneAssociationsGen(labelNames ...string) cloneAssoci

var ass associations
for _, l := range labels {
if len(mLabelNames) > 0 && !mLabelNames[l.Name] {
continue
if len(mLabelNames) > 0 {
if _, ok := mLabelNames[l.Name]; !ok {
continue
}
}

ass.associations = append(ass.associations, Resource{
@@ -723,6 +741,8 @@ func (s *Service) resourceCloneAssociationsGen(labelNames ...string) cloneAssoci
}
return ass, false, nil
}

return cloneFn, nil
}

// DryRun provides a dry run of the pkg application. The pkg will be marked verified
@@ -2178,6 +2198,22 @@ func (s *Service) findDashboardByIDFull(ctx context.Context, id influxdb.ID) (*i
return dash, nil
}

func getLabelIDMap(ctx context.Context, labelSVC influxdb.LabelService, labelNames []string) (map[influxdb.ID]bool, error) {
mLabelIDs := make(map[influxdb.ID]bool)
for _, labelName := range labelNames {
iLabels, err := labelSVC.FindLabels(ctx, influxdb.LabelFilter{
Name: labelName,
})
if err != nil {
return nil, err
}
if len(iLabels) == 1 {
mLabelIDs[iLabels[0].ID] = true
}
}
return mLabelIDs, nil
}

type doMutex struct {
sync.Mutex
}