From 784b222d4dc4df96f84fa3a29e7511a501dc5f00 Mon Sep 17 00:00:00 2001 From: Johnny Steenbergen Date: Sun, 3 May 2020 10:34:24 -0700 Subject: [PATCH] feat(pkger): enforce metadata.name dns name compliance this PR includes a lot of small changes to names in existing test pkgs. the tests are updated to follow suit. closes: #17940 --- CHANGELOG.md | 1 + cmd/influx/pkg_test.go | 34 +- cmd/influxd/launcher/pkger_test.go | 317 ++++----- pkger/clone_resource.go | 6 +- pkger/http_server_test.go | 8 +- pkger/internal/wordplay/wordplay.go | 2 +- pkger/models_test.go | 4 +- pkger/parser.go | 62 +- pkger/parser_models.go | 19 +- pkger/parser_test.go | 622 +++++++++--------- pkger/service_test.go | 147 ++--- pkger/testdata/bucket.json | 4 +- pkger/testdata/bucket.yml | 4 +- pkger/testdata/bucket_associates_label.json | 18 +- pkger/testdata/bucket_associates_label.yml | 18 +- .../testdata/bucket_associates_labels.jsonnet | 10 +- pkger/testdata/checks.json | 10 +- pkger/testdata/checks.yml | 10 +- pkger/testdata/dashboard.json | 4 +- pkger/testdata/dashboard.yml | 4 +- .../testdata/dashboard_associates_label.json | 10 +- pkger/testdata/dashboard_associates_label.yml | 10 +- pkger/testdata/dashboard_gauge.json | 2 +- pkger/testdata/dashboard_gauge.yml | 2 +- pkger/testdata/dashboard_heatmap.json | 2 +- pkger/testdata/dashboard_heatmap.yml | 2 +- pkger/testdata/dashboard_histogram.json | 2 +- pkger/testdata/dashboard_histogram.yml | 2 +- pkger/testdata/dashboard_markdown.json | 2 +- pkger/testdata/dashboard_markdown.yml | 2 +- pkger/testdata/dashboard_scatter.json | 2 +- pkger/testdata/dashboard_scatter.yml | 2 +- .../dashboard_single_stat_plus_line.json | 2 +- .../dashboard_single_stat_plus_line.yml | 2 +- pkger/testdata/dashboard_table.json | 2 +- pkger/testdata/dashboard_table.yml | 2 +- pkger/testdata/dashboard_xy.json | 2 +- pkger/testdata/dashboard_xy.yml | 2 +- pkger/testdata/label.json | 6 +- pkger/testdata/label.yml | 6 +- pkger/testdata/notification_endpoint.json | 22 +- pkger/testdata/notification_endpoint.yml | 22 +- .../notification_endpoint_secrets.yml | 2 +- pkger/testdata/notification_rule.json | 14 +- pkger/testdata/notification_rule.yml | 14 +- pkger/testdata/remote_bucket.json | 2 +- pkger/testdata/tasks.json | 12 +- pkger/testdata/tasks.yml | 12 +- pkger/testdata/telegraf.json | 12 +- pkger/testdata/telegraf.yml | 12 +- pkger/testdata/variable_associates_label.yml | 6 +- pkger/testdata/variables.json | 14 +- pkger/testdata/variables.yml | 14 +- 53 files changed, 790 insertions(+), 736 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 814b35ead49..3f1135c8fb0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ### Features 1. [17934](https://github.com/influxdata/influxdb/pull/17934): Add ability to delete a stack and all the resources associated with it +1. 
[17941](https://github.com/influxdata/influxdb/pull/17941): Enforce DNS name compliance on all pkger resources' metadata.name field ### Bug Fixes diff --git a/cmd/influx/pkg_test.go b/cmd/influx/pkg_test.go index b2ba7c27b07..75bea893a30 100644 --- a/cmd/influx/pkg_test.go +++ b/cmd/influx/pkg_test.go @@ -135,7 +135,7 @@ func TestCmdPkg(t *testing.T) { sum := pkg.Summary() require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, { @@ -153,9 +153,9 @@ func TestCmdPkg(t *testing.T) { sum := pkg.Summary() require.Len(t, sum.Buckets, 1) - assert.Equal(t, "Bucket", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, { @@ -176,9 +176,9 @@ func TestCmdPkg(t *testing.T) { require.Len(t, sum.Labels, 1) assert.Equal(t, "foo", sum.Labels[0].Name) require.Len(t, sum.Buckets, 1) - assert.Equal(t, "Bucket", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "Dashboard", sum.Dashboards[0].Name) + assert.Equal(t, "dashboard", sum.Dashboards[0].Name) }, }, } @@ -212,7 +212,7 @@ func TestCmdPkg(t *testing.T) { APIVersion: pkger.APIVersion, Kind: k, Metadata: pkger.Resource{ - "name": k.String(), + "name": strings.ToLower(k.String()), }, }) } @@ -335,7 +335,7 @@ func TestCmdPkg(t *testing.T) { if rc.Kind == pkger.KindNotificationEndpoint { rc.Kind = pkger.KindNotificationEndpointHTTP } - name := rc.Kind.String() + strconv.Itoa(int(rc.ID)) + name := strings.ToLower(rc.Kind.String()) + strconv.Itoa(int(rc.ID)) pkg.Objects = append(pkg.Objects, pkger.Object{ APIVersion: pkger.APIVersion, Kind: rc.Kind, @@ -366,45 +366,49 @@ func TestCmdPkg(t *testing.T) { testPkgWrites(t, cmdFn, tt.pkgFileArgs, func(t *testing.T, pkg *pkger.Pkg) { sum := pkg.Summary() + kindToName := func(k pkger.Kind, id influxdb.ID) string { + return strings.ToLower(k.String()) + strconv.Itoa(int(id)) + } + require.Len(t, sum.Buckets, len(tt.bucketIDs)) for i, id := range tt.bucketIDs { actual := sum.Buckets[i] - assert.Equal(t, pkger.KindBucket.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindBucket, id), actual.Name) } require.Len(t, sum.Dashboards, len(tt.dashIDs)) for i, id := range tt.dashIDs { actual := sum.Dashboards[i] - assert.Equal(t, pkger.KindDashboard.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindDashboard, id), actual.Name) } require.Len(t, sum.NotificationEndpoints, len(tt.endpointIDs)) for i, id := range tt.endpointIDs { actual := sum.NotificationEndpoints[i] - assert.Equal(t, pkger.KindNotificationEndpointHTTP.String()+strconv.Itoa(int(id)), actual.NotificationEndpoint.GetName()) + assert.Equal(t, kindToName(pkger.KindNotificationEndpointHTTP, id), actual.NotificationEndpoint.GetName()) } require.Len(t, sum.Labels, len(tt.labelIDs)) for i, id := range tt.labelIDs { actual := sum.Labels[i] - assert.Equal(t, pkger.KindLabel.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindLabel, id), actual.Name) } require.Len(t, sum.NotificationRules, len(tt.ruleIDs)) for i, id := range tt.ruleIDs { actual := sum.NotificationRules[i] - assert.Equal(t, pkger.KindNotificationRule.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindNotificationRule, id), actual.Name) } require.Len(t,
sum.Tasks, len(tt.taskIDs)) for i, id := range tt.taskIDs { actual := sum.Tasks[i] - assert.Equal(t, pkger.KindTask.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindTask, id), actual.Name) } require.Len(t, sum.TelegrafConfigs, len(tt.telegrafIDs)) for i, id := range tt.telegrafIDs { actual := sum.TelegrafConfigs[i] - assert.Equal(t, pkger.KindTelegraf.String()+strconv.Itoa(int(id)), actual.TelegrafConfig.Name) + assert.Equal(t, kindToName(pkger.KindTelegraf, id), actual.TelegrafConfig.Name) } require.Len(t, sum.Variables, len(tt.varIDs)) for i, id := range tt.varIDs { actual := sum.Variables[i] - assert.Equal(t, pkger.KindVariable.String()+strconv.Itoa(int(id)), actual.Name) + assert.Equal(t, kindToName(pkger.KindVariable, id), actual.Name) } }) } diff --git a/cmd/influxd/launcher/pkger_test.go b/cmd/influxd/launcher/pkger_test.go index 8d95e780776..03617ca5ba4 100644 --- a/cmd/influxd/launcher/pkger_test.go +++ b/cmd/influxd/launcher/pkger_test.go @@ -236,17 +236,17 @@ func TestLauncher_Pkger(t *testing.T) { }) require.NoError(t, err) - newEndpointPkgName := "non_existent_endpoint" + newEndpointPkgName := "non-existent-endpoint" allResourcesPkg := newPkg( - newBucketObject("non_existent_bucket", "", ""), - newCheckDeadmanObject(t, "non_existent_check", "", time.Minute), - newDashObject("non_existent_dash", "", ""), + newBucketObject("non-existent-bucket", "", ""), + newCheckDeadmanObject(t, "non-existent-check", "", time.Minute), + newDashObject("non-existent-dash", "", ""), newEndpointHTTP(newEndpointPkgName, "", ""), - newLabelObject("non_existent_label", "", "", ""), - newRuleObject(t, "non_existent_rule", "", newEndpointPkgName, ""), - newTaskObject("non_existent_task", "", ""), - newTelegrafObject("non_existent_tele", "", ""), - newVariableObject("non_existent_var", "", ""), + newLabelObject("non-existent-label", "", "", ""), + newRuleObject(t, "non-existent-rule", "", newEndpointPkgName, ""), + newTaskObject("non-existent-task", "", ""), + newTelegrafObject("non-existent-tele", "", ""), + newVariableObject("non-existent-var", "", ""), ) sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allResourcesPkg, pkger.ApplyWithStackID(newStack.ID)) @@ -347,7 +347,7 @@ func TestLauncher_Pkger(t *testing.T) { t.Run("apply with only a stackID succeeds when stack has URLs", func(t *testing.T) { svr := httptest.NewServer(nethttp.HandlerFunc(func(w nethttp.ResponseWriter, r *nethttp.Request) { - pkg := newPkg(newBucketObject("bucket_0", "", "")) + pkg := newPkg(newBucketObject("bucket-0", "", "")) b, err := pkg.Encode(pkger.EncodingJSON) if err != nil { w.WriteHeader(nethttp.StatusInternalServerError) @@ -361,7 +361,7 @@ func TestLauncher_Pkger(t *testing.T) { require.NoError(t, err) defer f.Close() - pkg := newPkg(newBucketObject("bucket_1", "", "")) + pkg := newPkg(newBucketObject("bucket-1", "", "")) b, err := pkg.Encode(pkger.EncodingYAML) require.NoError(t, err) f.Write(b) @@ -387,10 +387,10 @@ func TestLauncher_Pkger(t *testing.T) { sumEquals := func(t *testing.T, sum pkger.Summary) { t.Helper() require.Len(t, sum.Buckets, 2) - assert.Equal(t, "bucket_0", sum.Buckets[0].PkgName) - assert.Equal(t, "bucket_0", sum.Buckets[0].Name) - assert.Equal(t, "bucket_1", sum.Buckets[1].PkgName) - assert.Equal(t, "bucket_1", sum.Buckets[1].Name) + assert.Equal(t, "bucket-0", sum.Buckets[0].PkgName) + assert.Equal(t, "bucket-0", sum.Buckets[0].Name) + assert.Equal(t, "bucket-1", sum.Buckets[1].PkgName) + assert.Equal(t, "bucket-1", sum.Buckets[1].Name) } sum, _, err := 
svc.DryRun(ctx, l.Org.ID, l.User.ID, nil, pkger.ApplyWithStackID(newStack.ID)) @@ -414,23 +414,23 @@ func TestLauncher_Pkger(t *testing.T) { require.Len(t, sum.Buckets, 1) assert.NotZero(t, sum.Buckets[0].ID) - assert.Equal(t, "bucket_0", sum.Buckets[0].Name) + assert.Equal(t, "bucket", sum.Buckets[0].Name) require.Len(t, sum.Checks, 1) assert.NotZero(t, sum.Checks[0].Check.GetID()) - assert.Equal(t, "check_0", sum.Checks[0].Check.GetName()) + assert.Equal(t, "check-0", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) assert.NotZero(t, sum.Dashboards[0].ID) - assert.Equal(t, "dash_0", sum.Dashboards[0].Name) + assert.Equal(t, "dash-0", sum.Dashboards[0].Name) require.Len(t, sum.NotificationEndpoints, 1) assert.NotZero(t, sum.NotificationEndpoints[0].NotificationEndpoint.GetID()) - assert.Equal(t, "endpoint_0", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + assert.Equal(t, "endpoint-0", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) require.Len(t, sum.NotificationRules, 1) assert.NotZero(t, sum.NotificationRules[0].ID) - assert.Equal(t, "rule_0", sum.NotificationRules[0].Name) + assert.Equal(t, "rule-0", sum.NotificationRules[0].Name) require.Len(t, sum.Labels, 1) assert.NotZero(t, sum.Labels[0].ID) @@ -438,11 +438,11 @@ func TestLauncher_Pkger(t *testing.T) { require.Len(t, sum.Tasks, 1) assert.NotZero(t, sum.Tasks[0].ID) - assert.Equal(t, "task_0", sum.Tasks[0].Name) + assert.Equal(t, "task-0", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) assert.NotZero(t, sum.TelegrafConfigs[0].TelegrafConfig.ID) - assert.Equal(t, "tele_0", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "tele-0", sum.TelegrafConfigs[0].TelegrafConfig.Name) resources := []struct { resID influxdb.ID @@ -471,17 +471,17 @@ func TestLauncher_Pkger(t *testing.T) { newObjectsFn := func() []pkger.Object { return []pkger.Object{ - newBucketObject("bucket", "bucket_0", ""), - newCheckDeadmanObject(t, "check_0", "", time.Hour), - newDashObject("dash_0", "", ""), - newEndpointHTTP("endpoint_0", "", ""), - newRuleObject(t, "rule_0", "", "endpoint_0", ""), - newTaskObject("task_0", "", ""), - newTelegrafObject("tele_0", "", ""), - newVariableObject("var_0", "", ""), + newBucketObject("bucket", "", ""), + newCheckDeadmanObject(t, "check-0", "", time.Hour), + newDashObject("dash-0", "", ""), + newEndpointHTTP("endpoint-0", "", ""), + newRuleObject(t, "rule-0", "", "endpoint-0", ""), + newTaskObject("task-0", "", ""), + newTelegrafObject("tele-0", "", ""), + newVariableObject("var-0", "", ""), } } - labelObj := newLabelObject("label_1", "label 1", "", "") + labelObj := newLabelObject("label-1", "label 1", "", "") stack, err := svc.InitStack(ctx, l.User.ID, pkger.Stack{ OrgID: l.Org.ID, @@ -628,15 +628,15 @@ func TestLauncher_Pkger(t *testing.T) { applyOpt := pkger.ApplyWithStackID(stack.ID) var ( - initialBucketPkgName = "rucketeer_1" + initialBucketPkgName = "rucketeer-1" initialCheckPkgName = "checkers" - initialDashPkgName = "dash_of_salt" + initialDashPkgName = "dash-of-salt" initialEndpointPkgName = "endzo" initialLabelPkgName = "labelino" - initialRulePkgName = "oh_doyle_rules" + initialRulePkgName = "oh-doyle-rules" initialTaskPkgName = "tap" initialTelegrafPkgName = "teletype" - initialVariablePkgName = "laces out dan" + initialVariablePkgName = "laces-out-dan" ) initialPkg := newPkg( newBucketObject(initialBucketPkgName, "display name", "init desc"), @@ -851,21 +851,21 @@ func TestLauncher_Pkger(t *testing.T) { ) svc = pkger.MWLogging(logger)(svc) - 
endpointPkgName := "z_endpoint_rolls_back" + endpointPkgName := "z-endpoint-rolls-back" pkgWithDelete := newPkg( - newBucketObject("z_roll_me_back", "", ""), - newBucketObject("z_rolls_back_too", "", ""), - newDashObject("z_rolls_dash", "", ""), - newLabelObject("z_label_roller", "", "", ""), - newCheckDeadmanObject(t, "z_check", "", time.Hour), + newBucketObject("z-roll-me-back", "", ""), + newBucketObject("z-rolls-back-too", "", ""), + newDashObject("z-rolls-dash", "", ""), + newLabelObject("z-label-roller", "", "", ""), + newCheckDeadmanObject(t, "z-check", "", time.Hour), newEndpointHTTP(endpointPkgName, "", ""), - newRuleObject(t, "z_rules_back", "", endpointPkgName, ""), - newRuleObject(t, "z_rules_back_2", "", endpointPkgName, ""), - newRuleObject(t, "z_rules_back_3", "", endpointPkgName, ""), - newTaskObject("z_task_rolls_back", "", ""), - newTelegrafObject("z_telegraf_rolls_back", "", ""), - newVariableObject("z_var_rolls_back", "", ""), + newRuleObject(t, "z-rules-back", "", endpointPkgName, ""), + newRuleObject(t, "z-rules-back-2", "", endpointPkgName, ""), + newRuleObject(t, "z-rules-back-3", "", endpointPkgName, ""), + newTaskObject("z-task-rolls-back", "", ""), + newTelegrafObject("z-telegraf-rolls-back", "", ""), + newVariableObject("z-var-rolls-back", "", ""), ) _, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, pkgWithDelete, applyOpt) require.Error(t, err) @@ -902,48 +902,48 @@ func TestLauncher_Pkger(t *testing.T) { t.Log("validate all changes do not persist") { - for _, name := range []string{"z_roll_me_back", "z_rolls_back_too"} { + for _, name := range []string{"z-roll-me-back", "z-rolls-back-too"} { _, err := resourceCheck.getBucket(t, byName(name)) assert.Error(t, err) } - for _, name := range []string{"z_rules_back", "z_rules_back_2", "z_rules_back_3"} { + for _, name := range []string{"z-rules-back", "z-rules-back-2", "z-rules-back-3"} { _, err = resourceCheck.getRule(t, byName(name)) assert.Error(t, err) } - _, err := resourceCheck.getCheck(t, byName("z_check")) + _, err := resourceCheck.getCheck(t, byName("z-check")) assert.Error(t, err) - _, err = resourceCheck.getDashboard(t, byName("z_rolls_dash")) + _, err = resourceCheck.getDashboard(t, byName("z-rolls_dash")) assert.Error(t, err) - _, err = resourceCheck.getEndpoint(t, byName("z_endpoint_rolls_back")) + _, err = resourceCheck.getEndpoint(t, byName("z-endpoint-rolls-back")) assert.Error(t, err) - _, err = resourceCheck.getLabel(t, byName("z_label_roller")) + _, err = resourceCheck.getLabel(t, byName("z-label-roller")) assert.Error(t, err) - _, err = resourceCheck.getTelegrafConfig(t, byName("z_telegraf_rolls_back")) + _, err = resourceCheck.getTelegrafConfig(t, byName("z-telegraf-rolls-back")) assert.Error(t, err) - _, err = resourceCheck.getVariable(t, byName("z_var_rolls_back")) + _, err = resourceCheck.getVariable(t, byName("z-var-rolls-back")) assert.Error(t, err) } }) t.Run("apply pkg with stack id where resources have been removed since last run", func(t *testing.T) { - newEndpointPkgName := "non_existent_endpoint" + newEndpointPkgName := "non-existent-endpoint" allNewResourcesPkg := newPkg( - newBucketObject("non_existent_bucket", "", ""), - newCheckDeadmanObject(t, "non_existent_check", "", time.Minute), - newDashObject("non_existent_dash", "", ""), + newBucketObject("non-existent-bucket", "", ""), + newCheckDeadmanObject(t, "non-existent-check", "", time.Minute), + newDashObject("non-existent-dash", "", ""), newEndpointHTTP(newEndpointPkgName, "", ""), - newLabelObject("non_existent_label", "", "", 
""), - newRuleObject(t, "non_existent_rule", "", newEndpointPkgName, ""), - newTaskObject("non_existent_task", "", ""), - newTelegrafObject("non_existent_tele", "", ""), - newVariableObject("non_existent_var", "", ""), + newLabelObject("non-existent-label", "", "", ""), + newRuleObject(t, "non-existent-rule", "", newEndpointPkgName, ""), + newTaskObject("non-existent-task", "", ""), + newTelegrafObject("non-existent-tele", "", ""), + newVariableObject("non-existent-var", "", ""), ) sum, _, err := svc.Apply(ctx, l.Org.ID, l.User.ID, allNewResourcesPkg, applyOpt) require.NoError(t, err) @@ -952,19 +952,19 @@ func TestLauncher_Pkger(t *testing.T) { assert.NotEqual(t, initialSum.Buckets[0].ID, sum.Buckets[0].ID) assert.NotZero(t, sum.Buckets[0].ID) defer resourceCheck.mustDeleteBucket(t, influxdb.ID(sum.Buckets[0].ID)) - assert.Equal(t, "non_existent_bucket", sum.Buckets[0].Name) + assert.Equal(t, "non-existent-bucket", sum.Buckets[0].Name) require.Len(t, sum.Checks, 1) assert.NotEqual(t, initialSum.Checks[0].Check.GetID(), sum.Checks[0].Check.GetID()) assert.NotZero(t, sum.Checks[0].Check.GetID()) defer resourceCheck.mustDeleteCheck(t, sum.Checks[0].Check.GetID()) - assert.Equal(t, "non_existent_check", sum.Checks[0].Check.GetName()) + assert.Equal(t, "non-existent-check", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) assert.NotEqual(t, initialSum.Dashboards[0].ID, sum.Dashboards[0].ID) assert.NotZero(t, sum.Dashboards[0].ID) defer resourceCheck.mustDeleteDashboard(t, influxdb.ID(sum.Dashboards[0].ID)) - assert.Equal(t, "non_existent_dash", sum.Dashboards[0].Name) + assert.Equal(t, "non-existent-dash", sum.Dashboards[0].Name) require.Len(t, sum.NotificationEndpoints, 1) sumEndpoint := sum.NotificationEndpoints[0].NotificationEndpoint @@ -978,57 +978,57 @@ func TestLauncher_Pkger(t *testing.T) { assert.NotEqual(t, initialSum.NotificationRules[0].ID, sumRule.ID) assert.NotZero(t, sumRule.ID) defer resourceCheck.mustDeleteRule(t, influxdb.ID(sumRule.ID)) - assert.Equal(t, "non_existent_rule", sumRule.Name) + assert.Equal(t, "non-existent-rule", sumRule.Name) require.Len(t, sum.Labels, 1) assert.NotEqual(t, initialSum.Labels[0].ID, sum.Labels[0].ID) assert.NotZero(t, sum.Labels[0].ID) defer resourceCheck.mustDeleteLabel(t, influxdb.ID(sum.Labels[0].ID)) - assert.Equal(t, "non_existent_label", sum.Labels[0].Name) + assert.Equal(t, "non-existent-label", sum.Labels[0].Name) require.Len(t, sum.Tasks, 1) assert.NotEqual(t, initialSum.Tasks[0].ID, sum.Tasks[0].ID) assert.NotZero(t, sum.Tasks[0].ID) defer resourceCheck.mustDeleteTask(t, influxdb.ID(sum.Tasks[0].ID)) - assert.Equal(t, "non_existent_task", sum.Tasks[0].Name) + assert.Equal(t, "non-existent-task", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) newTele := sum.TelegrafConfigs[0].TelegrafConfig assert.NotEqual(t, initialSum.TelegrafConfigs[0].TelegrafConfig.ID, newTele.ID) assert.NotZero(t, newTele.ID) defer resourceCheck.mustDeleteTelegrafConfig(t, newTele.ID) - assert.Equal(t, "non_existent_tele", newTele.Name) + assert.Equal(t, "non-existent-tele", newTele.Name) require.Len(t, sum.Variables, 1) assert.NotEqual(t, initialSum.Variables[0].ID, sum.Variables[0].ID) assert.NotZero(t, sum.Variables[0].ID) defer resourceCheck.mustDeleteVariable(t, influxdb.ID(sum.Variables[0].ID)) - assert.Equal(t, "non_existent_var", sum.Variables[0].Name) + assert.Equal(t, "non-existent-var", sum.Variables[0].Name) t.Log("\tvalidate all resources are created") { - bkt := resourceCheck.mustGetBucket(t, byName("non_existent_bucket")) 
+ bkt := resourceCheck.mustGetBucket(t, byName("non-existent-bucket")) assert.Equal(t, pkger.SafeID(bkt.ID), sum.Buckets[0].ID) - chk := resourceCheck.mustGetCheck(t, byName("non_existent_check")) + chk := resourceCheck.mustGetCheck(t, byName("non-existent-check")) assert.Equal(t, chk.GetID(), sum.Checks[0].Check.GetID()) endpoint := resourceCheck.mustGetEndpoint(t, byName(newEndpointPkgName)) assert.Equal(t, endpoint.GetID(), sum.NotificationEndpoints[0].NotificationEndpoint.GetID()) - label := resourceCheck.mustGetLabel(t, byName("non_existent_label")) + label := resourceCheck.mustGetLabel(t, byName("non-existent-label")) assert.Equal(t, pkger.SafeID(label.ID), sum.Labels[0].ID) - actualRule := resourceCheck.mustGetRule(t, byName("non_existent_rule")) + actualRule := resourceCheck.mustGetRule(t, byName("non-existent-rule")) assert.Equal(t, pkger.SafeID(actualRule.GetID()), sum.NotificationRules[0].ID) - task := resourceCheck.mustGetTask(t, byName("non_existent_task")) + task := resourceCheck.mustGetTask(t, byName("non-existent-task")) assert.Equal(t, pkger.SafeID(task.ID), sum.Tasks[0].ID) - tele := resourceCheck.mustGetTelegrafConfig(t, byName("non_existent_tele")) + tele := resourceCheck.mustGetTelegrafConfig(t, byName("non-existent-tele")) assert.Equal(t, tele.ID, sum.TelegrafConfigs[0].TelegrafConfig.ID) - variable := resourceCheck.mustGetVariable(t, byName("non_existent_var")) + variable := resourceCheck.mustGetVariable(t, byName("non-existent-var")) assert.Equal(t, pkger.SafeID(variable.ID), sum.Variables[0].ID) } @@ -1175,50 +1175,50 @@ func TestLauncher_Pkger(t *testing.T) { labels := sum.Labels require.Len(t, labels, 2) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum.Buckets require.Len(t, bkts, 1) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := sum.Dashboards require.Len(t, dashs, 1) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") endpoints := sum.NotificationEndpoints require.Len(t, endpoints, 1) assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) - hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum.Tasks, 1) task := sum.Tasks[0] assert.Equal(t, "task_1", task.Name) assert.Equal(t, "desc_1", task.Description) assert.Equal(t, "15 * * * *", task.Cron) - hasLabelAssociations(t, task.LabelAssociations, 1, "label_1") + hasLabelAssociations(t, task.LabelAssociations, 1, "label-1") teles := 
sum.TelegrafConfigs require.Len(t, teles, 1) assert.Equal(t, "first tele config", teles[0].TelegrafConfig.Name) assert.Equal(t, "desc", teles[0].TelegrafConfig.Description) - hasLabelAssociations(t, teles[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, teles[0].LabelAssociations, 1, "label-1") vars := sum.Variables require.Len(t, vars, 1) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1276,7 +1276,7 @@ spec: labels := sum1.Labels require.Len(t, labels, 2) assert.NotZero(t, labels[0].ID) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum1.Buckets @@ -1284,14 +1284,14 @@ spec: assert.NotZero(t, bkts[0].ID) assert.NotEmpty(t, bkts[0].PkgName) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum1.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") for _, ch := range checks { assert.NotZero(t, ch.Check.GetID()) } @@ -1302,7 +1302,7 @@ spec: assert.NotEmpty(t, dashs[0].Name) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1312,14 +1312,14 @@ spec: assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) assert.Equal(t, influxdb.TaskStatusInactive, string(endpoints[0].NotificationEndpoint.GetStatus())) - hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum1.NotificationRules, 1) rule := sum1.NotificationRules[0] assert.NotZero(t, rule.ID) assert.Equal(t, "rule_0", rule.Name) assert.Equal(t, pkger.SafeID(endpoints[0].NotificationEndpoint.GetID()), rule.EndpointID) - assert.Equal(t, "http_none_auth_notification_endpoint", rule.EndpointPkgName) + assert.Equal(t, "http-none-auth-notification-endpoint", rule.EndpointPkgName) assert.Equalf(t, "http", rule.EndpointType, "rule: %+v", rule) require.Len(t, sum1.Tasks, 1) @@ -1340,7 +1340,7 @@ spec: require.Len(t, vars, 1) assert.NotZero(t, vars[0].ID) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1405,28 +1405,28 @@ spec: labels := sum.Labels 
require.Len(t, labels, 2) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Equal(t, "the 2nd label", labels[1].Name) bkts := sum.Buckets require.Len(t, bkts, 1) assert.NotEmpty(t, bkts[0].PkgName) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := sum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := sum.Dashboards require.Len(t, dashs, 1) assert.NotEmpty(t, dashs[0].Name) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1435,7 +1435,7 @@ spec: assert.Equal(t, "no auth endpoint", endpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, "http none auth desc", endpoints[0].NotificationEndpoint.GetDescription()) assert.Equal(t, influxdb.TaskStatusInactive, string(endpoints[0].NotificationEndpoint.GetStatus())) - hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, endpoints[0].LabelAssociations, 1, "label-1") require.Len(t, sum.NotificationRules, 1) rule := sum.NotificationRules[0] @@ -1457,7 +1457,7 @@ spec: vars := sum.Variables require.Len(t, vars, 1) assert.Equal(t, "query var", vars[0].Name) - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1613,7 +1613,7 @@ spec: apiVersion: %[1]s kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history routingKey: secret-sauce @@ -1634,7 +1634,7 @@ spec: apiVersion: %[1]s kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history routingKey: @@ -1711,7 +1711,7 @@ spec: labels := newSum.Labels require.Len(t, labels, 2) assert.Zero(t, labels[0].ID) - assert.Equal(t, "label_1", labels[0].Name) + assert.Equal(t, "label-1", labels[0].Name) assert.Zero(t, labels[1].ID) assert.Equal(t, "the 2nd label", labels[1].Name) @@ -1719,21 +1719,21 @@ spec: require.Len(t, bkts, 1) assert.Zero(t, bkts[0].ID) assert.Equal(t, "rucketeer", bkts[0].Name) - hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, bkts[0].LabelAssociations, 2, "label-1", "the 2nd label") checks := newSum.Checks require.Len(t, checks, 2) assert.Equal(t, "check 0 name", checks[0].Check.GetName()) - hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label_1") - 
assert.Equal(t, "check_1", checks[1].Check.GetName()) - hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, checks[0].LabelAssociations, 1, "label-1") + assert.Equal(t, "check-1", checks[1].Check.GetName()) + hasLabelAssociations(t, checks[1].LabelAssociations, 1, "label-1") dashs := newSum.Dashboards require.Len(t, dashs, 1) assert.Zero(t, dashs[0].ID) assert.Equal(t, "dash_1", dashs[0].Name) assert.Equal(t, "desc1", dashs[0].Description) - hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label_1", "the 2nd label") + hasLabelAssociations(t, dashs[0].LabelAssociations, 2, "label-1", "the 2nd label") require.Len(t, dashs[0].Charts, 1) assert.Equal(t, influxdb.ViewPropertyTypeSingleStat, dashs[0].Charts[0].Properties.GetType()) @@ -1741,14 +1741,14 @@ spec: require.Len(t, newEndpoints, 1) assert.Equal(t, sum1Endpoints[0].NotificationEndpoint.GetName(), newEndpoints[0].NotificationEndpoint.GetName()) assert.Equal(t, sum1Endpoints[0].NotificationEndpoint.GetDescription(), newEndpoints[0].NotificationEndpoint.GetDescription()) - hasLabelAssociations(t, newEndpoints[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newEndpoints[0].LabelAssociations, 1, "label-1") require.Len(t, newSum.NotificationRules, 1) newRule := newSum.NotificationRules[0] assert.Equal(t, "new rule name", newRule.Name) assert.Zero(t, newRule.EndpointID) assert.NotEmpty(t, newRule.EndpointPkgName) - hasLabelAssociations(t, newRule.LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newRule.LabelAssociations, 1, "label-1") require.Len(t, newSum.Tasks, 1) newTask := newSum.Tasks[0] @@ -1763,13 +1763,13 @@ spec: require.Len(t, newSum.TelegrafConfigs, 1) assert.Equal(t, sum1Teles[0].TelegrafConfig.Name, newSum.TelegrafConfigs[0].TelegrafConfig.Name) assert.Equal(t, sum1Teles[0].TelegrafConfig.Description, newSum.TelegrafConfigs[0].TelegrafConfig.Description) - hasLabelAssociations(t, newSum.TelegrafConfigs[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, newSum.TelegrafConfigs[0].LabelAssociations, 1, "label-1") vars := newSum.Variables require.Len(t, vars, 1) assert.Zero(t, vars[0].ID) assert.Equal(t, "new name", vars[0].Name) // new name - hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label_1") + hasLabelAssociations(t, vars[0].LabelAssociations, 1, "label-1") varArgs := vars[0].Arguments require.NotNil(t, varArgs) assert.Equal(t, "query", varArgs.Type) @@ -1837,8 +1837,9 @@ spec: apiVersion: %[1]s kind: Task metadata: - name: Http.POST Synthetic (POST) + name: http-post-synthetic spec: + name: Http.POST Synthetic (POST) every: 5m query: |- import "strings" @@ -1992,24 +1993,24 @@ spec: require.NoError(t, err) require.Len(t, sum.Buckets, 1) - assert.Equal(t, "$bkt-1-name-ref", sum.Buckets[0].Name) + assert.Equal(t, "env-bkt-1-name-ref", sum.Buckets[0].Name) assert.Len(t, sum.Buckets[0].LabelAssociations, 1) require.Len(t, sum.Checks, 1) - assert.Equal(t, "$check-1-name-ref", sum.Checks[0].Check.GetName()) + assert.Equal(t, "env-check-1-name-ref", sum.Checks[0].Check.GetName()) require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "$dash-1-name-ref", sum.Dashboards[0].Name) + assert.Equal(t, "env-dash-1-name-ref", sum.Dashboards[0].Name) require.Len(t, sum.Labels, 1) - assert.Equal(t, "$label-1-name-ref", sum.Labels[0].Name) + assert.Equal(t, "env-label-1-name-ref", sum.Labels[0].Name) require.Len(t, sum.NotificationEndpoints, 1) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + 
assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) require.Len(t, sum.NotificationRules, 1) - assert.Equal(t, "$rule-1-name-ref", sum.NotificationRules[0].Name) + assert.Equal(t, "env-rule-1-name-ref", sum.NotificationRules[0].Name) require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$task-1-name-ref", sum.Tasks[0].Name) + assert.Equal(t, "env-task-1-name-ref", sum.Tasks[0].Name) require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "env-telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) require.Len(t, sum.Variables, 1) - assert.Equal(t, "$var-1-name-ref", sum.Variables[0].Name) + assert.Equal(t, "env-var-1-name-ref", sum.Variables[0].Name) expectedMissingEnvs := []string{ "bkt-1-name-ref", @@ -2078,31 +2079,31 @@ var pkgYMLStr = fmt.Sprintf(` apiVersion: %[1]s kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Label metadata: - name: the 2nd label + name: the-2nd-label spec: name: the 2nd label --- apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: name: rucketeer associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: the 2nd label + name: the-2nd-label --- apiVersion: %[1]s kind: Dashboard metadata: - name: dash_UUID + name: dash-uuid spec: name: dash_1 description: desc1 @@ -2122,14 +2123,14 @@ spec: hex: "#8F8AF4" associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: the 2nd label + name: the-2nd-label --- apiVersion: %[1]s kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: name: query var description: var_query_1 desc @@ -2139,24 +2140,24 @@ spec: buckets() |> filter(fn: (r) => r.name !~ /^_/) |> rename(columns: {name: "_value"}) |> keep(columns: ["_value"]) associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Telegraf metadata: - name: first_tele_config + name: first-tele-config spec: name: first tele config description: desc associations: - kind: Label - name: label_1 + name: label-1 config: %+q --- apiVersion: %[1]s kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint # on export of resource created from this, will not be same name as this + name: http-none-auth-notification-endpoint # on export of resource created from this, will not be same name as this spec: name: no auth endpoint type: none @@ -2166,12 +2167,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: name: check 0 name every: 1m @@ -2203,12 +2204,12 @@ spec: val: 30 associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: description: desc_1 every: 5m @@ -2227,16 +2228,16 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: NotificationRule metadata: - name: rule_UUID + name: rule-uuid spec: name: rule_0 description: desc_0 - endpointName: http_none_auth_notification_endpoint + endpointName: http-none-auth-notification-endpoint every: 10m offset: 30s messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -2254,12 +2255,12 @@ spec: operator: eQuAl associations: - kind: Label - name: label_1 + name: label-1 --- 
apiVersion: %[1]s kind: Task metadata: - name: task_UUID + name: task-uuid spec: name: task_1 description: desc_1 @@ -2269,31 +2270,31 @@ spec: |> yield() associations: - kind: Label - name: label_1 + name: label-1 `, pkger.APIVersion, telegrafCfg) var updatePkgYMLStr = fmt.Sprintf(` apiVersion: %[1]s kind: Label metadata: - name: label_1 + name: label-1 spec: descriptin: new desc --- apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: descriptin: new desc associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: description: new desc type: query @@ -2302,12 +2303,12 @@ spec: buckets() |> filter(fn: (r) => r.name !~ /^_/) |> rename(columns: {name: "_value"}) |> keep(columns: ["_value"]) associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: %[1]s kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: name: no auth endpoint type: none @@ -2319,7 +2320,7 @@ spec: apiVersion: %[1]s kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > diff --git a/pkger/clone_resource.go b/pkger/clone_resource.go index cb71dcc701a..b0b6ac1789d 100644 --- a/pkger/clone_resource.go +++ b/pkger/clone_resource.go @@ -377,9 +377,9 @@ func (ex *resourceExporter) findDashboardByIDFull(ctx context.Context, id influx } func (ex *resourceExporter) uniqName() string { - uuid := idGenerator.ID().String() + uuid := strings.ToLower(idGenerator.ID().String()) for i := 1; i < 250; i++ { - name := fmt.Sprintf("%s_%s", ex.nameGen(), uuid[10:]) + name := fmt.Sprintf("%s-%s", ex.nameGen(), uuid[10:]) if !ex.mPkgNames[name] { return name } @@ -992,7 +992,7 @@ func newObject(kind Kind, name string) Object { // this timestamp is added to make the resource unique. Should also indicate // to the end user that this is machine readable and the spec.name field is // the one they want to edit when a name change is desired. - fieldName: idGenerator.ID().String(), + fieldName: strings.ToLower(idGenerator.ID().String()), }, Spec: Resource{ fieldName: name, diff --git a/pkger/http_server_test.go b/pkger/http_server_test.go index aa7f685cbe4..e17da30fcf0 100644 --- a/pkger/http_server_test.go +++ b/pkger/http_server_test.go @@ -292,7 +292,7 @@ func TestPkgerHTTPServer(t *testing.T) { newBktPkg(t, "bkt3"), }, }, - expectedBkts: []string{"bkt1", "bkt2", "bkt3", "rucket_11"}, + expectedBkts: []string{"bkt1", "bkt2", "bkt3", "rucket-11"}, }, { name: "retrieves packages from raw single and list", @@ -763,7 +763,7 @@ local Bucket(name, desc) = { }; [ - Bucket(name="rucket_1", desc="bucket 1 description"), + Bucket(name="rucket-1", desc="bucket 1 description"), ] ` case pkger.EncodingJSON: @@ -772,7 +772,7 @@ local Bucket(name, desc) = { "apiVersion": "%[1]s", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description" @@ -784,7 +784,7 @@ local Bucket(name, desc) = { pkgStr = `apiVersion: %[1]s kind: Bucket metadata: - name: rucket_11 + name: rucket-11 spec: description: bucket 1 description ` diff --git a/pkger/internal/wordplay/wordplay.go b/pkger/internal/wordplay/wordplay.go index 385f37e5938..c2f15f53b77 100644 --- a/pkger/internal/wordplay/wordplay.go +++ b/pkger/internal/wordplay/wordplay.go @@ -878,5 +878,5 @@ var ( // formatted as "adjective_surname". For example 'focused_turing'. 
If retry is non-zero, a random // integer between 0 and 10 will be added to the end of the name, e.g `focused_turing3` func GetRandomName() string { - return fmt.Sprintf("%s_%s", left[rand.Intn(len(left))], right[rand.Intn(len(right))]) + return fmt.Sprintf("%s-%s", left[rand.Intn(len(left))], right[rand.Intn(len(right))]) } diff --git a/pkger/models_test.go b/pkger/models_test.go index 20e68de5a6f..00248496a4c 100644 --- a/pkger/models_test.go +++ b/pkger/models_test.go @@ -479,12 +479,12 @@ func TestPkg(t *testing.T) { { pkgFile: "testdata/label.yml", kind: KindLabel, - validName: "label_1", + validName: "label-1", }, { pkgFile: "testdata/notification_rule.yml", kind: KindNotificationRule, - validName: "rule_UUID", + validName: "rule-uuid", }, } diff --git a/pkger/parser.go b/pkger/parser.go index 42c4eb3166b..42e63f8d462 100644 --- a/pkger/parser.go +++ b/pkger/parser.go @@ -8,6 +8,7 @@ import ( "io" "io/ioutil" "net/http" + "regexp" "sort" "strconv" "strings" @@ -732,7 +733,7 @@ func (p *Pkg) graphResources() error { func (p *Pkg) graphBuckets() *parseErr { p.mBuckets = make(map[string]*bucket) tracker := p.trackNames(true) - return p.eachResource(KindBucket, bucketNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindBucket, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -770,7 +771,7 @@ func (p *Pkg) graphBuckets() *parseErr { func (p *Pkg) graphLabels() *parseErr { p.mLabels = make(map[string]*label) tracker := p.trackNames(true) - return p.eachResource(KindLabel, labelNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindLabel, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -801,7 +802,7 @@ func (p *Pkg) graphChecks() *parseErr { } var pErr parseErr for _, checkKind := range checkKinds { - err := p.eachResource(checkKind.kind, checkNameMinLength, func(o Object) []validationErr { + err := p.eachResource(checkKind.kind, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -862,7 +863,7 @@ func (p *Pkg) graphChecks() *parseErr { func (p *Pkg) graphDashboards() *parseErr { p.mDashboards = make(map[string]*dashboard) tracker := p.trackNames(false) - return p.eachResource(KindDashboard, dashboardNameMinLength, func(o Object) []validationErr { + return p.eachResource(KindDashboard, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -926,7 +927,7 @@ func (p *Pkg) graphNotificationEndpoints() *parseErr { var pErr parseErr for _, nk := range notificationKinds { - err := p.eachResource(nk.kind, 1, func(o Object) []validationErr { + err := p.eachResource(nk.kind, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -977,7 +978,7 @@ func (p *Pkg) graphNotificationEndpoints() *parseErr { func (p *Pkg) graphNotificationRules() *parseErr { p.mNotificationRules = make(map[string]*notificationRule) tracker := p.trackNames(false) - return p.eachResource(KindNotificationRule, 1, func(o Object) []validationErr { + return p.eachResource(KindNotificationRule, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1027,7 +1028,7 @@ func (p *Pkg) graphNotificationRules() *parseErr { func (p *Pkg) graphTasks() *parseErr { p.mTasks = make(map[string]*task) tracker := p.trackNames(false) - return p.eachResource(KindTask, 1, func(o Object) []validationErr { + return p.eachResource(KindTask, func(o Object) 
[]validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1059,7 +1060,7 @@ func (p *Pkg) graphTasks() *parseErr { func (p *Pkg) graphTelegrafs() *parseErr { p.mTelegrafs = make(map[string]*telegraf) tracker := p.trackNames(false) - return p.eachResource(KindTelegraf, 0, func(o Object) []validationErr { + return p.eachResource(KindTelegraf, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1088,7 +1089,7 @@ func (p *Pkg) graphTelegrafs() *parseErr { func (p *Pkg) graphVariables() *parseErr { p.mVariables = make(map[string]*variable) tracker := p.trackNames(true) - return p.eachResource(KindVariable, 1, func(o Object) []validationErr { + return p.eachResource(KindVariable, func(o Object) []validationErr { ident, errs := tracker(o) if len(errs) > 0 { return errs @@ -1118,7 +1119,7 @@ func (p *Pkg) graphVariables() *parseErr { }) } -func (p *Pkg) eachResource(resourceKind Kind, minNameLen int, fn func(o Object) []validationErr) *parseErr { +func (p *Pkg) eachResource(resourceKind Kind, fn func(o Object) []validationErr) *parseErr { var pErr parseErr for i, k := range p.Objects { if err := k.Kind.OK(); err != nil { @@ -1152,14 +1153,14 @@ func (p *Pkg) eachResource(resourceKind Kind, minNameLen int, fn func(o Object) continue } - if len(k.Name()) < minNameLen { + if errs := isDNS1123Label(k.Name()); len(errs) > 0 { pErr.append(resourceErr{ Kind: k.Kind.String(), Idx: intPtr(i), ValidationErrs: []validationErr{ objectValidationErr(fieldMetadata, validationErr{ Field: fieldName, - Msg: fmt.Sprintf("must be a string of at least %d chars in length", minNameLen), + Msg: fmt.Sprintf("name %q is invalid; %s", k.Name(), strings.Join(errs, "; ")), }), }, }) @@ -1434,6 +1435,43 @@ func parseChart(r Resource) (chart, []validationErr) { return c, nil } +// dns1123LabelMaxLength is a label's max length in DNS (RFC 1123) +const dns1123LabelMaxLength int = 63 + +const dns1123LabelFmt string = "[a-z0-9]([-a-z0-9]*[a-z0-9])?" +const dns1123LabelErrMsg string = "a DNS-1123 label must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character" + +var dns1123LabelRegexp = regexp.MustCompile("^" + dns1123LabelFmt + "$") + +// isDNS1123Label tests for a string that conforms to the definition of a label in +// DNS (RFC 1123). +func isDNS1123Label(value string) []string { + var errs []string + if len(value) > dns1123LabelMaxLength { + errs = append(errs, fmt.Sprintf("must be no more than %d characters", dns1123LabelMaxLength)) + } + if !dns1123LabelRegexp.MatchString(value) { + errs = append(errs, regexError(dns1123LabelErrMsg, dns1123LabelFmt, "my-name", "123-abc")) + } + return errs +} + +// regexError returns a string explanation of a regex validation failure. +func regexError(msg string, fmt string, examples ...string) string { + if len(examples) == 0 { + return msg + " (regex used for validation is '" + fmt + "')" + } + msg += " (e.g. " + for i := range examples { + if i > 0 { + msg += " or " + } + msg += "'" + examples[i] + "', " + } + msg += "regex used for validation is '" + fmt + "')" + return msg +} + // Resource is a pkger Resource kind. It can be one of any of // available kinds that are supported. 
type Resource map[string]interface{} diff --git a/pkger/parser_models.go b/pkger/parser_models.go index 439da2c37a4..01dd54bd93d 100644 --- a/pkger/parser_models.go +++ b/pkger/parser_models.go @@ -1335,6 +1335,10 @@ var validEndpointHTTPMethods = map[string]bool{ func (n *notificationEndpoint) valid() []validationErr { var failures []validationErr + if err, ok := isValidName(n.Name(), 1); !ok { + failures = append(failures, err) + } + if _, err := url.Parse(n.url); err != nil || n.url == "" { failures = append(failures, validationErr{ Field: fieldNotificationEndpointURL, @@ -1532,6 +1536,9 @@ func (r *notificationRule) toInfluxRule() influxdb.NotificationRule { func (r *notificationRule) valid() []validationErr { var vErrs []validationErr + if err, ok := isValidName(r.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if !r.endpointName.hasValue() { vErrs = append(vErrs, validationErr{ Field: fieldNotificationRuleEndpointName, @@ -1714,6 +1721,9 @@ func (t *task) summarize() SummaryTask { func (t *task) valid() []validationErr { var vErrs []validationErr + if err, ok := isValidName(t.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if t.cron == "" && t.every == 0 { vErrs = append(vErrs, validationErr{ @@ -1838,6 +1848,9 @@ func (t *telegraf) summarize() SummaryTelegraf { func (t *telegraf) valid() []validationErr { var vErrs []validationErr + if err, ok := isValidName(t.Name(), 1); !ok { + vErrs = append(vErrs, err) + } if t.config.Config == "" { vErrs = append(vErrs, validationErr{ Field: fieldTelegrafConfig, @@ -1918,6 +1931,10 @@ func (v *variable) influxVarArgs() *influxdb.VariableArguments { func (v *variable) valid() []validationErr { var failures []validationErr + if err, ok := isValidName(v.Name(), 1); !ok { + failures = append(failures, err) + } + switch v.Type { case "map": if len(v.MapValues) == 0 { @@ -1979,7 +1996,7 @@ func (r *references) String() string { return v } if r.EnvRef != "" { - return "$" + r.EnvRef + return "env-" + r.EnvRef } return "" } diff --git a/pkger/parser_test.go b/pkger/parser_test.go index 2b73a41b5c0..a92a6c2cd56 100644 --- a/pkger/parser_test.go +++ b/pkger/parser_test.go @@ -4,6 +4,7 @@ import ( "errors" "fmt" "path/filepath" + "sort" "strconv" "strings" "testing" @@ -26,7 +27,7 @@ func TestParse(t *testing.T) { actual := buckets[0] expectedBucket := SummaryBucket{ - PkgName: "rucket_22", + PkgName: "rucket-22", Name: "display name", Description: "bucket 2 description", LabelAssociations: []SummaryLabel{}, @@ -35,8 +36,8 @@ func TestParse(t *testing.T) { actual = buckets[1] expectedBucket = SummaryBucket{ - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -64,7 +65,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: rucket-11 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket @@ -80,7 +81,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: rucket-11 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket @@ -101,12 +102,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name `, }, { @@ -117,14 +118,14 @@ metadata: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 --- apiVersion: 
influxdata.com/v2alpha1 kind: Bucket metadata: - name: valid name + name: valid-name spec: - name: rucket_1 + name: rucket-1 `, }, { @@ -135,12 +136,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: invalid name + name: invalid-name spec: name: f `, @@ -160,7 +161,7 @@ spec: require.Len(t, labels, 3) expectedLabel0 := SummaryLabel{ - PkgName: "label_3", + PkgName: "label-3", Name: "display name", Properties: struct { Color string `json:"color"` @@ -172,8 +173,8 @@ spec: assert.Equal(t, expectedLabel0, labels[0]) expectedLabel1 := SummaryLabel{ - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -185,8 +186,8 @@ spec: assert.Equal(t, expectedLabel1, labels[1]) expectedLabel2 := SummaryLabel{ - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: "label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -218,13 +219,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: a spec: `, }, @@ -235,13 +235,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: `, }, @@ -255,7 +255,6 @@ kind: Label --- apiVersion: influxdata.com/v2alpha1 kind: Label - `, }, { @@ -265,15 +264,15 @@ kind: Label pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: - name: valid name + name: valid-name `, }, { @@ -283,13 +282,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: valid name + name: valid-name spec: --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: name: a `, @@ -316,16 +315,16 @@ spec: labels []string }{ { - bktName: "rucket_1", - labels: []string{"label_1"}, + bktName: "rucket-1", + labels: []string{"label-1"}, }, { - bktName: "rucket_2", - labels: []string{"label_2"}, + bktName: "rucket-2", + labels: []string{"label-2"}, }, { - bktName: "rucket_3", - labels: []string{"label_1", "label_2"}, + bktName: "rucket-3", + labels: []string{"label-1", "label-2"}, }, } for i, expected := range expectedLabels { @@ -339,28 +338,28 @@ spec: expectedMappings := []SummaryLabelMapping{ { - ResourcePkgName: "rucket_1", - ResourceName: "rucket_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "rucket-1", + ResourceName: "rucket-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, { - ResourcePkgName: "rucket_2", - ResourceName: "rucket_2", - LabelPkgName: "label_2", - LabelName: "label_2", + ResourcePkgName: "rucket-2", + ResourceName: "rucket-2", + LabelPkgName: "label-2", + LabelName: "label-2", }, { - ResourcePkgName: "rucket_3", - ResourceName: "rucket_3", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "rucket-3", + ResourceName: "rucket-3", + LabelPkgName: "label-1", + LabelName: "label-1", }, { - ResourcePkgName: "rucket_3", - ResourceName: "rucket_3", - LabelPkgName: "label_2", - LabelName: "label_2", + 
ResourcePkgName: "rucket-3", + ResourceName: "rucket-3", + LabelPkgName: "label-2", + LabelName: "label-2", }, } @@ -381,11 +380,11 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, }, { @@ -395,16 +394,16 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label name: NOT TO BE FOUND `, @@ -416,13 +415,13 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 `, }, { @@ -432,18 +431,18 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, } @@ -465,7 +464,7 @@ spec: require.Truef(t, ok, "got: %#v", check1) expectedBase := icheck.Base{ - Name: "check_0", + Name: "check-0", Description: "desc_0", Every: mustDuration(t, time.Minute), Offset: mustDuration(t, 15*time.Second), @@ -531,15 +530,15 @@ spec: expectedMappings := []SummaryLabelMapping{ { - LabelPkgName: "label_1", - LabelName: "label_1", - ResourcePkgName: "check_0", - ResourceName: "check_0", + LabelPkgName: "label-1", + LabelName: "label-1", + ResourcePkgName: "check-0", + ResourceName: "check-0", }, { - LabelPkgName: "label_1", - LabelName: "label_1", - ResourcePkgName: "check_1", + LabelPkgName: "label-1", + LabelName: "label-1", + ResourcePkgName: "check-1", ResourceName: "display name", }, } @@ -565,7 +564,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -576,7 +575,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -595,7 +594,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: query: > from(bucket: "rucket_1") |> yield(name: "mean") @@ -618,7 +617,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -640,7 +639,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -662,7 +661,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -686,7 +685,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -705,7 +704,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m statusMessageTemplate: "Check: ${ r._check_name } is: ${ r._level }" @@ -726,7 +725,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -750,7 +749,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: 
CheckThreshold metadata: - name: check_0 + name: check-0 spec: every: 1m query: > @@ -771,7 +770,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: level: cRiT query: > @@ -790,7 +789,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -808,7 +807,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -818,7 +817,7 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -831,12 +830,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -846,9 +845,9 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -862,7 +861,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: every: 5m level: cRiT @@ -874,9 +873,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: valid name + name: valid-name spec: - name: check_1 + name: check-1 every: 5m level: cRiT query: > @@ -902,7 +901,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -942,7 +941,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -978,7 +977,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1022,7 +1021,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single heatmap chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ heatmap chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1063,7 +1062,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: charts: - kind: heatmap @@ -1106,7 +1105,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: charts: - kind: heatmap @@ -1132,7 +1131,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: charts: - kind: heatmap @@ -1180,7 +1179,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single histogram chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single histogram chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1218,7 +1217,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 spec: description: a dashboard w/ single histogram chart charts: @@ -1246,7 +1245,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 
spec: description: a dashboard w/ single histogram chart charts: @@ -1286,7 +1285,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single markdown chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single markdown chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1307,7 +1306,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dashboard w/ single scatter chart", actual.Name) + assert.Equal(t, "dash-0", actual.Name) assert.Equal(t, "a dashboard w/ single scatter chart", actual.Description) require.Len(t, actual.Charts, 1) @@ -1348,7 +1347,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1375,7 +1374,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1416,7 +1415,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1456,7 +1455,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1498,7 +1497,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1540,7 +1539,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1585,7 +1584,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1621,7 +1620,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: @@ -1665,7 +1664,7 @@ spec: require.Len(t, sum.Dashboards, 2) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.PkgName) + assert.Equal(t, "dash-1", actual.PkgName) assert.Equal(t, "display name", actual.Name) assert.Equal(t, "desc1", actual.Description) @@ -1702,8 +1701,8 @@ spec: assert.Equal(t, 3.0, c.Value) actual2 := sum.Dashboards[1] - assert.Equal(t, "dash_2", actual2.PkgName) - assert.Equal(t, "dash_2", actual2.Name) + assert.Equal(t, "dash-2", actual2.PkgName) + assert.Equal(t, "dash-2", actual2.Name) assert.Equal(t, "desc", actual2.Description) }) }) @@ -1717,7 +1716,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1744,7 +1743,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1769,7 +1768,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1792,7 +1791,7 @@ spec: pkgStr: `apiVersion: 
influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1816,7 +1815,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1841,13 +1840,13 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: `, }, @@ -1860,7 +1859,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: name: d `, @@ -1880,7 +1879,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -1942,7 +1941,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -1982,7 +1981,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2026,7 +2025,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2068,7 +2067,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2113,7 +2112,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2157,7 +2156,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2196,7 +2195,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2243,7 +2242,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -2303,7 +2302,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2330,7 +2329,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2356,7 +2355,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2380,7 +2379,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2406,7 +2405,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2432,7 +2431,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2470,7 +2469,7 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) assert.Equal(t, "desc1", actual.Description) require.Len(t, actual.Charts, 1) @@ -2512,7 +2511,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - 
name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2553,7 +2552,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: @@ -2604,28 +2603,28 @@ spec: require.Len(t, sum.Dashboards, 1) actual := sum.Dashboards[0] - assert.Equal(t, "dash_1", actual.Name) + assert.Equal(t, "dash-1", actual.Name) require.Len(t, actual.LabelAssociations, 2) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) - assert.Equal(t, "label_2", actual.LabelAssociations[1].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-2", actual.LabelAssociations[1].Name) expectedMappings := []SummaryLabelMapping{ { Status: StateStatusNew, ResourceType: influxdb.DashboardsResourceType, - ResourcePkgName: "dash_1", - ResourceName: "dash_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "dash-1", + ResourceName: "dash-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, { Status: StateStatusNew, ResourceType: influxdb.DashboardsResourceType, - ResourcePkgName: "dash_1", - ResourceName: "dash_1", - LabelPkgName: "label_2", - LabelName: "label_2", + ResourcePkgName: "dash-1", + ResourceName: "dash-1", + LabelPkgName: "label-2", + LabelName: "label-2", }, } @@ -2644,11 +2643,11 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, }, { @@ -2658,16 +2657,16 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label name: unfound label `, @@ -2679,12 +2678,12 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label @@ -2700,18 +2699,18 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, } @@ -2727,7 +2726,7 @@ spec: testfileRunner(t, "testdata/notification_endpoint", func(t *testing.T, pkg *Pkg) { expectedEndpoints := []SummaryNotificationEndpoint{ { - PkgName: "http_basic_auth_notification_endpoint", + PkgName: "http-basic-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ Name: "basic endpoint name", @@ -2742,10 +2741,10 @@ spec: }, }, { - PkgName: "http_bearer_auth_notification_endpoint", + PkgName: "http-bearer-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ - Name: "http_bearer_auth_notification_endpoint", + Name: "http-bearer-auth-notification-endpoint", Description: "http bearer auth desc", Status: influxdb.TaskStatusActive, }, @@ -2756,10 +2755,10 @@ spec: }, }, { - PkgName: "http_none_auth_notification_endpoint", + PkgName: "http-none-auth-notification-endpoint", NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "http none auth desc", Status: influxdb.TaskStatusActive, 
}, @@ -2769,7 +2768,7 @@ spec: }, }, { - PkgName: "pager_duty_notification_endpoint", + PkgName: "pager-duty-notification-endpoint", NotificationEndpoint: &endpoint.PagerDuty{ Base: endpoint.Base{ Name: "pager duty name", @@ -2781,7 +2780,7 @@ spec: }, }, { - PkgName: "slack_notification_endpoint", + PkgName: "slack-notification-endpoint", NotificationEndpoint: &endpoint.Slack{ Base: endpoint.Base{ Name: "slack name", @@ -2803,15 +2802,15 @@ spec: expected, actual := expectedEndpoints[i], endpoints[i] assert.Equalf(t, expected.NotificationEndpoint, actual.NotificationEndpoint, "index=%d", i) require.Len(t, actual.LabelAssociations, 1) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) assert.Contains(t, sum.LabelMappings, SummaryLabelMapping{ Status: StateStatusNew, ResourceType: influxdb.NotificationEndpointResourceType, ResourcePkgName: expected.PkgName, ResourceName: expected.NotificationEndpoint.GetName(), - LabelPkgName: "label_1", - LabelName: "label_1", + LabelPkgName: "label-1", + LabelName: "label-1", }) } }) @@ -2831,7 +2830,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: `, }, @@ -2845,7 +2844,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: `, }, @@ -2859,7 +2858,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none method: get @@ -2875,7 +2874,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none method: get @@ -2892,7 +2891,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none url: https://www.example.com/endpoint/noneauth @@ -2908,7 +2907,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: none description: http none auth desc @@ -2926,7 +2925,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: basic method: POST @@ -2944,7 +2943,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: basic method: POST @@ -2962,7 +2961,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: description: http basic auth desc type: basic @@ -2980,7 +2979,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_bearer_auth_notification_endpoint + name: http-bearer-auth-notification-endpoint spec: description: http bearer auth desc type: bearer @@ -2998,7 +2997,7 @@ spec: pkgStr: `apiVersion: 
influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: type: RANDOM WRONG TYPE description: http none auth desc @@ -3016,7 +3015,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: url: https://hooks.slack.com/services/bip/piddy/boppidy --- @@ -3038,7 +3037,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: description: slack desc url: https://hooks.slack.com/services/bip/piddy/boppidy @@ -3063,7 +3062,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: name: slack description: slack desc @@ -3088,7 +3087,7 @@ spec: rule := rules[0] assert.Equal(t, "rule_0", rule.Name) - assert.Equal(t, "endpoint_0", rule.EndpointPkgName) + assert.Equal(t, "endpoint-0", rule.EndpointPkgName) assert.Equal(t, "desc_0", rule.Description) assert.Equal(t, (10 * time.Minute).String(), rule.Every) assert.Equal(t, (30 * time.Second).String(), rule.Offset) @@ -3110,8 +3109,8 @@ spec: require.Len(t, sum.Labels, 2) require.Len(t, rule.LabelAssociations, 2) - assert.Equal(t, "label_1", rule.LabelAssociations[0].PkgName) - assert.Equal(t, "label_2", rule.LabelAssociations[1].PkgName) + assert.Equal(t, "label-1", rule.LabelAssociations[0].PkgName) + assert.Equal(t, "label-2", rule.LabelAssociations[1].PkgName) }) }) @@ -3121,7 +3120,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: endpoint_0 + name: endpoint-0 spec: url: https://hooks.slack.com/services/bip/piddy/boppidy --- @@ -3142,7 +3141,7 @@ spec: kind: NotificationRule metadata: spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3158,7 +3157,7 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -3175,9 +3174,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN @@ -3192,10 +3191,10 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m - endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" `), }, @@ -3208,10 +3207,10 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: every: 10m - endpointName: endpoint_0 + endpointName: endpoint-0 messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" 
statusRules: - currentLevel: WRONGO @@ -3226,9 +3225,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3245,9 +3244,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3267,9 +3266,9 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" status: RANDO STATUS @@ -3286,16 +3285,16 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN associations: - kind: Label - name: label_1 + name: label-1 `), }, }, @@ -3307,23 +3306,23 @@ spec: pkgStr: pkgWithValidEndpint(`apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: - currentLevel: WARN associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `), }, }, @@ -3336,9 +3335,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3347,9 +3346,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" statusRules: @@ -3366,7 +3365,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_0 + name: rule-0 spec: endpointName: RANDO_ENDPOINT_NAME every: 10m @@ -3390,11 +3389,14 @@ spec: sum := pkg.Summary() tasks := sum.Tasks require.Len(t, tasks, 2) + sort.Slice(tasks, func(i, j int) bool { + return tasks[i].PkgName < tasks[j].PkgName + }) baseEqual := func(t *testing.T, i int, status influxdb.Status, actual SummaryTask) { t.Helper() - assert.Equal(t, "task_"+strconv.Itoa(i), actual.Name) + assert.Equal(t, "task-"+strconv.Itoa(i), actual.Name) assert.Equal(t, "desc_"+strconv.Itoa(i), actual.Description) assert.Equal(t, status, actual.Status) @@ -3402,19 
+3404,19 @@ spec: assert.Equal(t, expectedQuery, actual.Query) require.Len(t, actual.LabelAssociations, 1) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) } require.Len(t, sum.Labels, 1) task0 := tasks[0] - baseEqual(t, 0, influxdb.Inactive, task0) - assert.Equal(t, (10 * time.Minute).String(), task0.Every) - assert.Equal(t, (15 * time.Second).String(), task0.Offset) + baseEqual(t, 1, influxdb.Active, task0) + assert.Equal(t, "15 * * * *", task0.Cron) task1 := tasks[1] - baseEqual(t, 1, influxdb.Active, task1) - assert.Equal(t, "15 * * * *", task1.Cron) + baseEqual(t, 0, influxdb.Inactive, task1) + assert.Equal(t, (10 * time.Minute).String(), task1.Every) + assert.Equal(t, (15 * time.Second).String(), task1.Offset) }) }) @@ -3449,7 +3451,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: cron: 15 * * * * query: > @@ -3467,7 +3469,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: description: desc_0 every: 10m @@ -3484,7 +3486,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: description: desc_0 offset: 15s @@ -3500,14 +3502,14 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_1 + name: task-1 spec: cron: 15 * * * * query: > from(bucket: "rucket_1") |> yield(name: "mean") associations: - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -3521,12 +3523,12 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m offset: 15s @@ -3535,9 +3537,9 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_1 + name: label-1 `, }, }, @@ -3551,7 +3553,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m query: > @@ -3560,7 +3562,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_0 + name: task-0 spec: every: 10m query: > @@ -3587,25 +3589,25 @@ spec: assert.Equal(t, "desc", actual.TelegrafConfig.Description) require.Len(t, actual.LabelAssociations, 2) - assert.Equal(t, "label_1", actual.LabelAssociations[0].Name) - assert.Equal(t, "label_2", actual.LabelAssociations[1].Name) + assert.Equal(t, "label-1", actual.LabelAssociations[0].Name) + assert.Equal(t, "label-2", actual.LabelAssociations[1].Name) actual = sum.TelegrafConfigs[1] - assert.Equal(t, "tele_2", actual.TelegrafConfig.Name) + assert.Equal(t, "tele-2", actual.TelegrafConfig.Name) assert.Empty(t, actual.LabelAssociations) require.Len(t, sum.LabelMappings, 2) expectedMapping := SummaryLabelMapping{ Status: StateStatusNew, - ResourcePkgName: "first_tele_config", + ResourcePkgName: "first-tele-config", ResourceName: "display name", - LabelPkgName: "label_1", - LabelName: "label_1", + LabelPkgName: "label-1", + LabelName: "label-1", ResourceType: influxdb.TelegrafsResourceType, } assert.Equal(t, expectedMapping, sum.LabelMappings[0]) - expectedMapping.LabelPkgName = "label_2" - expectedMapping.LabelName = "label_2" + expectedMapping.LabelPkgName = "label-2" + expectedMapping.LabelName = "label-2" assert.Equal(t, expectedMapping, sum.LabelMappings[1]) }) }) @@ -3619,7 +3621,7 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Telegraf 
metadata: - name: first_tele_config + name: first-tele-config spec: `, }, @@ -3630,14 +3632,14 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_0 + name: tele-0 spec: config: fake tele config --- apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_0 + name: tele-0 spec: config: fake tele config `, @@ -3679,21 +3681,21 @@ spec: // validates we support all known variable types varEquals(t, - "var_const_3", + "var-const-3", "constant", influxdb.VariableConstantValues([]string{"first val"}), sum.Variables[1], ) varEquals(t, - "var_map_4", + "var-map-4", "map", influxdb.VariableMapValues{"k1": "v1"}, sum.Variables[2], ) varEquals(t, - "var_query_2", + "var-query-2", "query", influxdb.VariableQueryValues{ Query: "an influxql query of sorts", @@ -3714,7 +3716,7 @@ spec: kind: Variable metadata: spec: - description: var_map_4 desc + description: var-map-4 desc type: map values: k1: v1 @@ -3727,9 +3729,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_map_4 + name: var-map-4 spec: - description: var_map_4 desc + description: var-map-4 desc type: map `, }, @@ -3740,9 +3742,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_const_3 + name: var-const-3 spec: - description: var_const_3 desc + description: var-const-3 desc type: constant `, }, @@ -3753,9 +3755,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query language: influxql `, @@ -3767,9 +3769,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts `, @@ -3781,9 +3783,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: wrong Language @@ -3796,9 +3798,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3806,9 +3808,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3821,9 +3823,9 @@ spec: pkgStr: `apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3831,10 +3833,10 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: valid_query + name: valid-query spec: - name: var_query_2 - description: var_query_2 desc + name: var-query-2 + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -3861,8 +3863,8 @@ spec: labels []string }{ { - varName: "var_1", - labels: []string{"label_1"}, + varName: "var-1", + labels: []string{"label-1"}, }, } for i, expected := range expectedLabelMappings { @@ -3877,10 +3879,10 @@ spec: expectedMappings := 
[]SummaryLabelMapping{ { Status: StateStatusNew, - ResourcePkgName: "var_1", - ResourceName: "var_1", - LabelPkgName: "label_1", - LabelName: "label_1", + ResourcePkgName: "var-1", + ResourceName: "var-1", + LabelPkgName: "label-1", + LabelName: "label-1", }, } @@ -3908,7 +3910,7 @@ spec: expected := &endpoint.PagerDuty{ Base: endpoint.Base{ - Name: "pager_duty_notification_endpoint", + Name: "pager-duty-notification-endpoint", Status: influxdb.TaskStatusActive, }, ClientURL: "http://localhost:8080/orgs/7167eb6719fa34e5/alert-history", @@ -3935,43 +3937,43 @@ spec: sum := pkg.Summary() require.Len(t, sum.Buckets, 1) - assert.Equal(t, "$bkt-1-name-ref", sum.Buckets[0].Name) + assert.Equal(t, "env-bkt-1-name-ref", sum.Buckets[0].Name) assert.Len(t, sum.Buckets[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "bkt-1-name-ref") require.Len(t, sum.Checks, 1) - assert.Equal(t, "$check-1-name-ref", sum.Checks[0].Check.GetName()) + assert.Equal(t, "env-check-1-name-ref", sum.Checks[0].Check.GetName()) assert.Len(t, sum.Checks[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "check-1-name-ref") require.Len(t, sum.Dashboards, 1) - assert.Equal(t, "$dash-1-name-ref", sum.Dashboards[0].Name) + assert.Equal(t, "env-dash-1-name-ref", sum.Dashboards[0].Name) assert.Len(t, sum.Dashboards[0].LabelAssociations, 1) hasEnv(t, pkg.mEnv, "dash-1-name-ref") require.Len(t, sum.NotificationEndpoints, 1) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) + assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationEndpoints[0].NotificationEndpoint.GetName()) hasEnv(t, pkg.mEnv, "endpoint-1-name-ref") require.Len(t, sum.Labels, 1) - assert.Equal(t, "$label-1-name-ref", sum.Labels[0].Name) + assert.Equal(t, "env-label-1-name-ref", sum.Labels[0].Name) hasEnv(t, pkg.mEnv, "label-1-name-ref") require.Len(t, sum.NotificationRules, 1) - assert.Equal(t, "$rule-1-name-ref", sum.NotificationRules[0].Name) - assert.Equal(t, "$endpoint-1-name-ref", sum.NotificationRules[0].EndpointPkgName) + assert.Equal(t, "env-rule-1-name-ref", sum.NotificationRules[0].Name) + assert.Equal(t, "env-endpoint-1-name-ref", sum.NotificationRules[0].EndpointPkgName) hasEnv(t, pkg.mEnv, "rule-1-name-ref") require.Len(t, sum.Tasks, 1) - assert.Equal(t, "$task-1-name-ref", sum.Tasks[0].Name) + assert.Equal(t, "env-task-1-name-ref", sum.Tasks[0].Name) hasEnv(t, pkg.mEnv, "task-1-name-ref") require.Len(t, sum.TelegrafConfigs, 1) - assert.Equal(t, "$telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) + assert.Equal(t, "env-telegraf-1-name-ref", sum.TelegrafConfigs[0].TelegrafConfig.Name) hasEnv(t, pkg.mEnv, "telegraf-1-name-ref") require.Len(t, sum.Variables, 1) - assert.Equal(t, "$var-1-name-ref", sum.Variables[0].Name) + assert.Equal(t, "env-var-1-name-ref", sum.Variables[0].Name) hasEnv(t, pkg.mEnv, "var-1-name-ref") t.Log("applying env vars should populate env fields") @@ -4003,8 +4005,8 @@ spec: labels := []SummaryLabel{ { - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -4015,22 +4017,22 @@ spec: bkts := []SummaryBucket{ { - PkgName: "rucket_1", - Name: "rucket_1", + PkgName: "rucket-1", + Name: "rucket-1", Description: "desc_1", RetentionPeriod: 10000 * time.Second, LabelAssociations: labels, }, { - PkgName: "rucket_2", - Name: "rucket_2", - Description: "desc_2", + PkgName: "rucket-2", + Name: "rucket-2", + Description: "desc-2", RetentionPeriod: 20000 * 
time.Second, LabelAssociations: labels, }, { - PkgName: "rucket_3", - Name: "rucket_3", + PkgName: "rucket-3", + Name: "rucket-3", Description: "desc_3", RetentionPeriod: 30000 * time.Second, LabelAssociations: labels, @@ -4080,7 +4082,7 @@ func TestCombine(t *testing.T) { apiVersion: %[1]s kind: Label metadata: - name: label_%d + name: label-%d `, APIVersion, i)) pkgs = append(pkgs, pkg) } @@ -4089,35 +4091,35 @@ metadata: apiVersion: %[1]s kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 `, APIVersion))) pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(` apiVersion: %[1]s kind: Bucket metadata: - name: rucket_2 + name: rucket-2 spec: associations: - kind: Label - name: label_2 + name: label-2 `, APIVersion))) pkgs = append(pkgs, newPkgFromYmlStr(t, fmt.Sprintf(` apiVersion: %[1]s kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 `, APIVersion))) combinedPkg, err := Combine(pkgs) @@ -4127,21 +4129,21 @@ spec: require.Len(t, sum.Labels, numLabels) for i := 0; i < numLabels; i++ { - assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name) + assert.Equal(t, fmt.Sprintf("label-%d", i), sum.Labels[i].Name) } require.Len(t, sum.Labels, numLabels) for i := 0; i < numLabels; i++ { - assert.Equal(t, fmt.Sprintf("label_%d", i), sum.Labels[i].Name) + assert.Equal(t, fmt.Sprintf("label-%d", i), sum.Labels[i].Name) } require.Len(t, sum.Buckets, 3) - assert.Equal(t, "rucket_1", sum.Buckets[0].Name) - associationsEqual(t, sum.Buckets[0].LabelAssociations, "label_1") - assert.Equal(t, "rucket_2", sum.Buckets[1].Name) - associationsEqual(t, sum.Buckets[1].LabelAssociations, "label_2") - assert.Equal(t, "rucket_3", sum.Buckets[2].Name) - associationsEqual(t, sum.Buckets[2].LabelAssociations, "label_1", "label_2") + assert.Equal(t, "rucket-1", sum.Buckets[0].Name) + associationsEqual(t, sum.Buckets[0].LabelAssociations, "label-1") + assert.Equal(t, "rucket-2", sum.Buckets[1].Name) + associationsEqual(t, sum.Buckets[1].LabelAssociations, "label-2") + assert.Equal(t, "rucket-3", sum.Buckets[2].Name) + associationsEqual(t, sum.Buckets[2].LabelAssociations, "label-1", "label-2") }) } diff --git a/pkger/service_test.go b/pkger/service_test.go index 2d98354e1b5..c2c208e4fa0 100644 --- a/pkger/service_test.go +++ b/pkger/service_test.go @@ -64,7 +64,7 @@ func TestService(t *testing.T) { testfileRunner(t, "testdata/bucket.yml", func(t *testing.T, pkg *Pkg) { fakeBktSVC := mock.NewBucketService() fakeBktSVC.FindBucketByNameFn = func(_ context.Context, orgID influxdb.ID, name string) (*influxdb.Bucket, error) { - if name != "rucket_11" { + if name != "rucket-11" { return nil, errors.New("not found") } return &influxdb.Bucket{ @@ -86,16 +86,16 @@ func TestService(t *testing.T) { DiffIdentifier: DiffIdentifier{ ID: SafeID(1), StateStatus: StateStatusExists, - PkgName: "rucket_11", + PkgName: "rucket-11", }, Old: &DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "old desc", RetentionRules: retentionRules{newRetentionRule(30 * time.Hour)}, }, New: DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "bucket 1 description", RetentionRules: retentionRules{newRetentionRule(time.Hour)}, }, @@ -119,11 +119,11 @@ func TestService(t *testing.T) { expected := DiffBucket{ DiffIdentifier: DiffIdentifier{ - PkgName: "rucket_11", + PkgName: "rucket-11", StateStatus: StateStatusNew, }, New: 
DiffBucketValues{ - Name: "rucket_11", + Name: "rucket-11", Description: "bucket 1 description", RetentionRules: retentionRules{newRetentionRule(time.Hour)}, }, @@ -160,13 +160,13 @@ func TestService(t *testing.T) { require.Len(t, checks, 2) check0 := checks[0] assert.True(t, check0.IsNew()) - assert.Equal(t, "check_0", check0.PkgName) + assert.Equal(t, "check-0", check0.PkgName) assert.Zero(t, check0.ID) assert.Nil(t, check0.Old) check1 := checks[1] assert.False(t, check1.IsNew()) - assert.Equal(t, "check_1", check1.PkgName) + assert.Equal(t, "check-1", check1.PkgName) assert.Equal(t, "display name", check1.New.GetName()) assert.NotZero(t, check1.ID) assert.Equal(t, existing, check1.Old.Check) @@ -200,26 +200,26 @@ func TestService(t *testing.T) { DiffIdentifier: DiffIdentifier{ ID: SafeID(1), StateStatus: StateStatusExists, - PkgName: "label_1", + PkgName: "label-1", }, Old: &DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "old color", Description: "old description", }, New: DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "#FFFFFF", Description: "label 1 description", }, } assert.Contains(t, diff.Labels, expected) - expected.PkgName = "label_2" - expected.New.Name = "label_2" + expected.PkgName = "label-2" + expected.New.Name = "label-2" expected.New.Color = "#000000" expected.New.Description = "label 2 description" - expected.Old.Name = "label_2" + expected.Old.Name = "label-2" assert.Contains(t, diff.Labels, expected) }) }) @@ -239,19 +239,19 @@ func TestService(t *testing.T) { expected := DiffLabel{ DiffIdentifier: DiffIdentifier{ - PkgName: "label_1", + PkgName: "label-1", StateStatus: StateStatusNew, }, New: DiffLabelValues{ - Name: "label_1", + Name: "label-1", Color: "#FFFFFF", Description: "label 1 description", }, } assert.Contains(t, diff.Labels, expected) - expected.PkgName = "label_2" - expected.New.Name = "label_2" + expected.PkgName = "label-2" + expected.New.Name = "label-2" expected.New.Color = "#000000" expected.New.Description = "label 2 description" assert.Contains(t, diff.Labels, expected) @@ -266,7 +266,7 @@ func TestService(t *testing.T) { existing := &endpoint.HTTP{ Base: endpoint.Base{ ID: &id, - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "old desc", Status: influxdb.TaskStatusInactive, }, @@ -302,7 +302,7 @@ func TestService(t *testing.T) { expected := DiffNotificationEndpoint{ DiffIdentifier: DiffIdentifier{ ID: 1, - PkgName: "http_none_auth_notification_endpoint", + PkgName: "http-none-auth-notification-endpoint", StateStatus: StateStatusExists, }, Old: &DiffNotificationEndpointValues{ @@ -312,7 +312,7 @@ func TestService(t *testing.T) { NotificationEndpoint: &endpoint.HTTP{ Base: endpoint.Base{ ID: &id, - Name: "http_none_auth_notification_endpoint", + Name: "http-none-auth-notification-endpoint", Description: "http none auth desc", Status: influxdb.TaskStatusActive, }, @@ -334,7 +334,7 @@ func TestService(t *testing.T) { Base: endpoint.Base{ ID: &id, // This name here matches the endpoint identified in the pkg notification rule - Name: "endpoint_0", + Name: "endpoint-0", Description: "old desc", Status: influxdb.TaskStatusInactive, }, @@ -392,13 +392,13 @@ func TestService(t *testing.T) { }) t.Run("variables", func(t *testing.T) { - testfileRunner(t, "testdata/variables", func(t *testing.T, pkg *Pkg) { + testfileRunner(t, "testdata/variables.json", func(t *testing.T, pkg *Pkg) { fakeVarSVC := mock.NewVariableService() fakeVarSVC.FindVariablesF = func(_ context.Context, 
filter influxdb.VariableFilter, opts ...influxdb.FindOptions) ([]*influxdb.Variable, error) { return []*influxdb.Variable{ { ID: influxdb.ID(1), - Name: "var_const_3", + Name: "var-const-3", Description: "old desc", }, }, nil @@ -413,16 +413,16 @@ func TestService(t *testing.T) { expected := DiffVariable{ DiffIdentifier: DiffIdentifier{ ID: 1, - PkgName: "var_const_3", + PkgName: "var-const-3", StateStatus: StateStatusExists, }, Old: &DiffVariableValues{ - Name: "var_const_3", + Name: "var-const-3", Description: "old desc", }, New: DiffVariableValues{ - Name: "var_const_3", - Description: "var_const_3 desc", + Name: "var-const-3", + Description: "var-const-3 desc", Args: &influxdb.VariableArguments{ Type: "constant", Values: influxdb.VariableConstantValues{"first val"}, @@ -434,12 +434,12 @@ func TestService(t *testing.T) { expected = DiffVariable{ DiffIdentifier: DiffIdentifier{ // no ID here since this one would be new - PkgName: "var_map_4", + PkgName: "var-map-4", StateStatus: StateStatusNew, }, New: DiffVariableValues{ - Name: "var_map_4", - Description: "var_map_4 desc", + Name: "var-map-4", + Description: "var-map-4 desc", Args: &influxdb.VariableArguments{ Type: "map", Values: influxdb.VariableMapValues{"k1": "v1"}, @@ -480,8 +480,8 @@ func TestService(t *testing.T) { expected := SummaryBucket{ ID: SafeID(time.Hour), OrgID: SafeID(orgID), - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -503,7 +503,7 @@ func TestService(t *testing.T) { id := influxdb.ID(3) if name == "display name" { id = 4 - name = "rucket_22" + name = "rucket-22" } if bkt, ok := pkg.mBuckets[name]; ok { return &influxdb.Bucket{ @@ -530,8 +530,8 @@ func TestService(t *testing.T) { expected := SummaryBucket{ ID: SafeID(3), OrgID: SafeID(orgID), - PkgName: "rucket_11", - Name: "rucket_11", + PkgName: "rucket-11", + Name: "rucket-11", Description: "bucket 1 description", RetentionPeriod: time.Hour, LabelAssociations: []SummaryLabel{}, @@ -543,7 +543,7 @@ func TestService(t *testing.T) { }) t.Run("rolls back all created buckets on an error", func(t *testing.T) { - testfileRunner(t, "testdata/bucket", func(t *testing.T, pkg *Pkg) { + testfileRunner(t, "testdata/bucket.yml", func(t *testing.T, pkg *Pkg) { fakeBktSVC := mock.NewBucketService() fakeBktSVC.FindBucketByNameFn = func(_ context.Context, id influxdb.ID, s string) (*influxdb.Bucket, error) { // forces the bucket to be created a new @@ -556,9 +556,6 @@ func TestService(t *testing.T) { return nil } - pkg.mBuckets["copybuck1"] = pkg.mBuckets["rucket_11"] - pkg.mBuckets["copybuck2"] = pkg.mBuckets["rucket_11"] - svc := newTestService(WithBucketSVC(fakeBktSVC)) orgID := influxdb.ID(9000) @@ -604,7 +601,7 @@ func TestService(t *testing.T) { assert.Fail(t, "did not find notification by name: "+name) } - for _, expectedName := range []string{"check_0", "display name"} { + for _, expectedName := range []string{"check-0", "display name"} { containsWithID(t, expectedName) } }) @@ -621,11 +618,6 @@ func TestService(t *testing.T) { return nil } - // create some dupes - for name, c := range pkg.mChecks { - pkg.mChecks["copy"+name] = c - } - svc := newTestService(WithCheckSVC(fakeCheckSVC)) orgID := influxdb.ID(9000) @@ -663,8 +655,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 1, OrgID: SafeID(orgID), - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", 
Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -677,8 +669,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 2, OrgID: SafeID(orgID), - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: "label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -701,9 +693,6 @@ func TestService(t *testing.T) { return nil } - pkg.mLabels["copy1"] = pkg.mLabels["label_1"] - pkg.mLabels["copy2"] = pkg.mLabels["label_2"] - svc := newTestService(WithLabelSVC(fakeLabelSVC)) orgID := influxdb.ID(9000) @@ -721,6 +710,7 @@ func TestService(t *testing.T) { stubExisting := func(name string, id influxdb.ID) *influxdb.Label { pkgLabel := pkg.mLabels[name] + fmt.Println(name, pkgLabel) return &influxdb.Label{ // makes all pkg changes same as they are on the existing ID: id, @@ -732,24 +722,24 @@ func TestService(t *testing.T) { }, } } - stubExisting("label_1", 1) - stubExisting("label_3", 3) + stubExisting("label-1", 1) + stubExisting("label-3", 3) fakeLabelSVC := mock.NewLabelService() fakeLabelSVC.FindLabelsFn = func(ctx context.Context, f influxdb.LabelFilter) ([]*influxdb.Label, error) { - if f.Name != "label_1" && f.Name != "display name" { + if f.Name != "label-1" && f.Name != "display name" { return nil, nil } id := influxdb.ID(1) name := f.Name if f.Name == "display name" { id = 3 - name = "label_3" + name = "label-3" } return []*influxdb.Label{stubExisting(name, id)}, nil } fakeLabelSVC.CreateLabelFn = func(_ context.Context, l *influxdb.Label) error { - if l.Name == "label_2" { + if l.Name == "label-2" { l.ID = 2 } return nil @@ -771,8 +761,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 1, OrgID: SafeID(orgID), - PkgName: "label_1", - Name: "label_1", + PkgName: "label-1", + Name: "label-1", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -785,8 +775,8 @@ func TestService(t *testing.T) { assert.Contains(t, sum.Labels, SummaryLabel{ ID: 2, OrgID: SafeID(orgID), - PkgName: "label_2", - Name: "label_2", + PkgName: "label-2", + Name: "label-2", Properties: struct { Color string `json:"color"` Description string `json:"description"` @@ -824,14 +814,14 @@ func TestService(t *testing.T) { dash1 := sum.Dashboards[0] assert.NotZero(t, dash1.ID) assert.NotZero(t, dash1.OrgID) - assert.Equal(t, "dash_1", dash1.PkgName) + assert.Equal(t, "dash-1", dash1.PkgName) assert.Equal(t, "display name", dash1.Name) require.Len(t, dash1.Charts, 1) dash2 := sum.Dashboards[1] assert.NotZero(t, dash2.ID) - assert.Equal(t, "dash_2", dash2.PkgName) - assert.Equal(t, "dash_2", dash2.Name) + assert.Equal(t, "dash-2", dash2.PkgName) + assert.Equal(t, "dash-2", dash2.Name) require.Empty(t, dash2.Charts) }) }) @@ -1146,8 +1136,8 @@ func TestService(t *testing.T) { expectedNames := []string{ "basic endpoint name", - "http_bearer_auth_notification_endpoint", - "http_none_auth_notification_endpoint", + "http-bearer-auth-notification-endpoint", + "http-none-auth-notification-endpoint", "pager duty name", "slack name", } @@ -1181,7 +1171,7 @@ func TestService(t *testing.T) { }) t.Run("notification rules", func(t *testing.T) { - t.Run("successfuly creates", func(t *testing.T) { + t.Run("successfully creates", func(t *testing.T) { testfileRunner(t, "testdata/notification_rule.yml", func(t *testing.T, pkg *Pkg) { fakeEndpointSVC := mock.NewNotificationEndpointService() fakeEndpointSVC.CreateNotificationEndpointF = func(ctx 
context.Context, nr influxdb.NotificationEndpoint, userID influxdb.ID) error { @@ -1205,10 +1195,11 @@ func TestService(t *testing.T) { require.NoError(t, err) require.Len(t, sum.NotificationRules, 1) + assert.Equal(t, "rule-uuid", sum.NotificationRules[0].PkgName) assert.Equal(t, "rule_0", sum.NotificationRules[0].Name) assert.Equal(t, "desc_0", sum.NotificationRules[0].Description) assert.Equal(t, SafeID(1), sum.NotificationRules[0].EndpointID) - assert.Equal(t, "endpoint_0", sum.NotificationRules[0].EndpointPkgName) + assert.Equal(t, "endpoint-0", sum.NotificationRules[0].EndpointPkgName) assert.Equal(t, "slack", sum.NotificationRules[0].EndpointType) }) }) @@ -1281,13 +1272,13 @@ func TestService(t *testing.T) { require.Len(t, sum.Tasks, 2) assert.NotZero(t, sum.Tasks[0].ID) - assert.Equal(t, "task_1", sum.Tasks[0].PkgName) - assert.Equal(t, "task_1", sum.Tasks[0].Name) + assert.Equal(t, "task-1", sum.Tasks[0].PkgName) + assert.Equal(t, "task-1", sum.Tasks[0].Name) assert.Equal(t, "desc_1", sum.Tasks[0].Description) assert.NotZero(t, sum.Tasks[1].ID) - assert.Equal(t, "task_UUID", sum.Tasks[1].PkgName) - assert.Equal(t, "task_0", sum.Tasks[1].Name) + assert.Equal(t, "task-uuid", sum.Tasks[1].PkgName) + assert.Equal(t, "task-0", sum.Tasks[1].Name) assert.Equal(t, "desc_0", sum.Tasks[1].Description) }) }) @@ -1335,7 +1326,7 @@ func TestService(t *testing.T) { require.Len(t, sum.TelegrafConfigs, 2) assert.Equal(t, "display name", sum.TelegrafConfigs[0].TelegrafConfig.Name) assert.Equal(t, "desc", sum.TelegrafConfigs[0].TelegrafConfig.Description) - assert.Equal(t, "tele_2", sum.TelegrafConfigs[1].TelegrafConfig.Name) + assert.Equal(t, "tele-2", sum.TelegrafConfigs[1].TelegrafConfig.Name) }) }) @@ -1390,8 +1381,8 @@ func TestService(t *testing.T) { expected := sum.Variables[0] assert.True(t, expected.ID > 0 && expected.ID < 5) assert.Equal(t, SafeID(orgID), expected.OrgID) - assert.Equal(t, "var_const_3", expected.Name) - assert.Equal(t, "var_const_3 desc", expected.Description) + assert.Equal(t, "var-const-3", expected.Name) + assert.Equal(t, "var-const-3 desc", expected.Description) require.NotNil(t, expected.Arguments) assert.Equal(t, influxdb.VariableConstantValues{"first val"}, expected.Arguments.Values) @@ -1434,7 +1425,7 @@ func TestService(t *testing.T) { // makes all pkg changes same as they are on the existing ID: influxdb.ID(1), OrganizationID: orgID, - Name: pkg.mVariables["var_const_3"].Name(), + Name: pkg.mVariables["var-const-3"].Name(), Arguments: &influxdb.VariableArguments{ Type: "constant", Values: influxdb.VariableConstantValues{"first val"}, @@ -1463,7 +1454,7 @@ func TestService(t *testing.T) { require.Len(t, sum.Variables, 4) expected := sum.Variables[0] assert.Equal(t, SafeID(1), expected.ID) - assert.Equal(t, "var_const_3", expected.Name) + assert.Equal(t, "var-const-3", expected.Name) assert.Equal(t, 3, fakeVarSVC.CreateVariableCalls.Count()) // only called for last 3 labels }) diff --git a/pkger/testdata/bucket.json b/pkger/testdata/bucket.json index 4d87a13080a..e00dac33dd2 100644 --- a/pkger/testdata/bucket.json +++ b/pkger/testdata/bucket.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description", @@ -19,7 +19,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_22" + "name": "rucket-22" }, "spec": { "name": "display name", diff --git a/pkger/testdata/bucket.yml b/pkger/testdata/bucket.yml 
index b05f4e15142..3ea9da8cef0 100644 --- a/pkger/testdata/bucket.yml +++ b/pkger/testdata/bucket.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_11 + name: rucket-11 spec: description: bucket 1 description retentionRules: @@ -11,7 +11,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_22 + name: rucket-22 spec: name: display name description: bucket 2 description diff --git a/pkger/testdata/bucket_associates_label.json b/pkger/testdata/bucket_associates_label.json index dead1da1258..1e8f097585e 100644 --- a/pkger/testdata/bucket_associates_label.json +++ b/pkger/testdata/bucket_associates_label.json @@ -3,27 +3,27 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_1" + "name": "rucket-1" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -32,13 +32,13 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_2" + "name": "rucket-2" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } @@ -47,17 +47,17 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_3" + "name": "rucket-3" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_2" + "name": "label-2" }, { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/bucket_associates_label.yml b/pkger/testdata/bucket_associates_label.yml index c1cd73151ff..47937350304 100644 --- a/pkger/testdata/bucket_associates_label.yml +++ b/pkger/testdata/bucket_associates_label.yml @@ -1,38 +1,38 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_1 + name: rucket-1 spec: associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_2 + name: rucket-2 spec: associations: - kind: Label - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Bucket metadata: - name: rucket_3 + name: rucket-3 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 diff --git a/pkger/testdata/bucket_associates_labels.jsonnet b/pkger/testdata/bucket_associates_labels.jsonnet index 050fb42908d..043f4516d20 100644 --- a/pkger/testdata/bucket_associates_labels.jsonnet +++ b/pkger/testdata/bucket_associates_labels.jsonnet @@ -15,7 +15,7 @@ local LabelAssociations(names=[]) = [ for name in names ]; -local Bucket(name, desc, secs, associations=LabelAssociations(['label_1'])) = { +local Bucket(name, desc, secs, associations=LabelAssociations(['label-1'])) = { apiVersion: 'influxdata.com/v2alpha1', kind: 'Bucket', metadata: { @@ -31,8 +31,8 @@ local Bucket(name, desc, secs, associations=LabelAssociations(['label_1'])) = { }; [ - Label("label_1",desc="desc_1", color='#eee888'), - Bucket(name="rucket_1", desc="desc_1", secs=10000), - Bucket("rucket_2", "desc_2", 20000), - Bucket("rucket_3", "desc_3", 30000), + Label("label-1",desc="desc_1", color='#eee888'), + 
Bucket(name="rucket-1", desc="desc_1", secs=10000), + Bucket("rucket-2", "desc-2", 20000), + Bucket("rucket-3", "desc_3", 30000), ] diff --git a/pkger/testdata/checks.json b/pkger/testdata/checks.json index 0d3f362d469..0b67dca5d5d 100644 --- a/pkger/testdata/checks.json +++ b/pkger/testdata/checks.json @@ -3,14 +3,14 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "CheckThreshold", "metadata": { - "name": "check_0" + "name": "check-0" }, "spec": { "description": "desc_0", @@ -57,7 +57,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -66,7 +66,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "CheckDeadman", "metadata": { - "name": "check_1" + "name": "check-1" }, "spec": { "name": "display name", @@ -91,7 +91,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/checks.yml b/pkger/testdata/checks.yml index 6f2f67b414e..5b5a2004b30 100644 --- a/pkger/testdata/checks.yml +++ b/pkger/testdata/checks.yml @@ -2,12 +2,12 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: CheckThreshold metadata: - name: check_0 + name: check-0 spec: description: desc_0 every: 1m @@ -44,12 +44,12 @@ spec: max: 35.0 associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: CheckDeadman metadata: - name: check_1 + name: check-1 spec: name: display name description: desc_1 @@ -74,4 +74,4 @@ spec: timeSince: 90s associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/dashboard.json b/pkger/testdata/dashboard.json index a8b05557e79..9118290fce5 100644 --- a/pkger/testdata/dashboard.json +++ b/pkger/testdata/dashboard.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "name": "display name", @@ -47,7 +47,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_2" + "name": "dash-2" }, "spec": { "description": "desc" diff --git a/pkger/testdata/dashboard.yml b/pkger/testdata/dashboard.yml index 5513d0d469e..b213d756550 100644 --- a/pkger/testdata/dashboard.yml +++ b/pkger/testdata/dashboard.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: name: display name description: desc1 @@ -31,6 +31,6 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_2 + name: dash-2 spec: description: desc diff --git a/pkger/testdata/dashboard_associates_label.json b/pkger/testdata/dashboard_associates_label.json index b0b8352f091..40cc51ae8fb 100644 --- a/pkger/testdata/dashboard_associates_label.json +++ b/pkger/testdata/dashboard_associates_label.json @@ -3,31 +3,31 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } diff --git 
a/pkger/testdata/dashboard_associates_label.yml b/pkger/testdata/dashboard_associates_label.yml index 369cffd0ef2..63a8e3c4933 100644 --- a/pkger/testdata/dashboard_associates_label.yml +++ b/pkger/testdata/dashboard_associates_label.yml @@ -1,20 +1,20 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 diff --git a/pkger/testdata/dashboard_gauge.json b/pkger/testdata/dashboard_gauge.json index e66156e3171..f44f1016797 100644 --- a/pkger/testdata/dashboard_gauge.json +++ b/pkger/testdata/dashboard_gauge.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_gauge.yml b/pkger/testdata/dashboard_gauge.yml index 9fc152032e2..277f48ffda3 100644 --- a/pkger/testdata/dashboard_gauge.yml +++ b/pkger/testdata/dashboard_gauge.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_heatmap.json b/pkger/testdata/dashboard_heatmap.json index a4e66f9e6c4..f6e9c9e9fb4 100644 --- a/pkger/testdata/dashboard_heatmap.json +++ b/pkger/testdata/dashboard_heatmap.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single heatmap chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ heatmap chart", diff --git a/pkger/testdata/dashboard_heatmap.yml b/pkger/testdata/dashboard_heatmap.yml index cf04a464200..fc45c907a16 100644 --- a/pkger/testdata/dashboard_heatmap.yml +++ b/pkger/testdata/dashboard_heatmap.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single heatmap chart + name: dash-0 spec: description: a dashboard w/ heatmap chart charts: diff --git a/pkger/testdata/dashboard_histogram.json b/pkger/testdata/dashboard_histogram.json index 2e475c63a30..dd6dd0d007c 100644 --- a/pkger/testdata/dashboard_histogram.json +++ b/pkger/testdata/dashboard_histogram.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single histogram chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single histogram chart", diff --git a/pkger/testdata/dashboard_histogram.yml b/pkger/testdata/dashboard_histogram.yml index 28d539b57c5..31d904a6b06 100644 --- a/pkger/testdata/dashboard_histogram.yml +++ b/pkger/testdata/dashboard_histogram.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single histogram chart + name: dash-0 spec: description: a dashboard w/ single histogram chart charts: diff --git a/pkger/testdata/dashboard_markdown.json b/pkger/testdata/dashboard_markdown.json index c2d1bca2814..54613a52dcb 100644 --- a/pkger/testdata/dashboard_markdown.json +++ b/pkger/testdata/dashboard_markdown.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single markdown chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single markdown chart", diff --git 
a/pkger/testdata/dashboard_markdown.yml b/pkger/testdata/dashboard_markdown.yml index 44ec954fbd0..afd568299f6 100644 --- a/pkger/testdata/dashboard_markdown.yml +++ b/pkger/testdata/dashboard_markdown.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single markdown chart + name: dash-0 spec: description: a dashboard w/ single markdown chart charts: diff --git a/pkger/testdata/dashboard_scatter.json b/pkger/testdata/dashboard_scatter.json index b6eae9b64cb..63fb14303ad 100644 --- a/pkger/testdata/dashboard_scatter.json +++ b/pkger/testdata/dashboard_scatter.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dashboard w/ single scatter chart" + "name": "dash-0" }, "spec": { "description": "a dashboard w/ single scatter chart", diff --git a/pkger/testdata/dashboard_scatter.yml b/pkger/testdata/dashboard_scatter.yml index 6fa321b27ef..173358b887b 100644 --- a/pkger/testdata/dashboard_scatter.yml +++ b/pkger/testdata/dashboard_scatter.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dashboard w/ single scatter chart + name: dash-0 spec: description: a dashboard w/ single scatter chart charts: diff --git a/pkger/testdata/dashboard_single_stat_plus_line.json b/pkger/testdata/dashboard_single_stat_plus_line.json index d6a8ade1a19..d42133db083 100644 --- a/pkger/testdata/dashboard_single_stat_plus_line.json +++ b/pkger/testdata/dashboard_single_stat_plus_line.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_single_stat_plus_line.yml b/pkger/testdata/dashboard_single_stat_plus_line.yml index f8f11486b32..06836603ae2 100644 --- a/pkger/testdata/dashboard_single_stat_plus_line.yml +++ b/pkger/testdata/dashboard_single_stat_plus_line.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_table.json b/pkger/testdata/dashboard_table.json index 6c8e3a30599..ddb51dc533c 100644 --- a/pkger/testdata/dashboard_table.json +++ b/pkger/testdata/dashboard_table.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_table.yml b/pkger/testdata/dashboard_table.yml index 1aea424ec2b..e332146027f 100644 --- a/pkger/testdata/dashboard_table.yml +++ b/pkger/testdata/dashboard_table.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git a/pkger/testdata/dashboard_xy.json b/pkger/testdata/dashboard_xy.json index 22f1f503a75..01e8510eec1 100644 --- a/pkger/testdata/dashboard_xy.json +++ b/pkger/testdata/dashboard_xy.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Dashboard", "metadata": { - "name": "dash_1" + "name": "dash-1" }, "spec": { "description": "desc1", diff --git a/pkger/testdata/dashboard_xy.yml b/pkger/testdata/dashboard_xy.yml index 430f0d8b0bb..ee0b4571b86 100644 --- a/pkger/testdata/dashboard_xy.yml +++ b/pkger/testdata/dashboard_xy.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Dashboard metadata: - name: dash_1 + name: dash-1 spec: description: desc1 charts: diff --git 
a/pkger/testdata/label.json b/pkger/testdata/label.json index 55f472ac51e..5038f7e815f 100644 --- a/pkger/testdata/label.json +++ b/pkger/testdata/label.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" }, "spec": { "color": "#000000", @@ -14,7 +14,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" }, "spec": { "color": "#FFFFFF", @@ -25,7 +25,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_3" + "name": "label-3" }, "spec": { "name": "display name", diff --git a/pkger/testdata/label.yml b/pkger/testdata/label.yml index 7f010080f8a..80a98cb17f9 100644 --- a/pkger/testdata/label.yml +++ b/pkger/testdata/label.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 spec: color: "#000000" description: label 2 description @@ -9,7 +9,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 spec: color: "#FFFFFF" description: label 1 description @@ -17,7 +17,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_3 + name: label-3 spec: name: display name description: label 3 description diff --git a/pkger/testdata/notification_endpoint.json b/pkger/testdata/notification_endpoint.json index fb5441cf6a7..d1cadee1f4f 100644 --- a/pkger/testdata/notification_endpoint.json +++ b/pkger/testdata/notification_endpoint.json @@ -3,14 +3,14 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointSlack", "metadata": { - "name": "slack_notification_endpoint" + "name": "slack-notification-endpoint" }, "spec":{ "name": "slack name", @@ -21,7 +21,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -30,7 +30,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_none_auth_notification_endpoint" + "name": "http-none-auth-notification-endpoint" }, "spec":{ "description": "http none auth desc", @@ -41,7 +41,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -50,7 +50,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_basic_auth_notification_endpoint" + "name": "http-basic-auth-notification-endpoint" }, "spec":{ "name": "basic endpoint name", @@ -64,7 +64,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -73,7 +73,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointHTTP", "metadata": { - "name": "http_bearer_auth_notification_endpoint" + "name": "http-bearer-auth-notification-endpoint" }, "spec":{ "description": "http bearer auth desc", @@ -84,7 +84,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -93,7 +93,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointPagerDuty", "metadata": { - "name": "pager_duty_notification_endpoint" + "name": "pager-duty-notification-endpoint" }, "spec":{ "name": "pager duty name", @@ -104,7 +104,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/notification_endpoint.yml b/pkger/testdata/notification_endpoint.yml index 3d976fa7342..c56f2f428b1 100644 --- 
a/pkger/testdata/notification_endpoint.yml +++ b/pkger/testdata/notification_endpoint.yml @@ -1,12 +1,12 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: slack_notification_endpoint + name: slack-notification-endpoint spec: name: slack name description: slack desc @@ -15,12 +15,12 @@ spec: token: tokenval associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_none_auth_notification_endpoint + name: http-none-auth-notification-endpoint spec: type: none description: http none auth desc @@ -29,12 +29,12 @@ spec: status: active associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_basic_auth_notification_endpoint + name: http-basic-auth-notification-endpoint spec: name: basic endpoint name description: http basic auth desc @@ -46,12 +46,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointHTTP metadata: - name: http_bearer_auth_notification_endpoint + name: http-bearer-auth-notification-endpoint spec: description: http bearer auth desc type: bearer @@ -60,12 +60,12 @@ spec: token: "secret token" associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: name: pager duty name description: pager duty desc @@ -74,4 +74,4 @@ spec: status: active associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/notification_endpoint_secrets.yml b/pkger/testdata/notification_endpoint_secrets.yml index df75a6c70b7..301a910273f 100644 --- a/pkger/testdata/notification_endpoint_secrets.yml +++ b/pkger/testdata/notification_endpoint_secrets.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointPagerDuty metadata: - name: pager_duty_notification_endpoint + name: pager-duty-notification-endpoint spec: description: pager duty desc url: http://localhost:8080/orgs/7167eb6719fa34e5/alert-history diff --git a/pkger/testdata/notification_rule.json b/pkger/testdata/notification_rule.json index bc36cca2695..94f97ad6fbc 100644 --- a/pkger/testdata/notification_rule.json +++ b/pkger/testdata/notification_rule.json @@ -3,27 +3,27 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationRule", "metadata": { - "name": "rule_UUID" + "name": "rule-uuid" }, "spec": { "name": "rule_0", "description": "desc_0", "channel": "#two-fer-one", - "endpointName": "endpoint_0", + "endpointName": "endpoint-0", "every": "10m", "offset": "30s", "messageTemplate": "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }", @@ -52,12 +52,12 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ] } @@ -66,7 +66,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "NotificationEndpointSlack", "metadata": { - 
"name": "endpoint_0" + "name": "endpoint-0" }, "spec": { "url": "https://hooks.slack.com/services/bip/piddy/boppidy" diff --git a/pkger/testdata/notification_rule.yml b/pkger/testdata/notification_rule.yml index 4803b29da51..bf494ab9b3d 100644 --- a/pkger/testdata/notification_rule.yml +++ b/pkger/testdata/notification_rule.yml @@ -2,22 +2,22 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationRule metadata: - name: rule_UUID + name: rule-uuid spec: name: rule_0 description: desc_0 channel: "#two-fer-one" - endpointName: endpoint_0 + endpointName: endpoint-0 every: 10m offset: 30s messageTemplate: "Notification Rule: ${ r._notification_rule_name } triggered by check: ${ r._check_name }: ${ r._message }" @@ -35,13 +35,13 @@ spec: operator: eQuAl associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: NotificationEndpointSlack metadata: - name: endpoint_0 + name: endpoint-0 spec: url: https://hooks.slack.com/services/bip/piddy/boppidy diff --git a/pkger/testdata/remote_bucket.json b/pkger/testdata/remote_bucket.json index ddc40c78a66..6fd9a25ba3c 100644 --- a/pkger/testdata/remote_bucket.json +++ b/pkger/testdata/remote_bucket.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Bucket", "metadata": { - "name": "rucket_11" + "name": "rucket-11" }, "spec": { "description": "bucket 1 description" diff --git a/pkger/testdata/tasks.json b/pkger/testdata/tasks.json index daf84fa13a6..c9489c1d03c 100644 --- a/pkger/testdata/tasks.json +++ b/pkger/testdata/tasks.json @@ -3,17 +3,17 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Task", "metadata": { - "name": "task_UUID" + "name": "task-uuid" }, "spec": { - "name": "task_0", + "name": "task-0", "description": "desc_0", "every": "10m", "offset": "15s", @@ -22,7 +22,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } @@ -31,7 +31,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Task", "metadata": { - "name": "task_1" + "name": "task-1" }, "spec": { "description": "desc_1", @@ -40,7 +40,7 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" } ] } diff --git a/pkger/testdata/tasks.yml b/pkger/testdata/tasks.yml index 5a655129708..460180c9016 100644 --- a/pkger/testdata/tasks.yml +++ b/pkger/testdata/tasks.yml @@ -1,14 +1,14 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_UUID + name: task-uuid spec: - name: task_0 + name: task-0 description: desc_0 every: 10m offset: 15s @@ -22,12 +22,12 @@ spec: status: inactive associations: - kind: Label - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Task metadata: - name: task_1 + name: task-1 spec: description: desc_1 cron: 15 * * * * @@ -40,4 +40,4 @@ spec: |> yield(name: "mean") associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/telegraf.json b/pkger/testdata/telegraf.json index 77a579c0e70..12d3012ddaf 100644 --- a/pkger/testdata/telegraf.json +++ b/pkger/testdata/telegraf.json @@ -3,21 +3,21 @@ "apiVersion": "influxdata.com/v2alpha1", 
"kind": "Label", "metadata": { - "name": "label_1" + "name": "label-1" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Label", "metadata": { - "name": "label_2" + "name": "label-2" } }, { "apiVersion": "influxdata.com/v2alpha1", "kind": "Telegraf", "metadata": { - "name": "first_tele_config" + "name": "first-tele-config" }, "spec": { "name": "display name", @@ -25,11 +25,11 @@ "associations": [ { "kind": "Label", - "name": "label_1" + "name": "label-1" }, { "kind": "Label", - "name": "label_2" + "name": "label-2" } ], "config": "# Configuration for telegraf agent\n [agent]\n ## Default data collection interval for all inputs\n interval = \"10s\"\n ## Rounds collection interval to 'interval'\n ## ie, if interval=\"10s\" then always collect on :00, :10, :20, etc.\n round_interval = true\n\n ## Telegraf will send metrics to outputs in batches of at most\n ## metric_batch_size metrics.\n ## This controls the size of writes that Telegraf sends to output plugins.\n metric_batch_size = 1000\n\n ## For failed writes, telegraf will cache metric_buffer_limit metrics for each\n ## output, and will flush this buffer on a successful write. Oldest metrics\n ## are dropped first when this buffer fills.\n ## This buffer only fills when writes fail to output plugin(s).\n metric_buffer_limit = 10000\n\n ## Collection jitter is used to jitter the collection by a random amount.\n ## Each plugin will sleep for a random time within jitter before collecting.\n ## This can be used to avoid many plugins querying things like sysfs at the\n ## same time, which can have a measurable effect on the system.\n collection_jitter = \"0s\"\n\n ## Default flushing interval for all outputs. Maximum flush_interval will be\n ## flush_interval + flush_jitter\n flush_interval = \"10s\"\n ## Jitter the flush interval by a random amount. This is primarily to avoid\n ## large write spikes for users running a large number of telegraf instances.\n ## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s\n flush_jitter = \"0s\"\n\n ## By default or when set to \"0s\", precision will be set to the same\n ## timestamp order as the collection interval, with the maximum being 1s.\n ## ie, when interval = \"10s\", precision will be \"1s\"\n ## when interval = \"250ms\", precision will be \"1ms\"\n ## Precision will NOT be used for service inputs. It is up to each individual\n ## service input to set the timestamp at the appropriate precision.\n ## Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\".\n precision = \"\"\n\n ## Logging configuration:\n ## Run telegraf with debug log messages.\n debug = false\n ## Run telegraf in quiet mode (error log messages only).\n quiet = false\n ## Specify the log file name. 
The empty string means to log to stderr.\n logfile = \"\"\n\n ## Override default hostname, if empty use os.Hostname()\n hostname = \"\"\n ## If set to true, do no set the \"host\" tag in the telegraf agent.\n omit_hostname = false\n [[outputs.influxdb_v2]]\n ## The URLs of the InfluxDB cluster nodes.\n ##\n ## Multiple URLs can be specified for a single cluster, only ONE of the\n ## urls will be written to each interval.\n ## urls exp: http://127.0.0.1:9999\n urls = [\"http://localhost:9999\"]\n\n ## Token for authentication.\n token = \"$INFLUX_TOKEN\"\n\n ## Organization is the name of the organization you wish to write to; must exist.\n organization = \"rg\"\n\n ## Destination bucket to write into.\n bucket = \"rucket_3\"\n [[inputs.cpu]]\n ## Whether to report per-cpu stats or not\n percpu = true\n ## Whether to report total system cpu stats or not\n totalcpu = true\n ## If true, collect raw CPU time metrics.\n collect_cpu_time = false\n ## If true, compute and report the sum of all non-idle CPU states.\n report_active = false\n [[inputs.disk]]\n ## By default stats will be gathered for all mount points.\n ## Set mount_points will restrict the stats to only the specified mount points.\n # mount_points = [\"/\"]\n ## Ignore mount points by filesystem type.\n ignore_fs = [\"tmpfs\", \"devtmpfs\", \"devfs\", \"overlay\", \"aufs\", \"squashfs\"]\n [[inputs.diskio]]\n [[inputs.mem]]\n [[inputs.net]]\n [[inputs.processes]]\n [[inputs.swap]]\n [[inputs.system]]" @@ -39,7 +39,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Telegraf", "metadata": { - "name": "tele_2" + "name": "tele-2" }, "spec": { "config": "# Configuration for telegraf agent\n [agent]\n ## Default data collection interval for all inputs\n interval = \"10s\"\n ## Rounds collection interval to 'interval'\n ## ie, if interval=\"10s\" then always collect on :00, :10, :20, etc.\n round_interval = true\n\n ## Telegraf will send metrics to outputs in batches of at most\n ## metric_batch_size metrics.\n ## This controls the size of writes that Telegraf sends to output plugins.\n metric_batch_size = 1000\n\n ## For failed writes, telegraf will cache metric_buffer_limit metrics for each\n ## output, and will flush this buffer on a successful write. Oldest metrics\n ## are dropped first when this buffer fills.\n ## This buffer only fills when writes fail to output plugin(s).\n metric_buffer_limit = 10000\n\n ## Collection jitter is used to jitter the collection by a random amount.\n ## Each plugin will sleep for a random time within jitter before collecting.\n ## This can be used to avoid many plugins querying things like sysfs at the\n ## same time, which can have a measurable effect on the system.\n collection_jitter = \"0s\"\n\n ## Default flushing interval for all outputs. Maximum flush_interval will be\n ## flush_interval + flush_jitter\n flush_interval = \"10s\"\n ## Jitter the flush interval by a random amount. This is primarily to avoid\n ## large write spikes for users running a large number of telegraf instances.\n ## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s\n flush_jitter = \"0s\"\n\n ## By default or when set to \"0s\", precision will be set to the same\n ## timestamp order as the collection interval, with the maximum being 1s.\n ## ie, when interval = \"10s\", precision will be \"1s\"\n ## when interval = \"250ms\", precision will be \"1ms\"\n ## Precision will NOT be used for service inputs. 
It is up to each individual\n ## service input to set the timestamp at the appropriate precision.\n ## Valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\".\n precision = \"\"\n\n ## Logging configuration:\n ## Run telegraf with debug log messages.\n debug = false\n ## Run telegraf in quiet mode (error log messages only).\n quiet = false\n ## Specify the log file name. The empty string means to log to stderr.\n logfile = \"\"\n\n ## Override default hostname, if empty use os.Hostname()\n hostname = \"\"\n ## If set to true, do no set the \"host\" tag in the telegraf agent.\n omit_hostname = false\n [[outputs.influxdb_v2]]\n ## The URLs of the InfluxDB cluster nodes.\n ##\n ## Multiple URLs can be specified for a single cluster, only ONE of the\n ## urls will be written to each interval.\n ## urls exp: http://127.0.0.1:9999\n urls = [\"http://localhost:9999\"]\n\n ## Token for authentication.\n token = \"$INFLUX_TOKEN\"\n\n ## Organization is the name of the organization you wish to write to; must exist.\n organization = \"rg\"\n\n ## Destination bucket to write into.\n bucket = \"rucket_3\"\n [[inputs.cpu]]\n ## Whether to report per-cpu stats or not\n percpu = true\n ## Whether to report total system cpu stats or not\n totalcpu = true\n ## If true, collect raw CPU time metrics.\n collect_cpu_time = false\n ## If true, compute and report the sum of all non-idle CPU states.\n report_active = false\n [[inputs.disk]]\n ## By default stats will be gathered for all mount points.\n ## Set mount_points will restrict the stats to only the specified mount points.\n # mount_points = [\"/\"]\n ## Ignore mount points by filesystem type.\n ignore_fs = [\"tmpfs\", \"devtmpfs\", \"devfs\", \"overlay\", \"aufs\", \"squashfs\"]\n [[inputs.diskio]]\n [[inputs.mem]]\n [[inputs.net]]\n [[inputs.processes]]\n [[inputs.swap]]\n [[inputs.system]]" diff --git a/pkger/testdata/telegraf.yml b/pkger/testdata/telegraf.yml index 6eb56e28afb..84543f3a503 100644 --- a/pkger/testdata/telegraf.yml +++ b/pkger/testdata/telegraf.yml @@ -1,25 +1,25 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_2 + name: label-2 --- apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: first_tele_config + name: first-tele-config spec: name: display name description: desc associations: - kind: Label - name: label_1 + name: label-1 - kind: Label - name: label_2 + name: label-2 config: | # Configuration for telegraf agent [agent] @@ -118,7 +118,7 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Telegraf metadata: - name: tele_2 + name: tele-2 spec: config: | # Configuration for telegraf agent diff --git a/pkger/testdata/variable_associates_label.yml b/pkger/testdata/variable_associates_label.yml index 283e97114ac..e707eeca874 100644 --- a/pkger/testdata/variable_associates_label.yml +++ b/pkger/testdata/variable_associates_label.yml @@ -1,15 +1,15 @@ apiVersion: influxdata.com/v2alpha1 kind: Label metadata: - name: label_1 + name: label-1 --- apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_1 + name: var-1 spec: type: constant values: [first val] associations: - kind: Label - name: label_1 + name: label-1 diff --git a/pkger/testdata/variables.json b/pkger/testdata/variables.json index 8d736f73ad1..0d8038b4170 100644 --- a/pkger/testdata/variables.json +++ b/pkger/testdata/variables.json @@ -3,7 +3,7 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": 
{ - "name": "var_query_1" + "name": "var-query-1" }, "spec": { "name": "query var", @@ -17,10 +17,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_query_2" + "name": "var-query-2" }, "spec": { - "description": "var_query_2 desc", + "description": "var-query-2 desc", "type": "query", "query": "an influxql query of sorts", "language": "influxql" @@ -30,10 +30,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_const_3" + "name": "var-const-3" }, "spec": { - "description": "var_const_3 desc", + "description": "var-const-3 desc", "type": "constant", "values": ["first val"] } @@ -42,10 +42,10 @@ "apiVersion": "influxdata.com/v2alpha1", "kind": "Variable", "metadata": { - "name": "var_map_4" + "name": "var-map-4" }, "spec": { - "description": "var_map_4 desc", + "description": "var-map-4 desc", "type": "map", "values": { "k1": "v1" diff --git a/pkger/testdata/variables.yml b/pkger/testdata/variables.yml index ff600388263..6d4896d6627 100644 --- a/pkger/testdata/variables.yml +++ b/pkger/testdata/variables.yml @@ -1,7 +1,7 @@ apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_1 + name: var-query-1 spec: name: query var description: query var desc @@ -13,9 +13,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_query_2 + name: var-query-2 spec: - description: var_query_2 desc + description: var-query-2 desc type: query query: an influxql query of sorts language: influxql @@ -23,9 +23,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_const_3 + name: var-const-3 spec: - description: var_const_3 desc + description: var-const-3 desc type: constant values: - first val @@ -33,9 +33,9 @@ spec: apiVersion: influxdata.com/v2alpha1 kind: Variable metadata: - name: var_map_4 + name: var-map-4 spec: - description: var_map_4 desc + description: var-map-4 desc type: map values: k1: v1