Commit 690c7f8

feat: Added corefunc_str_pascal.
1 parent 3065ca0 commit 690c7f8

16 files changed: +594 −12

.pre-commit-config.yaml

+7
@@ -80,6 +80,13 @@ repos:
       - id: script-must-have-extension
       - id: shellcheck
       - id: shfmt
+        args:
+          - -s
+          - -w
+          - -ln=auto
+          - -i=4
+          - -ci
+          - -sr
       - id: terraform-fmt
       # - id: terrascan
       # - id: tfsec

Makefile

+9 −2
@@ -12,7 +12,7 @@ current_dir := $(dir $(mkfile_path))
 # Global stuff.

 GO=$(shell which go)
-HOMEBREW_PACKAGES=bash coreutils findutils go jq nodejs pre-commit python@3.11
+HOMEBREW_PACKAGES=bash bats-core coreutils findutils go jq nodejs pre-commit python@3.11 tfschema

 # Determine the operating system and CPU arch.
 OS=$(shell uname -o | tr '[:upper:]' '[:lower:]')
@@ -250,7 +250,14 @@ lint: vuln license pre-commit

 .PHONY: test
 ## test: [test]* Runs ALL tests.
-test: unit examples acc
+test: unit examples acc bats
+
+.PHONY: bats
+## bats: [test] Tests the output of the provider using tfschema and BATS.
+bats: build
+	@ $(ECHO) " "
+	@ $(ECHO) "\033[1;33m=====> Running BATS/tfschema tests...\033[0m"
+	bats bats/*

 .PHONY: acc
 ## acc: [test] Runs Terraform provider acceptance tests. Set NAME= (without 'Test') to run a specific test by name

bats/tfschema.bats.sh

+61
@@ -0,0 +1,61 @@
+#!/usr/bin/env bats
+
+@test "listing the data sources" {
+    run tfschema data list corefunc
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == "corefunc_env_ensure" ]]
+    [[ ${lines[1]} == "corefunc_str_camel" ]]
+    [[ ${lines[2]} == "corefunc_str_pascal" ]]
+    [[ ${lines[3]} == "corefunc_str_snake" ]]
+    [[ ${lines[4]} == "corefunc_str_truncate_label" ]]
+}
+
+@test "corefunc_env_ensure: attrs" {
+    run bash -c "tfschema data show -format=json corefunc_env_ensure | jq -Mrc '.attributes[]'"
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == '{"name":"id","type":"number","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+    [[ ${lines[1]} == '{"name":"name","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[2]} == '{"name":"pattern","type":"string","required":false,"optional":true,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[3]} == '{"name":"value","type":"string","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+}
+
+@test "corefunc_str_camel: attrs" {
+    run bash -c "tfschema data show -format=json corefunc_str_camel | jq -Mrc '.attributes[]'"
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == '{"name":"id","type":"number","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+    [[ ${lines[1]} == '{"name":"string","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[2]} == '{"name":"value","type":"string","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+}
+
+@test "corefunc_str_pascal: attrs" {
+    run bash -c "tfschema data show -format=json corefunc_str_pascal | jq -Mrc '.attributes[]'"
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == '{"name":"acronym_caps","type":"bool","required":false,"optional":true,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[1]} == '{"name":"id","type":"number","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+    [[ ${lines[2]} == '{"name":"string","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[3]} == '{"name":"value","type":"string","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+}
+
+@test "corefunc_str_snake: attrs" {
+    run bash -c "tfschema data show -format=json corefunc_str_snake | jq -Mrc '.attributes[]'"
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == '{"name":"id","type":"number","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+    [[ ${lines[1]} == '{"name":"string","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[2]} == '{"name":"value","type":"string","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+}
+
+@test "corefunc_str_truncate_label: attrs" {
+    run bash -c "tfschema data show -format=json corefunc_str_truncate_label | jq -Mrc '.attributes[]'"
+
+    [ "$status" -eq 0 ]
+    [[ ${lines[0]} == '{"name":"id","type":"number","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+    [[ ${lines[1]} == '{"name":"label","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[2]} == '{"name":"max_length","type":"number","required":false,"optional":true,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[3]} == '{"name":"prefix","type":"string","required":true,"optional":false,"computed":false,"sensitive":false}' ]]
+    [[ ${lines[4]} == '{"name":"value","type":"string","required":false,"optional":false,"computed":true,"sensitive":false}' ]]
+}

corefuncprovider/env_ensure_data_source_test.go

+2
@@ -51,6 +51,8 @@ func TestAccEnvEnsureDataSource(t *testing.T) {
 			log.Fatalln(err)
 		}

+		// fmt.Fprintln(os.Stderr, buf.String())
+
 		// We expect the error to be nil.
 		if tc.ExpectedErr == nil {
 			resource.Test(t, resource.TestCase{

corefuncprovider/provider.go

+1
@@ -103,6 +103,7 @@ func (p *coreFuncProvider) DataSources(ctx context.Context) []func() datasource.
 	return []func() datasource.DataSource{
 		EnvEnsureDataSource,
 		StrCamelDataSource,
+		StrPascalDataSource,
 		StrSnakeDataSource,
 		TruncateLabelDataSource,
 	}

corefuncprovider/str_camel_data_source.go

+17 −1
@@ -40,7 +40,8 @@ type (

 	// strCamelDataSourceModel maps the data source schema data.
 	strCamelDataSourceModel struct {
-		ID     types.Int64  `tfsdk:"id"`
+		ID types.Int64 `tfsdk:"id"`
+		// AcronymCaps types.Bool `tfsdk:"acronym_caps"`
 		String types.String `tfsdk:"string"`
 		Value  types.String `tfsdk:"value"`
 	}
@@ -95,6 +96,12 @@ func (d *strCamelDataSource) Schema(
 				Description: "The string to convert to `camelCase`.",
 				Required:    true,
 			},
+			// "acronym_caps": schema.BoolAttribute{
+			// 	Description: "Whether or not to keep acronyms as uppercase. A value of `true` means that acronyms " +
+			// 		"will be converted to uppercase. A value of `false` means that acronyms will use typical " +
+			// 		"casing. The default value is `false`.",
+			// 	Optional: true,
+			// },
 			"value": schema.StringAttribute{
 				Description: "The value of the string.",
 				Computed:    true,
@@ -153,9 +160,18 @@ func (d *strCamelDataSource) Read(

 	state.ID = types.Int64Value(1)

+	// Default values
+	// opts := caps.Opts{}
+	// opts := caps.WithReplaceStyleCamel()
+
+	// if state.AcronymCaps.ValueBool() { // lint:allow_commented
+	// 	opts = caps.WithReplaceStyleScreaming()
+	// }
+
 	state.Value = types.StringValue(
 		caps.ToLowerCamel(
 			state.String.ValueString(),
+			// opts,
 		),
 	)

corefuncprovider/str_camel_data_source_test.go

+1 −2
@@ -18,7 +18,6 @@ import (
 	"bytes"
 	"fmt"
 	"log"
-	"os"
 	"strings"
 	"testing"
 	"text/template"
@@ -48,7 +47,7 @@ func TestAccStrCamelDataSource(t *testing.T) {
 		log.Fatalln(err)
 	}

-	fmt.Fprintln(os.Stderr, buf.String())
+	// fmt.Fprintln(os.Stderr, buf.String())

 	resource.Test(t, resource.TestCase{
 		ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,

corefuncprovider/str_pascal_data_source.go

+185
@@ -0,0 +1,185 @@
+// Copyright 2023, Ryan Parman
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package corefuncprovider // lint:no_dupe
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	"github.com/chanced/caps"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+	"github.com/hashicorp/terraform-plugin-log/tflog"
+	"github.com/lithammer/dedent"
+)
+
+// Ensure the implementation satisfies the expected interfaces.
+var (
+	_ datasource.DataSource              = &strPascalDataSource{}
+	_ datasource.DataSourceWithConfigure = &strPascalDataSource{}
+)
+
+// strPascalDataSource is the data source implementation.
+type (
+	strPascalDataSource struct{}
+
+	// strPascalDataSourceModel maps the data source schema data.
+	strPascalDataSourceModel struct {
+		ID          types.Int64  `tfsdk:"id"`
+		AcronymCaps types.Bool   `tfsdk:"acronym_caps"`
+		String      types.String `tfsdk:"string"`
+		Value       types.String `tfsdk:"value"`
+	}
+)
+
+// StrPascalDataSource is a method that exposes its paired Go function as a
+// Terraform Data Source.
+func StrPascalDataSource() datasource.DataSource { // lint:allow_return_interface
+	return &strPascalDataSource{}
+}
+
+// Metadata returns the data source type name.
+func (d *strPascalDataSource) Metadata(
+	ctx context.Context,
+	req datasource.MetadataRequest,
+	resp *datasource.MetadataResponse,
+) {
+	tflog.Info(ctx, "Starting StrPascal DataSource Metadata method.")
+
+	resp.TypeName = req.ProviderTypeName + "_str_pascal"
+
+	tflog.Debug(ctx, fmt.Sprintf("req.ProviderTypeName = %s", req.ProviderTypeName))
+	tflog.Debug(ctx, fmt.Sprintf("resp.TypeName = %s", resp.TypeName))
+
+	tflog.Info(ctx, "Ending StrPascal DataSource Metadata method.")
+}
+
+// Schema defines the schema for the data source.
+func (d *strPascalDataSource) Schema(
+	ctx context.Context,
+	_ datasource.SchemaRequest,
+	resp *datasource.SchemaResponse,
+) {
+	tflog.Info(ctx, "Starting StrPascal DataSource Schema method.")
+
+	resp.Schema = schema.Schema{
+		MarkdownDescription: strings.TrimSpace(dedent.Dedent(`
+		Converts a string to ` + "`" + `PascalCase` + "`" + `, removing any non-alphanumeric characters.
+
+		-> Some acronyms are maintained as uppercase. See
+		[caps: pkg-variables](https://pkg.go.dev/github.com/chanced/caps#pkg-variables) for a complete list.
+
+		Maps to the [` + "`" + `caps.ToCamel()` + "`" + `](https://pkg.go.dev/github.com/chanced/caps#ToCamel)
+		Go method, which can be used in ` + Terratest + `.
+		`)),
+		Attributes: map[string]schema.Attribute{
+			"id": schema.Int64Attribute{
+				Description: "Not used. Required by the " + TPF + ".",
+				Computed:    true,
+			},
+			"string": schema.StringAttribute{
+				Description: "The string to convert to `PascalCase`.",
+				Required:    true,
+			},
+			"acronym_caps": schema.BoolAttribute{
+				Description: "Whether or not to keep acronyms as uppercase. A value of `true` means that acronyms " +
+					"will be converted to uppercase. A value of `false` means that acronyms will use typical " +
+					"casing. The default value is `false`.",
+				Optional: true,
+			},
+			"value": schema.StringAttribute{
+				Description: "The value of the string.",
+				Computed:    true,
+			},
+		},
+	}
+
+	tflog.Info(ctx, "Ending StrPascal DataSource Schema method.")
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *strPascalDataSource) Configure(
+	ctx context.Context,
+	req datasource.ConfigureRequest,
+	_ *datasource.ConfigureResponse,
+) {
+	tflog.Info(ctx, "Starting StrPascal DataSource Configure method.")
+
+	if req.ProviderData == nil {
+		return
+	}
+
+	tflog.Info(ctx, "Ending StrPascal DataSource Configure method.")
+}
+
+func (d strPascalDataSource) Create(
+	ctx context.Context,
+	req resource.CreateRequest, // lint:allow_large_memory
+	resp *resource.CreateResponse,
+) {
+	tflog.Info(ctx, "Starting StrPascal DataSource Create method.")
+
+	var plan strPascalDataSourceModel
+
+	diags := req.Plan.Get(ctx, &plan)
+	resp.Diagnostics.Append(diags...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	tflog.Info(ctx, "Ending StrPascal DataSource Create method.")
+}
+
+// Read refreshes the Terraform state with the latest data.
+func (d *strPascalDataSource) Read(
+	ctx context.Context,
+	_ datasource.ReadRequest, // lint:allow_large_memory
+	resp *datasource.ReadResponse,
+) {
+	tflog.Info(ctx, "Starting StrPascal DataSource Read method.")
+
+	var state strPascalDataSourceModel
+	diags := resp.State.Get(ctx, &state)
+	resp.Diagnostics.Append(diags...)
+
+	state.ID = types.Int64Value(1)
+
+	// Default values
+	opts := caps.Opts{}
+
+	if !state.AcronymCaps.ValueBool() {
+		opts = caps.WithReplaceStyleCamel()
+	}
+
+	state.Value = types.StringValue(
+		caps.ToCamel(
+			state.String.ValueString(),
+			opts,
+		),
+	)
+
+	diags = resp.State.Set(ctx, &state)
+	resp.Diagnostics.Append(diags...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	tflog.Info(ctx, "Ending StrPascal DataSource Read method.")
+}
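
For context on the acronym_caps attribute above: the Read method switches between the two replace styles of the chanced/caps library that this commit already imports. The following standalone sketch is not part of the commit; the sample input and the expected outputs in the comments are illustrative assumptions rather than verified values.

package main

import (
	"fmt"

	"github.com/chanced/caps"
)

func main() {
	input := "employee api key" // hypothetical sample input

	// acronym_caps = false (the data source default): the Read method passes
	// caps.WithReplaceStyleCamel(), so recognized acronyms are re-cased like
	// ordinary words, e.g. "EmployeeApiKey".
	fmt.Println(caps.ToCamel(input, caps.WithReplaceStyleCamel()))

	// acronym_caps = true: the Read method keeps the zero-value caps.Opts{},
	// so the library's default replace style applies and recognized acronyms
	// stay uppercase, e.g. "EmployeeAPIKey".
	fmt.Println(caps.ToCamel(input, caps.Opts{}))
}
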
@@ -0,0 +1,6 @@
+data "corefunc_str_pascal" "pascal" {
+  string = "{{ .Input }}"
+  {{- with .AcronymCaps }}
+  acronym_caps = {{ printf "%v" . }}
+  {{- end }}
+}
