From 26271f3121bdcadee84437f3064b5d3be88ea776 Mon Sep 17 00:00:00 2001 From: Theophile Chevalier Date: Tue, 7 Jan 2020 20:26:09 +0100 Subject: [PATCH 1/6] Add source_hash to aws_s3_bucket_object Allows one to store a hash in state to trigger resource update. --- aws/resource_aws_s3_bucket_object.go | 12 ++++ aws/resource_aws_s3_bucket_object_test.go | 57 +++++++++++++++++++ website/docs/r/s3_bucket_object.html.markdown | 1 + 3 files changed, 70 insertions(+) diff --git a/aws/resource_aws_s3_bucket_object.go b/aws/resource_aws_s3_bucket_object.go index 4f90ae35c8c3..cc95b3a50605 100644 --- a/aws/resource_aws_s3_bucket_object.go +++ b/aws/resource_aws_s3_bucket_object.go @@ -191,6 +191,11 @@ func resourceAwsS3BucketObject() *schema.Resource { Optional: true, ValidateFunc: validation.IsRFC3339Time, }, + + "source_hash": { + Type: schema.TypeString, + Optional: true, + }, }, } } @@ -564,6 +569,12 @@ func resourceAwsS3BucketObjectCustomizeDiff(_ context.Context, d *schema.Resourc if hasS3BucketObjectContentChanges(d) { return d.SetNewComputed("version_id") } + + if d.HasChange("source_hash") { + d.SetNewComputed("version_id") + d.SetNewComputed("etag") + } + return nil } @@ -582,6 +593,7 @@ func hasS3BucketObjectContentChanges(d resourceDiffer) bool { "metadata", "server_side_encryption", "source", + "source_hash", "storage_class", "website_redirect", } { diff --git a/aws/resource_aws_s3_bucket_object_test.go b/aws/resource_aws_s3_bucket_object_test.go index f88b4283918b..dde4681eb301 100644 --- a/aws/resource_aws_s3_bucket_object_test.go +++ b/aws/resource_aws_s3_bucket_object_test.go @@ -247,6 +247,48 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) { }) } +func TestAccAWSS3BucketObject_SourceHashTrigger(t *testing.T) { + var obj, updated_obj s3.GetObjectOutput + resourceName := "aws_s3_bucket_object.object" + source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") + rewrite_source := func(*terraform.State) error { + if err := 
ioutil.WriteFile(source, []byte("{any other thing will do }"), 0644); err != nil { + os.Remove(source) + t.Fatal(err) + } + return nil + } + rInt := acctest.RandInt() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, + Steps: []resource.TestStep{ + { + PreConfig: func() {}, + Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source), + Check: resource.ComposeTestCheckFunc( + testAccCheckAWSS3BucketObjectExists(resourceName, &obj), + testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), + resource.TestCheckResourceAttr(resourceName, "source_hash", "7b006ff4d70f68cc65061acf2f802e6f"), + rewrite_source, + ), + ExpectNonEmptyPlan: true, + }, + { + PreConfig: func() {}, + Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source), + Check: resource.ComposeTestCheckFunc( + testAccCheckAWSS3BucketObjectExists(resourceName, &updated_obj), + testAccCheckAWSS3BucketObjectBody(&updated_obj, "{any other thing will do }"), + resource.TestCheckResourceAttr(resourceName, "source_hash", "77a736aa9e04d0dc96b9b30894963983"), + ), + }, + }, + }) +} + func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" @@ -1582,6 +1624,21 @@ resource "aws_s3_bucket_object" "object" { `, randInt, contentBase64) } +func testAccAWSS3BucketObjectConfig_SourceHashTrigger(randInt int, source string) string { + return fmt.Sprintf(` +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" +} + +resource "aws_s3_bucket_object" "object" { + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + source = "%s" + source_hash = "${filemd5("%s")}" +} +`, randInt, source, source) +} + func testAccAWSS3BucketObjectConfig_updateable(randInt int, bucketVersioning bool, source string) string { return fmt.Sprintf(` resource 
"aws_s3_bucket" "object_bucket_3" { diff --git a/website/docs/r/s3_bucket_object.html.markdown b/website/docs/r/s3_bucket_object.html.markdown index 6bbd9dfa11c8..f9934409c5aa 100644 --- a/website/docs/r/s3_bucket_object.html.markdown +++ b/website/docs/r/s3_bucket_object.html.markdown @@ -131,6 +131,7 @@ The following arguments are supported: for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`". * `etag` - (Optional) Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`. +* `source_hash` - (Optional) Used to trigger updates based on source local changes. If used, must be set to `${filemd5("path/to/source")}` (Terraform 0.11.12 or later). This differs from `etag` since the value is stored in the state and does not come from AWS. Especially useful to address `etag` KMS encryption limitations. * `server_side_encryption` - (Optional) Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`". * `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. 
Terraform will only perform drift detection if a configuration value From bef29fe79c6eb9dcf39ee9b8f481d6260cc79f44 Mon Sep 17 00:00:00 2001 From: Dirk Avery Date: Mon, 12 Jul 2021 21:59:06 -0400 Subject: [PATCH 2/6] tests/r/s3_bucket_object: Clean up tests --- aws/resource_aws_s3_bucket_object_test.go | 369 +++++++++++----------- 1 file changed, 185 insertions(+), 184 deletions(-) diff --git a/aws/resource_aws_s3_bucket_object_test.go b/aws/resource_aws_s3_bucket_object_test.go index dde4681eb301..561aa388a3fd 100644 --- a/aws/resource_aws_s3_bucket_object_test.go +++ b/aws/resource_aws_s3_bucket_object_test.go @@ -4,6 +4,7 @@ import ( "encoding/base64" "fmt" "io" + "io/ioutil" "log" "os" "reflect" @@ -130,7 +131,7 @@ func TestAccAWSS3BucketObject_noNameNoKey(t *testing.T) { func TestAccAWSS3BucketObject_empty(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -140,7 +141,7 @@ func TestAccAWSS3BucketObject_empty(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfigEmpty(rInt), + Config: testAccAWSS3BucketObjectConfigEmpty(rName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, ""), @@ -153,7 +154,7 @@ func TestAccAWSS3BucketObject_empty(t *testing.T) { func TestAccAWSS3BucketObject_source(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) @@ -165,7 +166,7 @@ func TestAccAWSS3BucketObject_source(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: 
testAccAWSS3BucketObjectConfigSource(rInt, source), + Config: testAccAWSS3BucketObjectConfigSource(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), @@ -178,7 +179,7 @@ func TestAccAWSS3BucketObject_source(t *testing.T) { func TestAccAWSS3BucketObject_content(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -188,7 +189,7 @@ func TestAccAWSS3BucketObject_content(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfigContent(rInt, "some_bucket_content"), + Config: testAccAWSS3BucketObjectConfigContent(rName, "some_bucket_content"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "some_bucket_content"), @@ -201,7 +202,7 @@ func TestAccAWSS3BucketObject_content(t *testing.T) { func TestAccAWSS3BucketObject_etagEncryption(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) @@ -213,7 +214,7 @@ func TestAccAWSS3BucketObject_etagEncryption(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectEtagEncryption(rInt, source), + Config: testAccAWSS3BucketObjectEtagEncryption(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), @@ -227,7 +228,7 @@ func TestAccAWSS3BucketObject_etagEncryption(t *testing.T) { func 
TestAccAWSS3BucketObject_contentBase64(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -237,7 +238,7 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfigContentBase64(rInt, base64.StdEncoding.EncodeToString([]byte("some_bucket_content"))), + Config: testAccAWSS3BucketObjectConfigContentBase64(rName, base64.StdEncoding.EncodeToString([]byte("some_bucket_content"))), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "some_bucket_content"), @@ -247,18 +248,18 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) { }) } -func TestAccAWSS3BucketObject_SourceHashTrigger(t *testing.T) { +func TestAccAWSS3BucketObject_sourceHashTrigger(t *testing.T) { var obj, updated_obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") - rewrite_source := func(*terraform.State) error { + rewriteSourceF := func(*terraform.State) error { if err := ioutil.WriteFile(source, []byte("{any other thing will do }"), 0644); err != nil { os.Remove(source) t.Fatal(err) } return nil } - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -267,18 +268,18 @@ func TestAccAWSS3BucketObject_SourceHashTrigger(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source), + Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, 
&obj), testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), resource.TestCheckResourceAttr(resourceName, "source_hash", "7b006ff4d70f68cc65061acf2f802e6f"), - rewrite_source, + rewriteSourceF, ), ExpectNonEmptyPlan: true, }, { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source), + Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &updated_obj), testAccCheckAWSS3BucketObjectBody(&updated_obj, "{any other thing will do }"), @@ -292,7 +293,7 @@ func TestAccAWSS3BucketObject_SourceHashTrigger(t *testing.T) { func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) @@ -304,7 +305,7 @@ func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics(rInt, source), + Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), @@ -316,10 +317,10 @@ func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) { }) } -func TestAccAWSS3BucketObject_NonVersioned(t *testing.T) { +func TestAccAWSS3BucketObject_nonVersioned(t *testing.T) { sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial object state") defer os.Remove(sourceInitial) - + rName := acctest.RandomWithPrefix("tf-acc-test") var originalObj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" @@ -330,7 +331,7 @@ func 
TestAccAWSS3BucketObject_NonVersioned(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_NonVersioned(acctest.RandInt(), sourceInitial), + Config: testAccAWSS3BucketObjectConfig_nonVersioned(rName, sourceInitial), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), testAccCheckAWSS3BucketObjectBody(&originalObj, "initial object state"), @@ -344,7 +345,7 @@ func TestAccAWSS3BucketObject_NonVersioned(t *testing.T) { func TestAccAWSS3BucketObject_updates(t *testing.T) { var originalObj, modifiedObj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial object state") defer os.Remove(sourceInitial) @@ -358,7 +359,7 @@ func TestAccAWSS3BucketObject_updates(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, sourceInitial), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, false, sourceInitial), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), testAccCheckAWSS3BucketObjectBody(&originalObj, "initial object state"), @@ -369,7 +370,7 @@ func TestAccAWSS3BucketObject_updates(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, sourceModified), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, false, sourceModified), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj), testAccCheckAWSS3BucketObjectBody(&modifiedObj, "modified object"), @@ -386,7 +387,7 @@ func TestAccAWSS3BucketObject_updates(t *testing.T) { func TestAccAWSS3BucketObject_updateSameFile(t *testing.T) { var originalObj, modifiedObj s3.GetObjectOutput 
resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") startingData := "lane 8" changingData := "chicane" @@ -409,7 +410,7 @@ func TestAccAWSS3BucketObject_updateSameFile(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, filename), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, false, filename), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), testAccCheckAWSS3BucketObjectBody(&originalObj, startingData), @@ -419,7 +420,7 @@ func TestAccAWSS3BucketObject_updateSameFile(t *testing.T) { ExpectNonEmptyPlan: true, }, { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, filename), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, false, filename), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj), testAccCheckAWSS3BucketObjectBody(&modifiedObj, changingData), @@ -433,7 +434,7 @@ func TestAccAWSS3BucketObject_updateSameFile(t *testing.T) { func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) { var originalObj, modifiedObj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial versioned object state") defer os.Remove(sourceInitial) @@ -447,7 +448,7 @@ func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceInitial), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, true, sourceInitial), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), 
testAccCheckAWSS3BucketObjectBody(&originalObj, "initial versioned object state"), @@ -455,7 +456,7 @@ func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceModified), + Config: testAccAWSS3BucketObjectConfig_updateable(rName, true, sourceModified), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj), testAccCheckAWSS3BucketObjectBody(&modifiedObj, "modified versioned object"), @@ -509,7 +510,7 @@ func TestAccAWSS3BucketObject_updatesWithVersioningViaAccessPoint(t *testing.T) func TestAccAWSS3BucketObject_kms(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) @@ -522,7 +523,7 @@ func TestAccAWSS3BucketObject_kms(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_withKMSId(rInt, source), + Config: testAccAWSS3BucketObjectConfig_withKMSId(rName, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectSSE(resourceName, "aws:kms"), @@ -536,7 +537,7 @@ func TestAccAWSS3BucketObject_kms(t *testing.T) { func TestAccAWSS3BucketObject_sse(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) @@ -549,7 +550,7 @@ func TestAccAWSS3BucketObject_sse(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_withSSE(rInt, source), + Config: testAccAWSS3BucketObjectConfig_withSSE(rName, source), Check: resource.ComposeTestCheckFunc( 
testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectSSE(resourceName, "AES256"), @@ -563,7 +564,7 @@ func TestAccAWSS3BucketObject_sse(t *testing.T) { func TestAccAWSS3BucketObject_acl(t *testing.T) { var obj1, obj2, obj3 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -572,7 +573,7 @@ func TestAccAWSS3BucketObject_acl(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_acl(rInt, "some_bucket_content", "private"), + Config: testAccAWSS3BucketObjectConfig_acl(rName, "some_bucket_content", "private"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "some_bucket_content"), @@ -581,7 +582,7 @@ func TestAccAWSS3BucketObject_acl(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_acl(rInt, "some_bucket_content", "public-read"), + Config: testAccAWSS3BucketObjectConfig_acl(rName, "some_bucket_content", "public-read"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj2), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1), @@ -591,7 +592,7 @@ func TestAccAWSS3BucketObject_acl(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_acl(rInt, "changed_some_bucket_content", "private"), + Config: testAccAWSS3BucketObjectConfig_acl(rName, "changed_some_bucket_content", "private"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj3), testAccCheckAWSS3BucketObjectVersionIdDiffers(&obj3, &obj2), @@ -605,7 +606,7 @@ func TestAccAWSS3BucketObject_acl(t *testing.T) { } func TestAccAWSS3BucketObject_metadata(t *testing.T) { - rInt := acctest.RandInt() + rName := 
acctest.RandomWithPrefix("tf-acc-test") var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" @@ -616,7 +617,7 @@ func TestAccAWSS3BucketObject_metadata(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_withMetadata(rInt, "key1", "value1", "key2", "value2"), + Config: testAccAWSS3BucketObjectConfig_withMetadata(rName, "key1", "value1", "key2", "value2"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "metadata.%", "2"), @@ -625,7 +626,7 @@ func TestAccAWSS3BucketObject_metadata(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withMetadata(rInt, "key1", "value1updated", "key3", "value3"), + Config: testAccAWSS3BucketObjectConfig_withMetadata(rName, "key1", "value1updated", "key3", "value3"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "metadata.%", "2"), @@ -634,7 +635,7 @@ func TestAccAWSS3BucketObject_metadata(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfigEmpty(rInt), + Config: testAccAWSS3BucketObjectConfigEmpty(rName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "metadata.%", "0"), @@ -647,7 +648,7 @@ func TestAccAWSS3BucketObject_metadata(t *testing.T) { func TestAccAWSS3BucketObject_storageClass(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -657,7 +658,7 @@ func TestAccAWSS3BucketObject_storageClass(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfigContent(rInt, 
"some_bucket_content"), + Config: testAccAWSS3BucketObjectConfigContent(rName, "some_bucket_content"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "storage_class", "STANDARD"), @@ -665,7 +666,7 @@ func TestAccAWSS3BucketObject_storageClass(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "REDUCED_REDUNDANCY"), + Config: testAccAWSS3BucketObjectConfig_storageClass(rName, "REDUCED_REDUNDANCY"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "storage_class", "REDUCED_REDUNDANCY"), @@ -673,7 +674,7 @@ func TestAccAWSS3BucketObject_storageClass(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "GLACIER"), + Config: testAccAWSS3BucketObjectConfig_storageClass(rName, "GLACIER"), Check: resource.ComposeTestCheckFunc( // Can't GetObject on an object in Glacier without restoring it. resource.TestCheckResourceAttr(resourceName, "storage_class", "GLACIER"), @@ -681,7 +682,7 @@ func TestAccAWSS3BucketObject_storageClass(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "INTELLIGENT_TIERING"), + Config: testAccAWSS3BucketObjectConfig_storageClass(rName, "INTELLIGENT_TIERING"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), resource.TestCheckResourceAttr(resourceName, "storage_class", "INTELLIGENT_TIERING"), @@ -689,7 +690,7 @@ func TestAccAWSS3BucketObject_storageClass(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "DEEP_ARCHIVE"), + Config: testAccAWSS3BucketObjectConfig_storageClass(rName, "DEEP_ARCHIVE"), Check: resource.ComposeTestCheckFunc( // Can't GetObject on an object in DEEP_ARCHIVE without restoring it. 
resource.TestCheckResourceAttr(resourceName, "storage_class", "DEEP_ARCHIVE"), @@ -960,10 +961,10 @@ func TestAccAWSS3BucketObject_tagsMultipleSlashes(t *testing.T) { }) } -func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithNone(t *testing.T) { +func TestAccAWSS3BucketObject_objectLockLegalHoldStartWithNone(t *testing.T) { var obj1, obj2, obj3 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -972,7 +973,7 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithNone(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_noObjectLockLegalHold(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_noObjectLockLegalHold(rName, "stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"), @@ -982,7 +983,7 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithNone(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rInt, "stuff", "ON"), + Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rName, "stuff", "ON"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj2), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1), @@ -994,7 +995,7 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithNone(t *testing.T) { }, // Remove legal hold but create a new object version to test force_destroy { - Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rInt, "changed stuff", "OFF"), + Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rName, "changed stuff", "OFF"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj3), 
testAccCheckAWSS3BucketObjectVersionIdDiffers(&obj3, &obj2), @@ -1008,10 +1009,10 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithNone(t *testing.T) { }) } -func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithOn(t *testing.T) { +func TestAccAWSS3BucketObject_objectLockLegalHoldStartWithOn(t *testing.T) { var obj1, obj2 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -1020,7 +1021,7 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithOn(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rInt, "stuff", "ON"), + Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rName, "stuff", "ON"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"), @@ -1030,7 +1031,7 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithOn(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rInt, "stuff", "OFF"), + Config: testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rName, "stuff", "OFF"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj2), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1), @@ -1044,10 +1045,10 @@ func TestAccAWSS3BucketObject_ObjectLockLegalHoldStartWithOn(t *testing.T) { }) } -func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithNone(t *testing.T) { +func TestAccAWSS3BucketObject_objectLockRetentionStartWithNone(t *testing.T) { var obj1, obj2, obj3 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") retainUntilDate := time.Now().UTC().AddDate(0, 0, 
10).Format(time.RFC3339) resource.ParallelTest(t, resource.TestCase{ @@ -1057,7 +1058,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithNone(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rName, "stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"), @@ -1067,7 +1068,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithNone(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rInt, "stuff", retainUntilDate), + Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rName, "stuff", retainUntilDate), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj2), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1), @@ -1079,7 +1080,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithNone(t *testing.T) { }, // Remove retention period but create a new object version to test force_destroy { - Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rInt, "changed stuff"), + Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rName, "changed stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj3), testAccCheckAWSS3BucketObjectVersionIdDiffers(&obj3, &obj2), @@ -1093,10 +1094,10 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithNone(t *testing.T) { }) } -func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { +func TestAccAWSS3BucketObject_objectLockRetentionStartWithSet(t *testing.T) { var obj1, obj2, obj3, obj4 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") retainUntilDate1 := 
time.Now().UTC().AddDate(0, 0, 20).Format(time.RFC3339) retainUntilDate2 := time.Now().UTC().AddDate(0, 0, 30).Format(time.RFC3339) retainUntilDate3 := time.Now().UTC().AddDate(0, 0, 10).Format(time.RFC3339) @@ -1108,7 +1109,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rInt, "stuff", retainUntilDate1), + Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rName, "stuff", retainUntilDate1), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"), @@ -1118,7 +1119,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rInt, "stuff", retainUntilDate2), + Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rName, "stuff", retainUntilDate2), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj2), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1), @@ -1129,7 +1130,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rInt, "stuff", retainUntilDate3), + Config: testAccAWSS3BucketObjectConfig_withObjectLockRetention(rName, "stuff", retainUntilDate3), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj3), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj3, &obj2), @@ -1140,7 +1141,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { ), }, { - Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_noObjectLockRetention(rName, "stuff"), Check: resource.ComposeTestCheckFunc( 
testAccCheckAWSS3BucketObjectExists(resourceName, &obj4), testAccCheckAWSS3BucketObjectVersionIdEquals(&obj4, &obj3), @@ -1157,7 +1158,7 @@ func TestAccAWSS3BucketObject_ObjectLockRetentionStartWithSet(t *testing.T) { func TestAccAWSS3BucketObject_objectBucketKeyEnabled(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -1166,7 +1167,7 @@ func TestAccAWSS3BucketObject_objectBucketKeyEnabled(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_objectBucketKeyEnabled(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_objectBucketKeyEnabled(rName, "stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "stuff"), @@ -1180,7 +1181,7 @@ func TestAccAWSS3BucketObject_objectBucketKeyEnabled(t *testing.T) { func TestAccAWSS3BucketObject_bucketBucketKeyEnabled(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -1189,7 +1190,7 @@ func TestAccAWSS3BucketObject_bucketBucketKeyEnabled(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_bucketBucketKeyEnabled(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_bucketBucketKeyEnabled(rName, "stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "stuff"), @@ -1203,7 +1204,7 @@ func TestAccAWSS3BucketObject_bucketBucketKeyEnabled(t *testing.T) { func 
TestAccAWSS3BucketObject_defaultBucketSSE(t *testing.T) { var obj1 s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - rInt := acctest.RandInt() + rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -1212,7 +1213,7 @@ func TestAccAWSS3BucketObject_defaultBucketSSE(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_defaultBucketSSE(rInt, "stuff"), + Config: testAccAWSS3BucketObjectConfig_defaultBucketSSE(rName, "stuff"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj1), testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"), @@ -1535,114 +1536,114 @@ resource "aws_s3_bucket_object" "object" { `, bucket, key) } -func testAccAWSS3BucketObjectConfigEmpty(randInt int) string { +func testAccAWSS3BucketObjectConfigEmpty(rName string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" } -`, randInt) +`, rName) } -func testAccAWSS3BucketObjectConfigSource(randInt int, source string) string { +func testAccAWSS3BucketObjectConfigSource(rName string, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" source = %[2]q content_type = "binary/octet-stream" } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfig_withContentCharacteristics(randInt int, source string) string { +func 
testAccAWSS3BucketObjectConfig_withContentCharacteristics(rName string, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" source = %[2]q content_language = "en" content_type = "binary/octet-stream" website_redirect = "http://google.com" } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfigContent(randInt int, content string) string { +func testAccAWSS3BucketObjectConfigContent(rName string, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q } -`, randInt, content) +`, rName, content) } -func testAccAWSS3BucketObjectEtagEncryption(randInt int, source string) string { +func testAccAWSS3BucketObjectEtagEncryption(rName string, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" server_side_encryption = "AES256" source = %[2]q etag = filemd5(%[2]q) } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfigContentBase64(randInt int, contentBase64 string) string { +func testAccAWSS3BucketObjectConfigContentBase64(rName string, contentBase64 string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" 
{ + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content_base64 = %[2]q } -`, randInt, contentBase64) +`, rName, contentBase64) } -func testAccAWSS3BucketObjectConfig_SourceHashTrigger(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName string, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket.bucket}" + bucket = aws_s3_bucket.test.bucket key = "test-key" - source = "%s" - source_hash = "${filemd5("%s")}" + source = %[2]q + source_hash = filemd5(%[2]q) } -`, randInt, source, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfig_updateable(randInt int, bucketVersioning bool, source string) string { +func testAccAWSS3BucketObjectConfig_updateable(rName string, bucketVersioning bool, source string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket_3" { - bucket = "tf-object-test-bucket-%[1]d" + bucket = %[1]q versioning { enabled = %[2]t @@ -1655,7 +1656,7 @@ resource "aws_s3_bucket_object" "object" { source = %[3]q etag = filemd5(%[3]q) } -`, randInt, bucketVersioning, source) +`, rName, bucketVersioning, source) } func testAccAWSS3BucketObjectConfig_updateableViaAccessPoint(rName string, bucketVersioning bool, source string) string { @@ -1682,42 +1683,42 @@ resource "aws_s3_bucket_object" "test" { `, rName, bucketVersioning, source) } -func testAccAWSS3BucketObjectConfig_withKMSId(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_withKMSId(rName string, source string) string { return fmt.Sprintf(` resource "aws_kms_key" "kms_key_1" {} -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource 
"aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" source = %[2]q kms_key_id = aws_kms_key.kms_key_1.arn } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfig_withSSE(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_withSSE(rName string, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" source = %[2]q server_side_encryption = "AES256" } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfig_acl(randInt int, content, acl string) string { +func testAccAWSS3BucketObjectConfig_acl(rName string, content, acl string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q versioning { enabled = true @@ -1725,32 +1726,32 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q acl = %[3]q } -`, randInt, content, acl) +`, rName, content, acl) } -func testAccAWSS3BucketObjectConfig_storageClass(randInt int, storage_class string) string { +func testAccAWSS3BucketObjectConfig_storageClass(rName string, storage_class string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = 
"some_bucket_content" storage_class = %[2]q } -`, randInt, storage_class) +`, rName, storage_class) } func testAccAWSS3BucketObjectConfig_withTags(rName, key, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { +resource "aws_s3_bucket" "test" { bucket = %[1]q versioning { @@ -1759,7 +1760,7 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = %[2]q content = %[3]q @@ -1774,7 +1775,7 @@ resource "aws_s3_bucket_object" "object" { func testAccAWSS3BucketObjectConfig_withUpdatedTags(rName, key, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { +resource "aws_s3_bucket" "test" { bucket = %[1]q versioning { @@ -1783,7 +1784,7 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = %[2]q content = %[3]q @@ -1799,7 +1800,7 @@ resource "aws_s3_bucket_object" "object" { func testAccAWSS3BucketObjectConfig_withNoTags(rName, key, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { +resource "aws_s3_bucket" "test" { bucket = %[1]q versioning { @@ -1808,21 +1809,21 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = %[2]q content = %[3]q } `, rName, key, content) } -func testAccAWSS3BucketObjectConfig_withMetadata(randInt int, metadataKey1, metadataValue1, metadataKey2, metadataValue2 string) string { +func testAccAWSS3BucketObjectConfig_withMetadata(rName string, metadataKey1, metadataValue1, metadataKey2, metadataValue2 string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = 
%[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" metadata = { @@ -1830,13 +1831,13 @@ resource "aws_s3_bucket_object" "object" { %[4]s = %[5]q } } -`, randInt, metadataKey1, metadataValue1, metadataKey2, metadataValue2) +`, rName, metadataKey1, metadataValue1, metadataKey2, metadataValue2) } -func testAccAWSS3BucketObjectConfig_noObjectLockLegalHold(randInt int, content string) string { +func testAccAWSS3BucketObjectConfig_noObjectLockLegalHold(rName string, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q versioning { enabled = true @@ -1848,18 +1849,18 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q force_destroy = true } -`, randInt, content) +`, rName, content) } -func testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(randInt int, content, legalHoldStatus string) string { +func testAccAWSS3BucketObjectConfig_withObjectLockLegalHold(rName string, content, legalHoldStatus string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q versioning { enabled = true @@ -1871,19 +1872,19 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q object_lock_legal_hold_status = %[3]q force_destroy = true } -`, randInt, content, legalHoldStatus) +`, rName, content, legalHoldStatus) } -func testAccAWSS3BucketObjectConfig_noObjectLockRetention(randInt int, content string) string { +func 
testAccAWSS3BucketObjectConfig_noObjectLockRetention(rName string, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q versioning { enabled = true @@ -1895,18 +1896,18 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q force_destroy = true } -`, randInt, content) +`, rName, content) } -func testAccAWSS3BucketObjectConfig_withObjectLockRetention(randInt int, content, retainUntilDate string) string { +func testAccAWSS3BucketObjectConfig_withObjectLockRetention(rName string, content, retainUntilDate string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q versioning { enabled = true @@ -1918,17 +1919,17 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %[2]q force_destroy = true object_lock_mode = "GOVERNANCE" object_lock_retain_until_date = %[3]q } -`, randInt, content, retainUntilDate) +`, rName, content, retainUntilDate) } -func testAccAWSS3BucketObjectConfig_NonVersioned(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_nonVersioned(rName string, source string) string { policy := `{ "Version": "2012-10-17", "Statement": [ @@ -1952,7 +1953,7 @@ func testAccAWSS3BucketObjectConfig_NonVersioned(randInt int, source string) str return testAccProviderConfigAssumeRolePolicy(policy) + fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket_3" { - bucket = "tf-object-test-bucket-%[1]d" + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { @@ -1961,39 +1962,39 @@ resource "aws_s3_bucket_object" 
"object" { source = %[2]q etag = filemd5(%[2]q) } -`, randInt, source) +`, rName, source) } -func testAccAWSS3BucketObjectConfig_objectBucketKeyEnabled(randInt int, content string) string { +func testAccAWSS3BucketObjectConfig_objectBucketKeyEnabled(rName string, content string) string { return fmt.Sprintf(` resource "aws_kms_key" "test" { description = "Encrypts test bucket objects" deletion_window_in_days = 7 } -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %q kms_key_id = aws_kms_key.test.arn bucket_key_enabled = true } -`, randInt, content) +`, rName, content) } -func testAccAWSS3BucketObjectConfig_bucketBucketKeyEnabled(randInt int, content string) string { +func testAccAWSS3BucketObjectConfig_bucketBucketKeyEnabled(rName string, content string) string { return fmt.Sprintf(` resource "aws_kms_key" "test" { description = "Encrypts test bucket objects" deletion_window_in_days = 7 } -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%[1]d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q server_side_encryption_configuration { rule { @@ -2007,22 +2008,22 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" content = %q } -`, randInt, content) +`, rName, content) } -func testAccAWSS3BucketObjectConfig_defaultBucketSSE(randInt int, content string) string { +func testAccAWSS3BucketObjectConfig_defaultBucketSSE(rName string, content string) string { return fmt.Sprintf(` resource "aws_kms_key" "test" { description = "Encrypts test bucket objects" deletion_window_in_days = 7 } -resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" 
+resource "aws_s3_bucket" "test" { + bucket = %[1]q server_side_encryption_configuration { rule { apply_server_side_encryption_by_default { @@ -2034,9 +2035,9 @@ resource "aws_s3_bucket" "object_bucket" { } resource "aws_s3_bucket_object" "object" { - bucket = aws_s3_bucket.object_bucket.bucket + bucket = aws_s3_bucket.test.bucket key = "test-key" - content = %q + content = %[2]q } -`, randInt, content) +`, rName, content) } From 2ee18a55fb851c8592d4394996b2e87a20825d7f Mon Sep 17 00:00:00 2001 From: Dirk Avery Date: Mon, 12 Jul 2021 22:40:21 -0400 Subject: [PATCH 3/6] tests/r/s3_bucket_object: Fix tests --- aws/resource_aws_s3_bucket_object_test.go | 35 +++++++++++++---------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/aws/resource_aws_s3_bucket_object_test.go b/aws/resource_aws_s3_bucket_object_test.go index 561aa388a3fd..33fc74424fce 100644 --- a/aws/resource_aws_s3_bucket_object_test.go +++ b/aws/resource_aws_s3_bucket_object_test.go @@ -4,7 +4,6 @@ import ( "encoding/base64" "fmt" "io" - "io/ioutil" "log" "os" "reflect" @@ -251,15 +250,21 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) { func TestAccAWSS3BucketObject_sourceHashTrigger(t *testing.T) { var obj, updated_obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" - source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") - rewriteSourceF := func(*terraform.State) error { - if err := ioutil.WriteFile(source, []byte("{any other thing will do }"), 0644); err != nil { - os.Remove(source) + rName := acctest.RandomWithPrefix("tf-acc-test") + + startingData := "Ebben!" 
+ changingData := "Ne andrĂ² lontana" + + filename := testAccAWSS3BucketObjectCreateTempFile(t, startingData) + defer os.Remove(filename) + + rewriteFile := func(*terraform.State) error { + if err := os.WriteFile(filename, []byte(changingData), 0644); err != nil { + os.Remove(filename) t.Fatal(err) } return nil } - rName := acctest.RandomWithPrefix("tf-acc-test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -268,22 +273,22 @@ func TestAccAWSS3BucketObject_sourceHashTrigger(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, source), + Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, filename), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), - testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), - resource.TestCheckResourceAttr(resourceName, "source_hash", "7b006ff4d70f68cc65061acf2f802e6f"), - rewriteSourceF, + testAccCheckAWSS3BucketObjectBody(&obj, "Ebben!"), + resource.TestCheckResourceAttr(resourceName, "source_hash", "7c7e02a79f28968882bb1426c8f8bfc6"), + rewriteFile, ), ExpectNonEmptyPlan: true, }, { PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, source), + Config: testAccAWSS3BucketObjectConfig_sourceHashTrigger(rName, filename), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &updated_obj), - testAccCheckAWSS3BucketObjectBody(&updated_obj, "{any other thing will do }"), - resource.TestCheckResourceAttr(resourceName, "source_hash", "77a736aa9e04d0dc96b9b30894963983"), + testAccCheckAWSS3BucketObjectBody(&updated_obj, "Ne andrĂ² lontana"), + resource.TestCheckResourceAttr(resourceName, "source_hash", "cffc5e20de2d21764145b1124c9b337b"), ), }, }, @@ -1260,12 +1265,12 @@ func TestAccAWSS3BucketObject_ignoreTags(t *testing.T) { testAccCheckAWSS3BucketObjectExists(resourceName, &obj), 
testAccCheckAWSS3BucketObjectBody(&obj, "stuff"), resource.TestCheckResourceAttr(resourceName, "tags.%", "3"), - resource.TestCheckResourceAttr(resourceName, "tags.Key1", "AAA"), + resource.TestCheckResourceAttr(resourceName, "tags.Key1", "A@AA"), resource.TestCheckResourceAttr(resourceName, "tags.Key2", "BBB"), resource.TestCheckResourceAttr(resourceName, "tags.Key3", "CCC"), testAccCheckAWSS3BucketObjectCheckTags(resourceName, map[string]string{ "ignorekey1": "ignorevalue1", - "Key1": "AAA", + "Key1": "A@AA", "Key2": "BBB", "Key3": "CCC", }), From 73fad554a32a50a6937d0ba12de30709655d36c0 Mon Sep 17 00:00:00 2001 From: Dirk Avery Date: Mon, 12 Jul 2021 22:40:43 -0400 Subject: [PATCH 4/6] docs/r/s3_bucket_object: Clean up documentation --- website/docs/r/s3_bucket_object.html.markdown | 65 +++++++++---------- 1 file changed, 31 insertions(+), 34 deletions(-) diff --git a/website/docs/r/s3_bucket_object.html.markdown b/website/docs/r/s3_bucket_object.html.markdown index f9934409c5aa..9d53f2509986 100644 --- a/website/docs/r/s3_bucket_object.html.markdown +++ b/website/docs/r/s3_bucket_object.html.markdown @@ -113,37 +113,35 @@ resource "aws_s3_bucket_object" "examplebucket_object" { -> **Note:** If you specify `content_encoding` you are responsible for encoding the body appropriately. `source`, `content`, and `content_base64` all expect already encoded/compressed bytes. -The following arguments are supported: +The following arguments are required: -* `bucket` - (Required) The name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified. -* `key` - (Required) The name of the object once it is in the bucket. -* `source` - (Optional, conflicts with `content` and `content_base64`) The path to a file that will be read and uploaded as raw bytes for the object content. 
-* `content` - (Optional, conflicts with `source` and `content_base64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text. -* `content_base64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file. -* `acl` - (Optional) The [canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Defaults to `private`. -* `cache_control` - (Optional) Specifies caching behavior along the request/reply chain Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details. -* `content_disposition` - (Optional) Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information. -* `content_encoding` - (Optional) Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. -* `content_language` - (Optional) The language the content is in e.g. en-US or en-GB. -* `content_type` - (Optional) A standard MIME type describing the format of the object data, e.g. application/octet-stream. All Valid MIME Types are valid for this input. 
-* `website_redirect` - (Optional) Specifies a target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html). -* `storage_class` - (Optional) Specifies the desired [Storage Class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) -for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`". -* `etag` - (Optional) Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). -This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`. -* `source_hash` - (Optional) Used to trigger updates based on source local changes. If used, must be set to `${filemd5("path/to/source")}` (Terraform 0.11.12 or later). This differs from `etag` since the value is stored in the state and does not come from AWS. Especially useful to address `etag` KMS encryption limitations. -* `server_side_encryption` - (Optional) Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`". -* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the -`aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. Terraform will only perform drift detection if a configuration value -is provided. +* `bucket` - (Required) Name of the bucket to put the file in. Alternatively, an [S3 access point](https://docs.aws.amazon.com/AmazonS3/latest/dev/using-access-points.html) ARN can be specified. +* `key` - (Required) Name of the object once it is in the bucket. 
+ +The following arguments are optional: + +* `acl` - (Optional) [Canned ACL](https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl) to apply. Valid values are `private`, `public-read`, `public-read-write`, `aws-exec-read`, `authenticated-read`, `bucket-owner-read`, and `bucket-owner-full-control`. Defaults to `private`. * `bucket_key_enabled` - (Optional) Whether or not to use [Amazon S3 Bucket Keys](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-key.html) for SSE-KMS. -* `metadata` - (Optional) A map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase label are currently supported by the AWS Go API). -* `tags` - (Optional) A map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. -* `force_destroy` - (Optional) Allow the object to be deleted by removing any legal hold on any object version. -Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled. -* `object_lock_legal_hold_status` - (Optional) The [legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`. -* `object_lock_mode` - (Optional) The object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`. -* `object_lock_retain_until_date` - (Optional) The date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods). 
+* `cache_control` - (Optional) Caching behavior along the request/reply chain Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details. +* `content_base64` - (Optional, conflicts with `source` and `content`) Base64-encoded data that will be decoded and uploaded as raw bytes for the object content. This allows safely uploading non-UTF8 binary data, but is recommended only for small content such as the result of the `gzipbase64` function with small text strings. For larger objects, use `source` to stream the content from a disk file. +* `content_disposition` - (Optional) Presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information. +* `content_encoding` - (Optional) Content encodings that have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information. +* `content_language` - (Optional) Language the content is in e.g. en-US or en-GB. +* `content_type` - (Optional) Standard MIME type describing the format of the object data, e.g. application/octet-stream. All Valid MIME Types are valid for this input. +* `content` - (Optional, conflicts with `source` and `content_base64`) Literal string value to use as the object content, which will be uploaded as UTF-8-encoded text. +* `etag` - (Optional) Triggers updates when the value changes. The only meaningful value is `filemd5("path/to/file")` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier). This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"` (see `source_hash` instead). 
+* `force_destroy` - (Optional) Whether to allow the object to be deleted by removing any legal hold on any object version. Default is `false`. This value should be set to `true` only if the bucket has S3 object lock enabled. +* `kms_key_id` - (Optional) ARN of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the `aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. Terraform will only perform drift detection if a configuration value is provided. +* `metadata` - (Optional) Map of keys/values to provision metadata (will be automatically prefixed by `x-amz-meta-`, note that only lowercase label are currently supported by the AWS Go API). +* `object_lock_legal_hold_status` - (Optional) [Legal hold](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-legal-holds) status that you want to apply to the specified object. Valid values are `ON` and `OFF`. +* `object_lock_mode` - (Optional) Object lock [retention mode](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-modes) that you want to apply to this object. Valid values are `GOVERNANCE` and `COMPLIANCE`. +* `object_lock_retain_until_date` - (Optional) Date and time, in [RFC3339 format](https://tools.ietf.org/html/rfc3339#section-5.8), when this object's object lock will [expire](https://docs.aws.amazon.com/AmazonS3/latest/dev/object-lock-overview.html#object-lock-retention-periods). +* `server_side_encryption` - (Optional) Server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`". +* `source_hash` - (Optional) Triggers updates like `etag` but useful to address `etag` encryption limitations. Set using `filemd5("path/to/source")` (Terraform 0.11.12 or later). (The value is only stored in state and not saved by AWS.) 
+* `source` - (Optional, conflicts with `content` and `content_base64`) Path to a file that will be read and uploaded as raw bytes for the object content. +* `storage_class` - (Optional) [Storage Class](http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html) for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`". +* `tags` - (Optional) Map of tags to assign to the object. If configured with a provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. +* `website_redirect` - (Optional) Target URL for [website redirect](http://docs.aws.amazon.com/AmazonS3/latest/dev/how-to-page-redirect.html). If no content is provided through `source`, `content` or `content_base64`, then the object will be empty. @@ -153,8 +151,7 @@ If no content is provided through `source`, `content` or `content_base64`, then In addition to all arguments above, the following attributes are exported: -* `id` - the `key` of the resource supplied above -* `etag` - the ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). -* `tags_all` - A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). 
-* `version_id` - A unique version ID value for the object, if bucket versioning -is enabled. +* `etag` - ETag generated for the object (an MD5 sum of the object content). For plaintext objects or objects encrypted with an AWS-managed key, the hash is an MD5 digest of the object data. For objects encrypted with a KMS key or objects created by either the Multipart Upload or Part Copy operation, the hash is not an MD5 digest, regardless of the method of encryption. More information on possible values can be found on [Common Response Headers](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html). +* `id` - `key` of the resource supplied above +* `tags_all` - Map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](/docs/providers/aws/index.html#default_tags-configuration-block). +* `version_id` - Unique version ID value for the object, if bucket versioning is enabled. \ No newline at end of file From 76e6415a0d6ce2a247b236e8eb131a0fc7667d27 Mon Sep 17 00:00:00 2001 From: Dirk Avery Date: Mon, 12 Jul 2021 22:44:14 -0400 Subject: [PATCH 5/6] r/s3_bucket_object: Add changelog --- .changelog/11522.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .changelog/11522.txt diff --git a/.changelog/11522.txt b/.changelog/11522.txt new file mode 100644 index 000000000000..1eaade7f178a --- /dev/null +++ b/.changelog/11522.txt @@ -0,0 +1,3 @@ +```release-note:enhancement +resource/aws_s3_bucket_object: Add `source_hash` argument to complement `etag`'s encryption limitations +``` \ No newline at end of file From efc078e6fe0302b9df2a52fdd07e0a030fb6f012 Mon Sep 17 00:00:00 2001 From: Dirk Avery Date: Mon, 12 Jul 2021 22:47:18 -0400 Subject: [PATCH 6/6] tests/r/s3_bucket_object: Add errorcheck --- aws/resource_aws_s3_bucket_object_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/aws/resource_aws_s3_bucket_object_test.go b/aws/resource_aws_s3_bucket_object_test.go index 
33fc74424fce..e7d2c0d6dae9 100644 --- a/aws/resource_aws_s3_bucket_object_test.go +++ b/aws/resource_aws_s3_bucket_object_test.go @@ -268,6 +268,7 @@ func TestAccAWSS3BucketObject_sourceHashTrigger(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, s3.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{