add preliminary data sources for Spaces buckets (#416)

* checkpoint

* fix digitalocean_spaces_bucket datasource + test

* do not try to delete empty IDs

* add comment about race condition

* fix import ordering

* add digitalocean_spaces_buckets data source

* switch to use flattenSpacesBucket in data source

* update docs

* Fix sort example for data.digitalocean_spaces_buckets

Co-authored-by: Andrew Starr-Bochicchio <a.starr.b@gmail.com>
Tom Dyas authored on 2020-04-22 08:15:42 -07:00 (committed by GitHub)
parent f8fc9aacb6
commit b9e0615705
10 changed files with 515 additions and 0 deletions
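
For orientation, a minimal sketch of how the two new data sources are consumed (bucket name and region are placeholders; the full documentation is added later in this commit):

```hcl
# Look up a single bucket by name and region.
data "digitalocean_spaces_bucket" "example" {
  name   = "my-spaces-bucket"
  region = "nyc3"
}

# List buckets, optionally filtered and sorted.
data "digitalocean_spaces_buckets" "nyc3" {
  filter {
    key    = "region"
    values = ["nyc3"]
  }
}
```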

@@ -0,0 +1,72 @@
package digitalocean
import (
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceDigitalOceanSpacesBucket() *schema.Resource {
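// Start from the shared Spaces bucket schema and mark every attribute as
// computed; name and region are then flipped back to required user inputs.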
recordSchema := spacesBucketSchema()
for _, f := range recordSchema {
f.Computed = true
}
recordSchema["region"].Required = true
recordSchema["region"].Computed = false
recordSchema["name"].Required = true
recordSchema["name"].Computed = false
return &schema.Resource{
Read: dataSourceDigitalOceanSpacesBucketRead,
Schema: recordSchema,
}
}
func dataSourceDigitalOceanSpacesBucketRead(d *schema.ResourceData, meta interface{}) error {
region := d.Get("region").(string)
name := d.Get("name").(string)
client, err := meta.(*CombinedConfig).spacesClient(region)
if err != nil {
return fmt.Errorf("Error reading bucket: %s", err)
}
svc := s3.New(client)
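// Confirm the bucket exists with a HEAD request, retrying while the Spaces
// API still reports NoSuchBucket.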
_, err = retryOnAwsCode("NoSuchBucket", func() (interface{}, error) {
return svc.HeadBucket(&s3.HeadBucketInput{
Bucket: aws.String(name),
})
})
if err != nil {
if awsError, ok := err.(awserr.RequestFailure); ok && awsError.StatusCode() == 404 {
d.SetId("")
return fmt.Errorf("Spaces Bucket (%s) not found", name)
} else {
// some of the AWS SDK's errors can be empty strings, so let's add
// some additional context.
return fmt.Errorf("error reading Spaces bucket \"%s\": %s", d.Id(), err)
}
}
metadata := bucketMetadataStruct{
name: name,
region: region,
}
flattenedBucket, err := flattenSpacesBucket(&metadata, meta)
if err != nil {
return err
}
if err := setResourceDataFromMap(d, flattenedBucket); err != nil {
return err
}
d.SetId(name)
return nil
}
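
The Read function relies on the provider's existing setResourceDataFromMap helper, which is not part of this diff. Assuming it simply copies each flattened attribute into state, it behaves roughly like this sketch:

```go
// Hypothetical sketch of the existing helper (not shown in this diff): write
// each flattened attribute into Terraform state, surfacing the first failure.
// It would live in the same package and reuse its fmt and schema imports.
func setResourceDataFromMap(d *schema.ResourceData, m map[string]interface{}) error {
	for key, value := range m {
		if err := d.Set(key, value); err != nil {
			return fmt.Errorf("unable to set `%s` attribute: %s", key, err)
		}
	}
	return nil
}
```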

@@ -0,0 +1,80 @@
package digitalocean
import (
"fmt"
"regexp"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceDigitalOceanSpacesBucket_Basic(t *testing.T) {
rInt := acctest.RandInt()
bucketName := testAccBucketName(rInt)
bucketRegion := "nyc3"
resourceConfig := fmt.Sprintf(`
resource "digitalocean_spaces_bucket" "bucket" {
name = "%s"
region = "%s"
}
`, bucketName, bucketRegion)
datasourceConfig := fmt.Sprintf(`
data "digitalocean_spaces_bucket" "bucket" {
name = "%s"
region = "%s"
}
`, bucketName, bucketRegion)
config1 := resourceConfig
config2 := config1 + datasourceConfig
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDigitalOceanBucketDestroy,
Steps: []resource.TestStep{
{
Config: config1,
},
{
Config: config2,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("data.digitalocean_spaces_bucket.bucket", "name", bucketName),
resource.TestCheckResourceAttr("data.digitalocean_spaces_bucket.bucket", "region", bucketRegion),
resource.TestCheckResourceAttr("data.digitalocean_spaces_bucket.bucket", "bucket_domain_name", bucketDomainName(bucketName, bucketRegion)),
resource.TestCheckResourceAttr("data.digitalocean_spaces_bucket.bucket", "urn", fmt.Sprintf("do:space:%s", bucketName)),
),
},
{
// Remove the datasource from the config so Terraform trying to refresh it does not race with
// deleting the bucket resource. By removing the datasource from the config here, this ensures
// that the bucket will be deleted after the datasource has been removed from the state.
Config: config1,
},
},
})
}
func TestAccDataSourceDigitalOceanSpacesBucket_NotFound(t *testing.T) {
datasourceConfig := `
data "digitalocean_spaces_bucket" "bucket" {
name = "no-such-bucket"
region = "nyc3"
}
`
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDigitalOceanBucketDestroy,
Steps: []resource.TestStep{
{
Config: datasourceConfig,
ExpectError: regexp.MustCompile("Spaces Bucket.*not found"),
},
},
})
}
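
Both test files also reference the existing bucketDomainName helper, which is not part of this diff. Judging from the FQDN format documented below, it presumably reduces to:

```go
// Hypothetical sketch (not in this diff): Spaces bucket FQDNs take the
// <name>.<region>.digitaloceanspaces.com form noted in the data source docs.
func bucketDomainName(bucket, region string) string {
	return fmt.Sprintf("%s.%s.digitaloceanspaces.com", bucket, region)
}
```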

@@ -0,0 +1,29 @@
package digitalocean
import (
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-digitalocean/internal/datalist"
)
func dataSourceDigitalOceanSpacesBuckets() *schema.Resource {
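// Delegate to the shared datalist helper: records may be filtered and sorted
// on the listed keys and are exposed under the `buckets` attribute.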
dataListConfig := &datalist.ResourceConfig{
RecordSchema: spacesBucketSchema(),
FilterKeys: []string{
"bucket_domain_name",
"name",
"region",
"urn",
},
SortKeys: []string{
"bucket_domain_name",
"name",
"region",
"urn",
},
ResultAttributeName: "buckets",
FlattenRecord: flattenSpacesBucket,
GetRecords: getDigitalOceanBuckets,
}
return datalist.NewResource(dataListConfig)
}

@@ -0,0 +1,65 @@
package digitalocean
import (
"fmt"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceDigitalOceanSpacesBuckets_Basic(t *testing.T) {
bucketName1 := testAccBucketName(acctest.RandInt())
bucketRegion1 := "nyc3"
bucketName2 := testAccBucketName(acctest.RandInt())
bucketRegion2 := "ams3"
bucketsConfig := fmt.Sprintf(`
resource "digitalocean_spaces_bucket" "bucket1" {
name = "%s"
region = "%s"
}
resource "digitalocean_spaces_bucket" "bucket2" {
name = "%s"
region = "%s"
}
`, bucketName1, bucketRegion1, bucketName2, bucketRegion2)
datasourceConfig := fmt.Sprintf(`
data "digitalocean_spaces_buckets" "result" {
filter {
key = "name"
values = ["%s"]
}
}
`, bucketName1)
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDigitalOceanBucketDestroy,
Steps: []resource.TestStep{
{
Config: bucketsConfig,
},
{
Config: bucketsConfig + datasourceConfig,
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("data.digitalocean_spaces_buckets.result", "buckets.#", "1"),
resource.TestCheckResourceAttr("data.digitalocean_spaces_buckets.result", "buckets.0.name", bucketName1),
resource.TestCheckResourceAttr("data.digitalocean_spaces_buckets.result", "buckets.0.region", bucketRegion1),
resource.TestCheckResourceAttr("data.digitalocean_spaces_buckets.result", "buckets.0.bucket_domain_name", bucketDomainName(bucketName1, bucketRegion1)),
resource.TestCheckResourceAttr("data.digitalocean_spaces_buckets.result", "buckets.0.urn", fmt.Sprintf("do:space:%s", bucketName1)),
),
},
{
// Remove the datasource from the config so Terraform trying to refresh it does not race with
// deleting the bucket resources. By removing the datasource from the config here, this ensures
// that the buckets are deleted after the datasource has been removed from the state.
Config: bucketsConfig,
},
},
})
}

@@ -63,6 +63,8 @@ func Provider() terraform.ResourceProvider {
"digitalocean_region": dataSourceDigitalOceanRegion(),
"digitalocean_regions": dataSourceDigitalOceanRegions(),
"digitalocean_sizes": dataSourceDigitalOceanSizes(),
"digitalocean_spaces_bucket": dataSourceDigitalOceanSpacesBucket(),
"digitalocean_spaces_buckets": dataSourceDigitalOceanSpacesBuckets(),
"digitalocean_ssh_key": dataSourceDigitalOceanSSHKey(),
"digitalocean_tag": dataSourceDigitalOceanTag(),
"digitalocean_volume_snapshot": dataSourceDigitalOceanVolumeSnapshot(),

@@ -462,6 +462,10 @@ func testAccCheckDigitalOceanBucketDestroyWithProvider(s *terraform.State, provi
continue
}
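// Skip state entries that no longer have an ID; there is no bucket to look
// up or delete for them.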
if rs.Primary.ID == "" {
continue
}
svc, err := testAccGetS3ConnForSpacesBucket(rs)
if err != nil {
return fmt.Errorf("Unable to create S3 client: %v", err)

@@ -0,0 +1,141 @@
package digitalocean
import (
"fmt"
"log"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
type bucketMetadataStruct struct {
name string
region string
}
func spacesBucketSchema() map[string]*schema.Schema {
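// Only the types and descriptions are declared here; the data sources that
// reuse this schema decide which attributes are required or computed.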
return map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Description: "Bucket name",
},
"urn": {
Type: schema.TypeString,
Description: "the uniform resource name for the bucket",
},
"region": {
Type: schema.TypeString,
Description: "Bucket region",
},
"bucket_domain_name": {
Type: schema.TypeString,
Description: "The FQDN of the bucket",
},
}
}
// TODO: Hard-coding the Spaces regions for now given no way to filter out regions like nyc1
// which do not have spaces.
//
//func getSpacesRegions(meta interface{}) ([]string, error) {
// client := meta.(*CombinedConfig).godoClient()
//
// var spacesRegions []string
//
// opts := &godo.ListOptions{
// Page: 1,
// PerPage: 200,
// }
//
// for {
// regions, resp, err := client.Regions.List(context.Background(), opts)
//
// if err != nil {
// return nil, fmt.Errorf("Error retrieving regions: %s", err)
// }
//
// // TODO: Filter out regions without Spaces. It is unclear what feature is set
// // to indicate Spaces is available in a region because, for example, both
// // nyc1 and nyc3 have "storage" as a feature even though nyc3 is the Spaces region in NY.
// for _, region := range regions {
// spacesRegions = append(spacesRegions, region.Slug)
// }
//
// if resp.Links == nil || resp.Links.IsLastPage() {
// break
// }
//
// page, err := resp.Links.CurrentPage()
// if err != nil {
// return nil, fmt.Errorf("Error retrieving regions: %s", err)
// }
//
// opts.Page = page + 1
// }
//
// return spacesRegions, nil
//}
func getSpacesBucketsInRegion(meta interface{}, region string) ([]*s3.Bucket, error) {
client, err := meta.(*CombinedConfig).spacesClient(region)
if err != nil {
return nil, err
}
svc := s3.New(client)
input := s3.ListBucketsInput{}
output, err := svc.ListBuckets(&input)
if err != nil {
return nil, err
}
return output.Buckets, nil
}
func getDigitalOceanBuckets(meta interface{}) ([]interface{}, error) {
// Retrieve the regions with Spaces enabled.
//spacesRegions, err := getSpacesRegions(meta)
//if err != nil {
// return nil, err
//}
spacesRegions := []string{"ams3", "fra1", "nyc3", "sfo2", "sgp1"}
log.Printf("[DEBUG] spacesRegions = %v", spacesRegions)
var buckets []interface{}
for _, region := range spacesRegions {
bucketsInRegion, err := getSpacesBucketsInRegion(meta, region)
if err != nil {
return nil, err
}
log.Printf("[DEBUG] bucketsInRegion(%s) = %v", region, bucketsInRegion)
for _, bucketInRegion := range bucketsInRegion {
metadata := &bucketMetadataStruct{
name: *bucketInRegion.Name,
region: region,
}
buckets = append(buckets, metadata)
}
}
log.Printf("buckets = %v", buckets)
return buckets, nil
}
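// flattenSpacesBucket converts the internal bucket metadata into the flat map
// shape shared by the single-bucket data source and the datalist framework.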
func flattenSpacesBucket(rawBucketMetadata, meta interface{}) (map[string]interface{}, error) {
bucketMetadata := rawBucketMetadata.(*bucketMetadataStruct)
name := bucketMetadata.name
region := bucketMetadata.region
flattenedBucket := map[string]interface{}{}
flattenedBucket["name"] = name
flattenedBucket["region"] = region
flattenedBucket["bucket_domain_name"] = bucketDomainName(name, region)
flattenedBucket["urn"] = fmt.Sprintf("do:space:%s", name)
return flattenedBucket, nil
}

@@ -67,6 +67,12 @@
<li<%= sidebar_current("docs-do-datasource-sizes") %>>
<a href="/docs/providers/do/d/sizes.html">digitalocean_sizes</a>
</li>
<li<%= sidebar_current("docs-do-datasource-spaces-bucket") %>>
<a href="/docs/providers/do/d/spaces_bucket.html">digitalocean_spaces_bucket</a>
</li>
<li<%= sidebar_current("docs-do-datasource-spaces-buckets") %>>
<a href="/docs/providers/do/d/spaces_buckets.html">digitalocean_spaces_buckets</a>
</li>
<li<%= sidebar_current("docs-do-datasource-ssh-key") %>>
<a href="/docs/providers/do/d/ssh_key.html">digitalocean_ssh_key</a>
</li>

@@ -0,0 +1,43 @@
---
layout: "digitalocean"
page_title: "DigitalOcean: digitalocean_spaces_bucket"
sidebar_current: "docs-do-datasource-spaces-bucket"
description: |-
Get information on a Spaces bucket.
---
# digitalocean_spaces_bucket
Get information on a Spaces bucket for use in other resources. This is useful if the Spaces bucket in question
is not managed by Terraform or you need to utilize any of the bucket's data.
## Example Usage
Get the bucket by name:
```hcl
data "digitalocean_spaces_bucket" "example" {
name = "my-spaces-bucket"
region = "nyc3"
}
output "bucket_domain_name" {
value = data.digitalocean_spaces_bucket.example.bucket_domain_name
}
```
## Argument Reference
The following arguments must be provided:
* `name` - (Required) The name of the Spaces bucket.
* `region` - (Required) The slug of the region where the bucket is stored.
## Attributes Reference
The following attributes are exported:
* `name` - The name of the Spaces bucket
* `region` - The slug of the region where the bucket is stored.
* `urn` - The uniform resource name of the bucket
* `bucket_domain_name` - The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)

@@ -0,0 +1,73 @@
---
layout: "digitalocean"
page_title: "DigitalOcean: digitalocean_spaces_buckets"
sidebar_current: "docs-do-datasource-spaces-buckets"
description: |-
Retrieve information on Spaces buckets.
---
# digitalocean_spaces_buckets
Get information on Spaces buckets for use in other resources, with the ability to filter and sort the results.
If no filters are specified, all Spaces buckets will be returned.
Note: You can use the [`digitalocean_spaces_bucket`](/docs/providers/do/d/spaces_bucket.html) data source to
obtain metadata about a single bucket if you already know its `name` and `region`.
## Example Usage
Use the `filter` block with a `key` string and `values` list to filter buckets.
Get all buckets in a region:
```hcl
data "digitalocean_spaces_buckets" "nyc3" {
filter {
key = "region"
values = ["nyc3"]
}
}
```
You can sort the results as well:
```hcl
data "digitalocean_spaces_buckets" "nyc3" {
filter {
key = "region"
values = ["nyc3"]
}
sort {
key = "name"
direction = "desc"
}
}
```
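
The resulting `buckets` list can be consumed like any other list attribute. For example (the output name is illustrative):

```hcl
output "bucket_urns" {
  value = data.digitalocean_spaces_buckets.nyc3.buckets[*].urn
}
```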
## Argument Reference
* `filter` - (Optional) Filter the results.
The `filter` block is documented below.
* `sort` - (Optional) Sort the results.
The `sort` block is documented below.
`filter` supports the following arguments:
* `key` - (Required) Filter the buckets by this key. This may be one of `bucket_domain_name`, `name`, `region`, or `urn`.
* `values` - (Required) A list of values to match against the `key` field. Only retrieves buckets
where the `key` field takes on one or more of the values provided here.
`sort` supports the following arguments:
* `key` - (Required) Sort the buckets by this key. This may be one of `bucket_domain_name`, `name`, `region`, or `urn`.
* `direction` - (Required) The sort direction. This may be either `asc` or `desc`.
## Attributes Reference
* `buckets` - A list of Spaces buckets satisfying any `filter` and `sort` criteria. Each bucket has the following attributes:
- `name` - The name of the Spaces bucket
- `region` - The slug of the region where the bucket is stored.
- `urn` - The uniform resource name of the bucket
- `bucket_domain_name` - The FQDN of the bucket (e.g. bucket-name.nyc3.digitaloceanspaces.com)