From 19a9c30a2c2a8777d5ac8bea8b6cf64b297cf661 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Thu, 29 Feb 2024 16:10:42 +0000 Subject: [PATCH 01/15] Added sample buckets as a separate resource --- internal/api/sample_bucket/sample_bucket.go | 86 +++++ internal/api/sample_bucket/stats.go | 9 + internal/datasources/sample_buckets.go | 214 ++++++++++++ internal/provider/provider.go | 2 + internal/resources/sampleBucket.go | 351 ++++++++++++++++++++ internal/resources/sampleBucket_schema.go | 42 +++ internal/schema/sample_bucket.go | 205 ++++++++++++ internal/schema/sample_bucket_test.go | 70 ++++ 8 files changed, 979 insertions(+) create mode 100644 internal/api/sample_bucket/sample_bucket.go create mode 100644 internal/api/sample_bucket/stats.go create mode 100644 internal/datasources/sample_buckets.go create mode 100644 internal/resources/sampleBucket.go create mode 100644 internal/resources/sampleBucket_schema.go create mode 100644 internal/schema/sample_bucket.go create mode 100644 internal/schema/sample_bucket_test.go diff --git a/internal/api/sample_bucket/sample_bucket.go b/internal/api/sample_bucket/sample_bucket.go new file mode 100644 index 00000000..095db154 --- /dev/null +++ b/internal/api/sample_bucket/sample_bucket.go @@ -0,0 +1,86 @@ +package samplebucket + +// CreateSampleBucketRequest is the payload passed to V4 Capella Public API to create a bucket in a Capella cluster. +// Creates a new sample bucket configuration under a cluster. +// +// To learn more about bucket configuration, see https://docs.couchbase.com/server/current/manage/manage-settings/install-sample-buckets.html. 
+// +// In order to access this endpoint, the provided API key must have at least one of the following roles: +// +// Organization Owner +// Project Owner +// Project Manager +// To learn more, see https://docs.couchbase.com/cloud/organizations/organization-projects-overview.html +type CreateSampleBucketRequest struct { + // Name is the name of the bucket (up to 100 characters). + // This field cannot be changed later. The name should be according to the following rules: + // Characters used for the name should be in the ranges of A-Z, a-z, and 0-9; plus the underscore, period, dash, and percent characters. + // The name can be a maximum of 100 characters in length. + // The name cannot have 0 characters or empty. Minimum length of name is 1. + // The name cannot start with a . (period). + Name string `json:"name"` +} + +// CreateBucketSampleResponse is the response received from Capella V4 Public API on requesting to create a new bucket. +// Common response codes: 201, 403, 422, 429, 500. +type CreateSampleBucketResponse struct { + // Id is unique ID of the bucket created. + Id string `json:"bucketId"` + + // Name is the name of the cluster (up to 100 characters). + Name string `json:"name"` +} + +// GetSampleBucketResponse is the response received from Capella V4 Public API on requesting to information about an existing sample bucket. +// +// In order to access this endpoint, the provided API key must have at least one of the following roles: +// +// Organization Owner +// Project Owner +// Project Manager +// Project Viewer +// Database Data Reader/Writer +// Database Data Reader +// To learn more, see https://docs.couchbase.com/cloud/organizations/organization-projects-overview.html +type GetSampleBucketResponse struct { + Stats *Stats `json:"stats"` + + // Id is the ID of the bucket created. + Id string `json:"id"` + + // Name is the name of the cluster (up to 100 characters). 
+ Name string `json:"name"` + + // Type represents the sample Bucket Type + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket + Type string `json:"type"` + + // StorageBackend represents the storage engine used for the sample bucket. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html + StorageBackend string `json:"storageBackend"` + + // BucketConflictResolution is the means by which conflicts are resolved during replication. + // To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution + BucketConflictResolution string `json:"bucketConflictResolution"` + + // DurabilityLevel is the minimum level at which all writes to the sample bucket must occur. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket + DurabilityLevel string `json:"durabilityLevel"` + + // EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion. + //To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy + EvictionPolicy string `json:"evictionPolicy"` + + // MemoryAllocationInMb is the amount of memory to allocate for the sample bucket memory in MiB + MemoryAllocationInMb int64 `json:"memoryAllocationInMb"` + + // Replicas states the number of replica nodes for the sample bucket. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket + Replicas int64 `json:"replicas"` + + // TimeToLiveInSeconds specifies the time to live (TTL) value in seconds. + TimeToLiveInSeconds int64 `json:"timeToLiveInSeconds"` + + // Flush determines whether flushing is enabled on the sample bucket. 
+ Flush bool `json:"flush"` +} diff --git a/internal/api/sample_bucket/stats.go b/internal/api/sample_bucket/stats.go new file mode 100644 index 00000000..2602bffb --- /dev/null +++ b/internal/api/sample_bucket/stats.go @@ -0,0 +1,9 @@ +package samplebucket + +// Stats are the bucket related statistics that are sent by the Capella V4 Public API for any existing bucket. +type Stats struct { + ItemCount int64 `json:"itemCount"` + OpsPerSecond int64 `json:"opsPerSecond"` + DiskUsedInMib int64 `json:"diskUsedInMib"` + MemoryUsedInMib int64 `json:"memoryUsedInMib"` +} diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go new file mode 100644 index 00000000..63c80d7b --- /dev/null +++ b/internal/datasources/sample_buckets.go @@ -0,0 +1,214 @@ +package datasources + +import ( + "context" + "fmt" + "net/http" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/bucket" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api" + providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ datasource.DataSource = &SampleBuckets{} + _ datasource.DataSourceWithConfigure = &SampleBuckets{} +) + +// Sample buckets is the bucket data source implementation. +type SampleBuckets struct { + *providerschema.Data +} + +// NewSampleBuckets is a helper function to simplify the provider implementation. +func NewSampleBuckets() datasource.DataSource { + return &SampleBuckets{} +} + +// Metadata returns the bucket data source type name. 
+func (d *SampleBuckets) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_samplebuckets" +} + +// Schema defines the schema for the bucket data source. +func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "organization_id": schema.StringAttribute{ + Required: true, + }, + "project_id": schema.StringAttribute{ + Required: true, + }, + "cluster_id": schema.StringAttribute{ + Required: true, + }, + "data": schema.ListNestedAttribute{ + Computed: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + }, + "name": schema.StringAttribute{ + Computed: true, + }, + "organization_id": schema.StringAttribute{ + Computed: true, + }, + "project_id": schema.StringAttribute{ + Computed: true, + }, + "cluster_id": schema.StringAttribute{ + Computed: true, + }, + "type": schema.StringAttribute{ + Optional: true, + Computed: true, + }, + "storage_backend": schema.StringAttribute{ + Optional: true, + Computed: true, + }, + "memory_allocation_in_mb": schema.Int64Attribute{ + Optional: true, + Computed: true, + }, + "bucket_conflict_resolution": schema.StringAttribute{ + Optional: true, + Computed: true, + }, + "durability_level": schema.StringAttribute{ + Optional: true, + Computed: true, + }, + "replicas": schema.Int64Attribute{ + Optional: true, + Computed: true, + }, + "flush": schema.BoolAttribute{ + Optional: true, + Computed: true, + }, + "time_to_live_in_seconds": schema.Int64Attribute{ + Optional: true, + Computed: true, + }, + "eviction_policy": schema.StringAttribute{ + Optional: true, + Computed: true, + }, + "stats": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "item_count": schema.Int64Attribute{ + Computed: 
true, + }, + "ops_per_second": schema.Int64Attribute{ + Computed: true, + }, + "disk_used_in_mib": schema.Int64Attribute{ + Computed: true, + }, + "memory_used_in_mib": schema.Int64Attribute{ + Computed: true, + }, + }, + }, + }, + }, + }, + }, + } +} + +// Read refreshes the Terraform state with the latest data of buckets. +func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var state providerschema.SampleBuckets + diags := req.Config.Get(ctx, &state) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + clusterId, projectId, organizationId, err := state.Validate() + if err != nil { + resp.Diagnostics.AddError( + "Error Reading SampleBuckets in Capella", + "Could not read Capella buckets in cluster "+clusterId+": "+err.Error(), + ) + return + } + + url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets", d.HostURL, organizationId, projectId, clusterId) + cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK} + + response, err := api.GetPaginated[[]bucket.GetBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById) + if err != nil { + resp.Diagnostics.AddError( + "Error Reading Capella SampleBuckets", + "Could not read buckets in cluster "+clusterId+": "+api.ParseError(err), + ) + return + } + + // Map response body to model + for _, bucket := range response { + bucketState := providerschema.OneSampleBucket{ + Id: types.StringValue(bucket.Id), + Name: types.StringValue(bucket.Name), + Type: types.StringValue(bucket.Type), + OrganizationId: types.StringValue(organizationId), + ProjectId: types.StringValue(projectId), + ClusterId: types.StringValue(clusterId), + StorageBackend: types.StringValue(bucket.StorageBackend), + MemoryAllocationInMB: types.Int64Value(bucket.MemoryAllocationInMb), + BucketConflictResolution: types.StringValue(bucket.BucketConflictResolution), + DurabilityLevel: 
types.StringValue(bucket.DurabilityLevel), + Replicas: types.Int64Value(bucket.Replicas), + Flush: types.BoolValue(bucket.Flush), + TimeToLiveInSeconds: types.Int64Value(bucket.TimeToLiveInSeconds), + EvictionPolicy: types.StringValue(bucket.EvictionPolicy), + Stats: &providerschema.Stats{ + ItemCount: types.Int64Value(bucket.Stats.ItemCount), + OpsPerSecond: types.Int64Value(bucket.Stats.OpsPerSecond), + DiskUsedInMiB: types.Int64Value(bucket.Stats.DiskUsedInMib), + MemoryUsedInMiB: types.Int64Value(bucket.Stats.MemoryUsedInMib), + }, + } + state.Data = append(state.Data, bucketState) + } + + // Set state + diags = resp.State.Set(ctx, &state) + + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +// Configure adds the provider configured client to the bucket data source. +func (d *SampleBuckets) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + + data, ok := req.ProviderData.(*providerschema.Data) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *ProviderSourceData, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + d.Data = data +} diff --git a/internal/provider/provider.go b/internal/provider/provider.go index 446cd00b..2909a4e3 100644 --- a/internal/provider/provider.go +++ b/internal/provider/provider.go @@ -174,6 +174,7 @@ func (p *capellaProvider) DataSources(_ context.Context) []func() datasource.Dat datasources.NewApiKeys, datasources.NewAppServices, datasources.NewBackups, + datasources.NewSampleBuckets, } } @@ -190,5 +191,6 @@ func (p *capellaProvider) Resources(_ context.Context) []func() resource.Resourc resources.NewAppService, resources.NewBackup, resources.NewBackupSchedule, + resources.NewSampleBucket, } } diff --git a/internal/resources/sampleBucket.go b/internal/resources/sampleBucket.go new file mode 100644 index 00000000..05b95ab9 --- /dev/null +++ b/internal/resources/sampleBucket.go @@ -0,0 +1,351 @@ +package resources + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api" + samplebucket "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" + providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ resource.Resource = &SampleBucket{} + _ resource.ResourceWithConfigure = &SampleBucket{} + _ resource.ResourceWithImportState = &SampleBucket{} +) + +// Samples is the samples resource implementation. 
+type SampleBucket struct { + *providerschema.Data +} + +// NewSamples is a helper function to simplify the provider implementation. +func NewSampleBucket() resource.Resource { + return &SampleBucket{} +} + +// Metadata returns the samples resource type name. +func (s *SampleBucket) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_samplebucket" +} + +// Configure It adds the provider configured api to the project resource. +func (s *SampleBucket) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + + data, ok := req.ProviderData.(*providerschema.Data) + + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *ProviderSourceData, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + s.Data = data +} + +// ImportState imports a remote sample cluster that is not created by Terraform. +func (s *SampleBucket) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + // Retrieve import ID and save to id attribute + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +// Schema defines the schema for the samples resource. +func (s *SampleBucket) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = SampleBucketSchema() +} + +func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan providerschema.SampleBucket + diags := req.Plan.Get(ctx, &plan) + resp.Diagnostics.Append(diags...) 
+ + if resp.Diagnostics.HasError() { + return + } + + BucketRequest := samplebucket.CreateSampleBucketRequest{ + Name: plan.Name.ValueString(), + } + if err := s.validateCreateBucket(plan); err != nil { + resp.Diagnostics.AddError( + "Error creating bucket", + "Could not create bucket, unexpected error: "+err.Error(), + ) + return + } + + var organizationId = plan.OrganizationId.ValueString() + var projectId = plan.ProjectId.ValueString() + var clusterId = plan.ClusterId.ValueString() + + url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets", s.HostURL, organizationId, projectId, clusterId) + cfg := api.EndpointCfg{Url: url, Method: http.MethodPost, SuccessStatus: http.StatusCreated} + response, err := s.Client.ExecuteWithRetry( + ctx, + cfg, + BucketRequest, + s.Token, + nil, + ) + if err != nil { + resp.Diagnostics.AddError( + "Error creating bucket", + errorMessageWhileBucketCreation+api.ParseError(err), + ) + return + } + + BucketResponse := samplebucket.CreateSampleBucketResponse{} + err = json.Unmarshal(response.Body, &BucketResponse) + if err != nil { + resp.Diagnostics.AddError( + "Error creating bucket", + errorMessageWhileBucketCreation+"error during unmarshalling: "+err.Error(), + ) + return + } + plan.Id = types.StringValue(BucketResponse.Id) + //diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, BucketResponse.Id)) + diags = resp.State.Set(ctx, plan) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + // failed to retrieve the bucket + refreshedState, err := s.retrieveBucket(ctx, organizationId, projectId, clusterId, BucketResponse.Id) + if err != nil { + resp.Diagnostics.AddWarning( + "Error creating bucket "+BucketResponse.Id, + errorMessageAfterBucketCreation+api.ParseError(err), + ) + return + } + + // Set state to fully populated data + diags = resp.State.Set(ctx, refreshedState) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } +} + +func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state providerschema.SampleBucket + diags := req.State.Get(ctx, &state) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + IDs, err := state.Validate() + if err != nil { + resp.Diagnostics.AddError( + "Error Reading Bucket in Capella", + "Could not read Capella Bucket with ID "+state.Id.String()+": "+err.Error(), + ) + return + } + + var ( + organizationId = IDs[providerschema.OrganizationId] + projectId = IDs[providerschema.ProjectId] + clusterId = IDs[providerschema.ClusterId] + bucketId = IDs[providerschema.Id] + ) + + refreshedState, err := s.retrieveBucket(ctx, organizationId, projectId, clusterId, bucketId) + if err != nil { + resourceNotFound, errString := api.CheckResourceNotFoundError(err) + if resourceNotFound { + tflog.Info(ctx, "resource doesn't exist in remote server removing resource from state file") + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + "Error reading bucket", + "Could not read bucket with id "+state.Id.String()+": "+errString, + ) + return + } + + diags = resp.State.Set(ctx, &refreshedState) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +func (s *SampleBucket) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { + // Couchbase Capella's v4 does not support a PUT endpoint for sample buckets. + // Allowlists can only be created, read and deleted. + // http://cbc-cp-api.s3-website-us-east-1.amazonaws.com/#tag/sampleBucket + // + // Note: In this situation, terraform apply will default to deleting and executing a new create. + // The update implementation should simply be left empty. 
+ // https://developer.hashicorp.com/terraform/plugin/framework/resources/update +} + +func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var state providerschema.SampleBucket + diags := req.State.Get(ctx, &state) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if state.OrganizationId.IsNull() { + resp.Diagnostics.AddError( + "Error creating bucket", + "Could not create bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), + ) + return + } + var organizationId = state.OrganizationId.ValueString() + + if state.ProjectId.IsNull() { + resp.Diagnostics.AddError( + "Error creating bucket", + "Could not create bucket, unexpected error: "+errors.ErrProjectIdCannotBeEmpty.Error(), + ) + return + } + var projectId = state.ProjectId.ValueString() + + if state.ClusterId.IsNull() { + resp.Diagnostics.AddError( + "Error creating bucket", + "Could not create bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + ) + return + } + var clusterId = state.ClusterId.ValueString() + + if state.Id.IsNull() { + resp.Diagnostics.AddError( + "Error creating bucket", + "Could not create bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + ) + return + } + var bucketId = state.Id.ValueString() + + url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets/%s", s.HostURL, organizationId, projectId, clusterId, bucketId) + cfg := api.EndpointCfg{Url: url, Method: http.MethodDelete, SuccessStatus: http.StatusNoContent} + _, err := s.Client.ExecuteWithRetry( + ctx, + cfg, + nil, + s.Token, + nil, + ) + if err != nil { + resourceNotFound, errString := api.CheckResourceNotFoundError(err) + if resourceNotFound { + tflog.Info(ctx, "resource doesn't exist in remote server removing resource from state file") + resp.State.RemoveResource(ctx) + return + } + resp.Diagnostics.AddError( + "Error Deleting the Bucket", + "Could 
not delete Bucket associated with cluster "+clusterId+": "+errString, + ) + return + } +} + +func (r *SampleBucket) validateCreateBucket(plan providerschema.SampleBucket) error { + if plan.OrganizationId.IsNull() { + return errors.ErrOrganizationIdMissing + } + if plan.ProjectId.IsNull() { + return errors.ErrProjectIdMissing + } + if plan.ClusterId.IsNull() { + return errors.ErrClusterIdMissing + } + return r.validateBucketAttributesTrimmed(plan) +} + +// Add extra validaiton for +func (r *SampleBucket) validateBucketAttributesTrimmed(plan providerschema.SampleBucket) error { + if (!plan.Name.IsNull() && !plan.Name.IsUnknown()) && !providerschema.IsTrimmed(plan.Name.ValueString()) { + return fmt.Errorf("name %s", errors.ErrNotTrimmed) + } + return nil +} + +// retrieveBucket retrieves bucket information for a specified organization, project, cluster and bucket ID. +func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, projectId, clusterId, bucketId string) (*providerschema.OneSampleBucket, error) { + url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets/%s", s.HostURL, organizationId, projectId, clusterId, bucketId) + cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK} + response, err := s.Client.ExecuteWithRetry( + ctx, + cfg, + nil, + s.Token, + nil, + ) + if err != nil { + return nil, fmt.Errorf("%s: %w", errors.ErrExecutingRequest, err) + } + + bucketResp := samplebucket.GetSampleBucketResponse{} + err = json.Unmarshal(response.Body, &bucketResp) + if err != nil { + return nil, fmt.Errorf("%s: %w", errors.ErrUnmarshallingResponse, err) + } + refreshedState := providerschema.OneSampleBucket{ + Id: types.StringValue(bucketResp.Id), + Name: types.StringValue(bucketResp.Name), + OrganizationId: types.StringValue(organizationId), + ProjectId: types.StringValue(projectId), + ClusterId: types.StringValue(clusterId), + Type: types.StringValue(bucketResp.Type), + StorageBackend: 
types.StringValue(bucketResp.StorageBackend), + MemoryAllocationInMB: types.Int64Value(bucketResp.MemoryAllocationInMb), + BucketConflictResolution: types.StringValue(bucketResp.BucketConflictResolution), + DurabilityLevel: types.StringValue(bucketResp.DurabilityLevel), + Replicas: types.Int64Value(bucketResp.Replicas), + Flush: types.BoolValue(bucketResp.Flush), + TimeToLiveInSeconds: types.Int64Value(bucketResp.TimeToLiveInSeconds), + EvictionPolicy: types.StringValue(bucketResp.EvictionPolicy), + Stats: &providerschema.Stats{ + ItemCount: types.Int64Value(bucketResp.Stats.ItemCount), + OpsPerSecond: types.Int64Value(bucketResp.Stats.OpsPerSecond), + DiskUsedInMiB: types.Int64Value(bucketResp.Stats.DiskUsedInMib), + MemoryUsedInMiB: types.Int64Value(bucketResp.Stats.MemoryUsedInMib), + }, + } + + return &refreshedState, nil +} + +// initializeBucketWithPlanAndId initializes an instance of providerschema.Bucket +// with the specified plan and ID. It marks all computed fields as null. +func initializeSampleBucketWithPlanAndId(plan providerschema.SampleBucket, id string) providerschema.SampleBucket { + plan.Id = types.StringValue(id) + /* + if plan.StorageBackend.IsNull() || plan.StorageBackend.IsUnknown() { + plan.StorageBackend = types.StringNull() + } + if plan.EvictionPolicy.IsNull() || plan.EvictionPolicy.IsUnknown() { + plan.EvictionPolicy = types.StringNull() + } + plan.Stats = types.ObjectNull(providerschema.Stats{}.AttributeTypes())*/ + return plan +} diff --git a/internal/resources/sampleBucket_schema.go b/internal/resources/sampleBucket_schema.go new file mode 100644 index 00000000..6db4e23d --- /dev/null +++ b/internal/resources/sampleBucket_schema.go @@ -0,0 +1,42 @@ +package resources + +import ( + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" +) + +func 
SampleBucketSchema() schema.Schema { + return schema.Schema{ + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "name": stringAttribute(required, requiresReplace), + "organization_id": stringAttribute(required, requiresReplace), + "project_id": stringAttribute(required, requiresReplace), + "cluster_id": stringAttribute(required, requiresReplace), + "type": stringDefaultAttribute("couchbase", optional, computed, requiresReplace, useStateForUnknown), + "storage_backend": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), + "memory_allocation_in_mb": int64DefaultAttribute(200, optional, computed), + "bucket_conflict_resolution": stringDefaultAttribute("seqno", optional, computed, requiresReplace, useStateForUnknown), + "durability_level": stringDefaultAttribute("none", optional, computed), + "replicas": int64DefaultAttribute(1, optional, computed), + "flush": boolDefaultAttribute(false, optional, computed), + "time_to_live_in_seconds": int64DefaultAttribute(0, optional, computed), + "eviction_policy": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), + "stats": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "item_count": int64Attribute(computed), + "ops_per_second": int64Attribute(computed), + "disk_used_in_mib": int64Attribute(computed), + "memory_used_in_mib": int64Attribute(computed), + }, + }, + }, + } +} diff --git a/internal/schema/sample_bucket.go b/internal/schema/sample_bucket.go new file mode 100644 index 00000000..2800f08a --- /dev/null +++ b/internal/schema/sample_bucket.go @@ -0,0 +1,205 @@ +package schema + +import ( + "fmt" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" + + "github.com/hashicorp/terraform-plugin-framework/types" + 
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +type SampleBucket struct { + /* + // ClusterId is the ID of the cluster for which the database credential needs to be created. + ClusterId types.String `tfsdk:"cluster_id"` + + // Name is the name of the bucket. + Name types.String `tfsdk:"name"` + + // ProjectId is the ID of the project to which the Capella cluster belongs. + // The database credential will be created for the cluster. + ProjectId types.String `tfsdk:"project_id"` + + // Id is the id of the created bucket. + Id types.String `tfsdk:"id"` + + // OrganizationId is the ID of the organization to which the Capella cluster belongs. + // The database credential will be created for the cluster. + OrganizationId types.String `tfsdk:"organization_id"` + */ + // DurabilityLevel is the minimum level at which all writes to the bucket must occur. + // Default: "none" + // Enum: "none" "majority" "majorityAndPersistActive" "persistToMajority" + // + // The options for Durability level are as follows, according to the bucket type. + // + // For a Couchbase bucket: + // None + // Replicate to Majority + // Majority and Persist to Active + // Persist to Majority + // + //For an Ephemeral bucket: + // None + // Replicate to Majority + DurabilityLevel types.String `tfsdk:"durability_level"` + + // Stats has the bucket stats that are related to memory and disk consumption. + // itemCount: Number of documents in the bucket. + // opsPerSecond: Number of operations per second. + // diskUsedInMib: The amount of disk used (in MiB). + // memoryUsedInMib: The amount of memory used (in MiB). + Stats types.Object `tfsdk:"stats"` + + // Type defines the type of the bucket. + // Default: "couchbase" + // Enum: "couchbase" "ephemeral" + // If selected Ephemeral, it is not eligible for imports or App Endpoints creation. This field cannot be changed later. 
+ // The options may also be referred to as Memory and Disk (Couchbase), Memory Only (Ephemeral) in the Couchbase documentation. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket + Type types.String `tfsdk:"type"` + + // StorageBackend defines the storage engine that is used by the bucket. + // Default: "couchstore" + // Enum: "couchstore" "magma" + // + // Ephemeral buckets do not support StorageBackend, hence not applicable for Ephemeral buckets and throws an error if this field is added. + // This field is only applicable for a Couchbase bucket. The default value mentioned (Couchstore) is for Couchbase bucket. + // This field cannot be changed later. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html + StorageBackend types.String `tfsdk:"storage_backend"` + + // ClusterId is the ID of the cluster for which the database credential needs to be created. + ClusterId types.String `tfsdk:"cluster_id"` + + // BucketConflictResolution is the means by which conflicts are resolved during replication. + // Default: "seqno" + // Enum: "seqno" "lww" + // This field may be referred to as "conflict resolution" in the Couchbase documentation. + // seqno and lww may be referred to as "sequence number" and "timestamp" respectively. + // This field cannot be changed later. + // To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution + BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` + + // Name is the name of the bucket. + Name types.String `tfsdk:"name"` + + // ProjectId is the ID of the project to which the Capella cluster belongs. + // The database credential will be created for the cluster. + ProjectId types.String `tfsdk:"project_id"` + + // Id is the id of the created bucket. + Id types.String `tfsdk:"id"` + + // OrganizationId is the ID of the organization to which the Capella cluster belongs. 
+ // The database credential will be created for the cluster. + OrganizationId types.String `tfsdk:"organization_id"` + + // EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion. + // This may be also known as Ejection Policy in the Couchbase documentation. + // + // For Couchbase bucket, Eviction Policy is fullEviction by default. + // For Ephemeral buckets, Eviction Policy is a required field, and should be one of the following: + // noEviction + // nruEviction + // Default: "fullEviction" + // Enum: "fullEviction" "noEviction" "nruEviction" + // To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy + EvictionPolicy types.String `tfsdk:"eviction_policy"` + + // MemoryAllocationInMB is the amount of memory to allocate for the bucket memory in MiB. + // This is the maximum limit is dependent on the allocation of the KV service. For example, 80% of the allocation. + // Default: 100 + // + // The default value (100MiB) mentioned is for Couchbase type buckets with Couchstore as the Storage Backend. + // + // For Couchbase buckets, the default and minimum memory allocation changes according to the Storage Backend type as follows: + // For Couchstore, the default and minimum memory allocation is 100 MiB. + // For Magma, the default and minimum memory allocation is 1024 MiB. + // For Ephemeral buckets, the default and minimum memory allocation is 100 MiB. + MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` + + // TimeToLiveInSeconds specifies the time to live (TTL) value in seconds. + // This is the maximum time to live for items in the bucket. + // Default is 0, that means TTL is disabled. This is a non-negative value. + TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` + + // Replicas is the number of replicas for the bucket. 
+ // Default: 1 + // Enum: 1 2 3 + Replicas types.Int64 `tfsdk:"replicas"` + + // Flush determines whether flushing is enabled on the bucket. + // Enable Flush to delete all items in this bucket at the earliest opportunity. + // Disable Flush to avoid inadvertent data loss. + // Default: false + Flush types.Bool `tfsdk:"flush"` +} + +// SampleBuckets defines attributes for the LIST buckets response received from V4 Capella Public API. +type SampleBuckets struct { + // OrganizationId The organizationId of the capella. + OrganizationId types.String `tfsdk:"organization_id"` + + // ProjectId is the projectId of the capella tenant. + ProjectId types.String `tfsdk:"project_id"` + + // ClusterId is the clusterId of the capella tenant. + ClusterId types.String `tfsdk:"cluster_id"` + + // Data It contains the list of resources. + Data []OneSampleBucket `tfsdk:"data"` +} + +type OneSampleBucket struct { + Stats *Stats `tfsdk:"stats"` + DurabilityLevel types.String `tfsdk:"durability_level"` + Name types.String `tfsdk:"name"` + StorageBackend types.String `tfsdk:"storage_backend"` + ClusterId types.String `tfsdk:"cluster_id"` + BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` + Id types.String `tfsdk:"id"` + ProjectId types.String `tfsdk:"project_id"` + OrganizationId types.String `tfsdk:"organization_id"` + Type types.String `tfsdk:"type"` + EvictionPolicy types.String `tfsdk:"eviction_policy"` + TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` + Replicas types.Int64 `tfsdk:"replicas"` + MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` + Flush types.Bool `tfsdk:"flush"` +} + +// Validate will split the IDs by a delimiter i.e. comma , in case a terraform import CLI is invoked. +// The format of the terraform import CLI would include the IDs as follows - +// `terraform import capella_bucket.new_bucket id=,cluster_id=,project_id=,organization_id=`. 
+func (b SampleBucket) Validate() (map[Attr]string, error) { + state := map[Attr]basetypes.StringValue{ + OrganizationId: b.OrganizationId, + ProjectId: b.ProjectId, + ClusterId: b.ClusterId, + Id: b.Id, + } + + IDs, err := validateSchemaState(state) + if err != nil { + return nil, fmt.Errorf("%s: %w", errors.ErrValidatingResource, err) + } + + return IDs, nil +} + +// Validate is used to verify that all the fields in the datasource +// have been populated. +func (b SampleBuckets) Validate() (clusterId, projectId, organizationId string, err error) { + if b.OrganizationId.IsNull() { + return "", "", "", errors.ErrOrganizationIdMissing + } + if b.ProjectId.IsNull() { + return "", "", "", errors.ErrProjectIdMissing + } + if b.ClusterId.IsNull() { + return "", "", "", errors.ErrClusterIdMissing + } + return b.ClusterId.ValueString(), b.ProjectId.ValueString(), b.OrganizationId.ValueString(), nil +} diff --git a/internal/schema/sample_bucket_test.go b/internal/schema/sample_bucket_test.go new file mode 100644 index 00000000..8025ac0d --- /dev/null +++ b/internal/schema/sample_bucket_test.go @@ -0,0 +1,70 @@ +package schema + +import ( + "testing" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" + + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/stretchr/testify/assert" +) + +func TestSampleBucketSchemaValidate(t *testing.T) { + type test struct { + expectedErr error + name string + expectedProjectId string + expectedOrganizationId string + expectedClusterId string + expectedBucketId string + input Bucket + } + + tests := []test{ + { + name: "[POSITIVE] project ID, organization ID, cluster ID, bucket ID are passed via terraform apply", + input: Bucket{ + Id: basetypes.NewStringValue("100"), + ClusterId: basetypes.NewStringValue("200"), + ProjectId: basetypes.NewStringValue("300"), + OrganizationId: basetypes.NewStringValue("400"), + }, + expectedBucketId: "100", + expectedClusterId: "200", + 
expectedProjectId: "300", + expectedOrganizationId: "400", + }, + { + name: "[POSITIVE] IDs are passed via terraform import", + input: Bucket{ + Id: basetypes.NewStringValue("id=100,cluster_id=200,project_id=300,organization_id=400"), + }, + expectedBucketId: "100", + expectedClusterId: "200", + expectedProjectId: "300", + expectedOrganizationId: "400", + }, + { + name: "[NEGATIVE] only bucket ID is passed via terraform apply", + input: Bucket{ + Id: basetypes.NewStringValue("100"), + }, + expectedErr: errors.ErrInvalidImport, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + IDs, err := test.input.Validate() + + if test.expectedErr != nil { + assert.ErrorContains(t, err, test.expectedErr.Error()) + return + } + + assert.Equal(t, test.expectedBucketId, IDs[Id]) + assert.Equal(t, test.expectedClusterId, IDs[ClusterId]) + assert.Equal(t, test.expectedProjectId, IDs[ProjectId]) + assert.Equal(t, test.expectedOrganizationId, IDs[OrganizationId]) + }) + } +} From e6243a4a41b15f3bc26bec19d33c9828df371d33 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Fri, 1 Mar 2024 08:21:39 +0000 Subject: [PATCH 02/15] Adding import sample bucket example --- .../sample_bucket/create_sample_bucket.tf | 14 +++++++++++ examples/sample_bucket/list_sample_buckets.tf | 9 +++++++ examples/sample_bucket/main.tf | 12 ++++++++++ .../sample_bucket/terraform.template.tfvars | 9 +++++++ examples/sample_bucket/variables.tf | 24 +++++++++++++++++++ 5 files changed, 68 insertions(+) create mode 100644 examples/sample_bucket/create_sample_bucket.tf create mode 100644 examples/sample_bucket/list_sample_buckets.tf create mode 100644 examples/sample_bucket/main.tf create mode 100644 examples/sample_bucket/terraform.template.tfvars create mode 100644 examples/sample_bucket/variables.tf diff --git a/examples/sample_bucket/create_sample_bucket.tf b/examples/sample_bucket/create_sample_bucket.tf new file mode 100644 index 00000000..8d0bed58 --- /dev/null +++ 
b/examples/sample_bucket/create_sample_bucket.tf @@ -0,0 +1,14 @@ +output "new_samplebucket" { + value = couchbase-capella_samplebucket.new_samplebucket +} + +output "samplebucket_id" { + value = couchbase-capella_samplebucket.new_samplebucket.id +} + +resource "couchbase-capella_samplebucket" "new_samplebucket" { + name = var.samplebucket.name + organization_id = var.organization_id + project_id = var.project_id + cluster_id = var.cluster_id +} diff --git a/examples/sample_bucket/list_sample_buckets.tf b/examples/sample_bucket/list_sample_buckets.tf new file mode 100644 index 00000000..70214bed --- /dev/null +++ b/examples/sample_bucket/list_sample_buckets.tf @@ -0,0 +1,9 @@ +output "samplebuckets_list" { + value = data.couchbase-capella_samplebuckets.existing_samplebuckets +} + +data "couchbase-capella_samplebuckets" "existing_samplebuckets" { + organization_id = var.organization_id + project_id = var.project_id + cluster_id = var.cluster_id +} diff --git a/examples/sample_bucket/main.tf b/examples/sample_bucket/main.tf new file mode 100644 index 00000000..c0a9ce2c --- /dev/null +++ b/examples/sample_bucket/main.tf @@ -0,0 +1,12 @@ +terraform { + required_providers { + couchbase-capella = { + source = "couchbasecloud/couchbase-capella" + } + } +} + +provider "couchbase-capella" { + authentication_token = var.auth_token +} + diff --git a/examples/sample_bucket/terraform.template.tfvars b/examples/sample_bucket/terraform.template.tfvars new file mode 100644 index 00000000..84b7f668 --- /dev/null +++ b/examples/sample_bucket/terraform.template.tfvars @@ -0,0 +1,9 @@ +auth_token = "" + +organization_id = "" +project_id = "" +cluster_id = "" + +samplebucket = { + name = "new_terraform_samplebucket" +} diff --git a/examples/sample_bucket/variables.tf b/examples/sample_bucket/variables.tf new file mode 100644 index 00000000..dea37fc6 --- /dev/null +++ b/examples/sample_bucket/variables.tf @@ -0,0 +1,24 @@ +variable "organization_id" { + description = "Capella 
Organization ID"
+}
+
+variable "auth_token" {
+  description = "Authentication API Key"
+  sensitive   = true
+}
+
+variable "project_id" {
+  description = "Capella Project ID"
+}
+
+variable "cluster_id" {
+  description = "Capella Cluster ID"
+}
+
+variable "samplebucket" {
+  description = "Bucket configuration details useful for creation"
+
+  type = object({
+    name = string
+  })
+}
\ No newline at end of file

From d6e459c45472460db6fcdb6169a774a12d1104fb Mon Sep 17 00:00:00 2001
From: Laura Silaja
Date: Fri, 1 Mar 2024 10:07:19 +0000
Subject: [PATCH 03/15] Removed the unnecessary schema

---
 internal/datasources/sample_buckets.go        |   4 +-
 .../{sampleBucket.go => sample_bucket.go}     |  19 +-
 ...cket_schema.go => sample_bucket_schema.go} |   0
 internal/schema/sample_bucket.go              | 205 ------------------
 internal/schema/sample_bucket_test.go         |  70 ------
 5 files changed, 13 insertions(+), 285 deletions(-)
 rename internal/resources/{sampleBucket.go => sample_bucket.go} (95%)
 rename internal/resources/{sampleBucket_schema.go => sample_bucket_schema.go} (100%)
 delete mode 100644 internal/schema/sample_bucket.go
 delete mode 100644 internal/schema/sample_bucket_test.go

diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go
index 63c80d7b..cea57a8a 100644
--- a/internal/datasources/sample_buckets.go
+++ b/internal/datasources/sample_buckets.go
@@ -130,7 +130,7 @@ func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, re
 
 // Read refreshes the Terraform state with the latest data of buckets.
 func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
-	var state providerschema.SampleBuckets
+	var state providerschema.Buckets
 	diags := req.Config.Get(ctx, &state)
 	resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() { @@ -160,7 +160,7 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re // Map response body to model for _, bucket := range response { - bucketState := providerschema.OneSampleBucket{ + bucketState := providerschema.OneBucket{ Id: types.StringValue(bucket.Id), Name: types.StringValue(bucket.Name), Type: types.StringValue(bucket.Type), diff --git a/internal/resources/sampleBucket.go b/internal/resources/sample_bucket.go similarity index 95% rename from internal/resources/sampleBucket.go rename to internal/resources/sample_bucket.go index 05b95ab9..7c69c55c 100644 --- a/internal/resources/sampleBucket.go +++ b/internal/resources/sample_bucket.go @@ -70,7 +70,7 @@ func (s *SampleBucket) Schema(_ context.Context, _ resource.SchemaRequest, resp } func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var plan providerschema.SampleBucket + var plan providerschema.Bucket diags := req.Plan.Get(ctx, &plan) resp.Diagnostics.Append(diags...) @@ -119,6 +119,8 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r ) return } + // Add validation on name. If name doesn't equal the sample names then you should throw an error here or + // you could just let the create throw an error for it. plan.Id = types.StringValue(BucketResponse.Id) //diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, BucketResponse.Id)) diags = resp.State.Set(ctx, plan) @@ -145,7 +147,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r } func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var state providerschema.SampleBucket + var state providerschema.Bucket diags := req.State.Get(ctx, &state) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { @@ -201,7 +203,7 @@ func (s *SampleBucket) Update(_ context.Context, _ resource.UpdateRequest, _ *re } func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var state providerschema.SampleBucket + var state providerschema.Bucket diags := req.State.Get(ctx, &state) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -268,7 +270,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r } } -func (r *SampleBucket) validateCreateBucket(plan providerschema.SampleBucket) error { +func (r *SampleBucket) validateCreateBucket(plan providerschema.Bucket) error { if plan.OrganizationId.IsNull() { return errors.ErrOrganizationIdMissing } @@ -282,7 +284,7 @@ func (r *SampleBucket) validateCreateBucket(plan providerschema.SampleBucket) er } // Add extra validaiton for -func (r *SampleBucket) validateBucketAttributesTrimmed(plan providerschema.SampleBucket) error { +func (r *SampleBucket) validateBucketAttributesTrimmed(plan providerschema.Bucket) error { if (!plan.Name.IsNull() && !plan.Name.IsUnknown()) && !providerschema.IsTrimmed(plan.Name.ValueString()) { return fmt.Errorf("name %s", errors.ErrNotTrimmed) } @@ -290,7 +292,7 @@ func (r *SampleBucket) validateBucketAttributesTrimmed(plan providerschema.Sampl } // retrieveBucket retrieves bucket information for a specified organization, project, cluster and bucket ID. 
-func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, projectId, clusterId, bucketId string) (*providerschema.OneSampleBucket, error) { +func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, projectId, clusterId, bucketId string) (*providerschema.OneBucket, error) { url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets/%s", s.HostURL, organizationId, projectId, clusterId, bucketId) cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK} response, err := s.Client.ExecuteWithRetry( @@ -309,7 +311,7 @@ func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, proje if err != nil { return nil, fmt.Errorf("%s: %w", errors.ErrUnmarshallingResponse, err) } - refreshedState := providerschema.OneSampleBucket{ + refreshedState := providerschema.OneBucket{ Id: types.StringValue(bucketResp.Id), Name: types.StringValue(bucketResp.Name), OrganizationId: types.StringValue(organizationId), @@ -337,8 +339,9 @@ func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, proje // initializeBucketWithPlanAndId initializes an instance of providerschema.Bucket // with the specified plan and ID. It marks all computed fields as null. -func initializeSampleBucketWithPlanAndId(plan providerschema.SampleBucket, id string) providerschema.SampleBucket { +func initializeSampleBucketWithPlanAndId(plan providerschema.Bucket, id string) providerschema.Bucket { plan.Id = types.StringValue(id) + // Do I need this? 
/* if plan.StorageBackend.IsNull() || plan.StorageBackend.IsUnknown() { plan.StorageBackend = types.StringNull() diff --git a/internal/resources/sampleBucket_schema.go b/internal/resources/sample_bucket_schema.go similarity index 100% rename from internal/resources/sampleBucket_schema.go rename to internal/resources/sample_bucket_schema.go diff --git a/internal/schema/sample_bucket.go b/internal/schema/sample_bucket.go deleted file mode 100644 index 2800f08a..00000000 --- a/internal/schema/sample_bucket.go +++ /dev/null @@ -1,205 +0,0 @@ -package schema - -import ( - "fmt" - - "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" - - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -type SampleBucket struct { - /* - // ClusterId is the ID of the cluster for which the database credential needs to be created. - ClusterId types.String `tfsdk:"cluster_id"` - - // Name is the name of the bucket. - Name types.String `tfsdk:"name"` - - // ProjectId is the ID of the project to which the Capella cluster belongs. - // The database credential will be created for the cluster. - ProjectId types.String `tfsdk:"project_id"` - - // Id is the id of the created bucket. - Id types.String `tfsdk:"id"` - - // OrganizationId is the ID of the organization to which the Capella cluster belongs. - // The database credential will be created for the cluster. - OrganizationId types.String `tfsdk:"organization_id"` - */ - // DurabilityLevel is the minimum level at which all writes to the bucket must occur. - // Default: "none" - // Enum: "none" "majority" "majorityAndPersistActive" "persistToMajority" - // - // The options for Durability level are as follows, according to the bucket type. 
- // - // For a Couchbase bucket: - // None - // Replicate to Majority - // Majority and Persist to Active - // Persist to Majority - // - //For an Ephemeral bucket: - // None - // Replicate to Majority - DurabilityLevel types.String `tfsdk:"durability_level"` - - // Stats has the bucket stats that are related to memory and disk consumption. - // itemCount: Number of documents in the bucket. - // opsPerSecond: Number of operations per second. - // diskUsedInMib: The amount of disk used (in MiB). - // memoryUsedInMib: The amount of memory used (in MiB). - Stats types.Object `tfsdk:"stats"` - - // Type defines the type of the bucket. - // Default: "couchbase" - // Enum: "couchbase" "ephemeral" - // If selected Ephemeral, it is not eligible for imports or App Endpoints creation. This field cannot be changed later. - // The options may also be referred to as Memory and Disk (Couchbase), Memory Only (Ephemeral) in the Couchbase documentation. - // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket - Type types.String `tfsdk:"type"` - - // StorageBackend defines the storage engine that is used by the bucket. - // Default: "couchstore" - // Enum: "couchstore" "magma" - // - // Ephemeral buckets do not support StorageBackend, hence not applicable for Ephemeral buckets and throws an error if this field is added. - // This field is only applicable for a Couchbase bucket. The default value mentioned (Couchstore) is for Couchbase bucket. - // This field cannot be changed later. - // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html - StorageBackend types.String `tfsdk:"storage_backend"` - - // ClusterId is the ID of the cluster for which the database credential needs to be created. - ClusterId types.String `tfsdk:"cluster_id"` - - // BucketConflictResolution is the means by which conflicts are resolved during replication. 
- // Default: "seqno" - // Enum: "seqno" "lww" - // This field may be referred to as "conflict resolution" in the Couchbase documentation. - // seqno and lww may be referred to as "sequence number" and "timestamp" respectively. - // This field cannot be changed later. - // To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution - BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` - - // Name is the name of the bucket. - Name types.String `tfsdk:"name"` - - // ProjectId is the ID of the project to which the Capella cluster belongs. - // The database credential will be created for the cluster. - ProjectId types.String `tfsdk:"project_id"` - - // Id is the id of the created bucket. - Id types.String `tfsdk:"id"` - - // OrganizationId is the ID of the organization to which the Capella cluster belongs. - // The database credential will be created for the cluster. - OrganizationId types.String `tfsdk:"organization_id"` - - // EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion. - // This may be also known as Ejection Policy in the Couchbase documentation. - // - // For Couchbase bucket, Eviction Policy is fullEviction by default. - // For Ephemeral buckets, Eviction Policy is a required field, and should be one of the following: - // noEviction - // nruEviction - // Default: "fullEviction" - // Enum: "fullEviction" "noEviction" "nruEviction" - // To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy - EvictionPolicy types.String `tfsdk:"eviction_policy"` - - // MemoryAllocationInMB is the amount of memory to allocate for the bucket memory in MiB. - // This is the maximum limit is dependent on the allocation of the KV service. For example, 80% of the allocation. - // Default: 100 - // - // The default value (100MiB) mentioned is for Couchbase type buckets with Couchstore as the Storage Backend. 
- // - // For Couchbase buckets, the default and minimum memory allocation changes according to the Storage Backend type as follows: - // For Couchstore, the default and minimum memory allocation is 100 MiB. - // For Magma, the default and minimum memory allocation is 1024 MiB. - // For Ephemeral buckets, the default and minimum memory allocation is 100 MiB. - MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` - - // TimeToLiveInSeconds specifies the time to live (TTL) value in seconds. - // This is the maximum time to live for items in the bucket. - // Default is 0, that means TTL is disabled. This is a non-negative value. - TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` - - // Replicas is the number of replicas for the bucket. - // Default: 1 - // Enum: 1 2 3 - Replicas types.Int64 `tfsdk:"replicas"` - - // Flush determines whether flushing is enabled on the bucket. - // Enable Flush to delete all items in this bucket at the earliest opportunity. - // Disable Flush to avoid inadvertent data loss. - // Default: false - Flush types.Bool `tfsdk:"flush"` -} - -// SampleBuckets defines attributes for the LIST buckets response received from V4 Capella Public API. -type SampleBuckets struct { - // OrganizationId The organizationId of the capella. - OrganizationId types.String `tfsdk:"organization_id"` - - // ProjectId is the projectId of the capella tenant. - ProjectId types.String `tfsdk:"project_id"` - - // ClusterId is the clusterId of the capella tenant. - ClusterId types.String `tfsdk:"cluster_id"` - - // Data It contains the list of resources. 
- Data []OneSampleBucket `tfsdk:"data"` -} - -type OneSampleBucket struct { - Stats *Stats `tfsdk:"stats"` - DurabilityLevel types.String `tfsdk:"durability_level"` - Name types.String `tfsdk:"name"` - StorageBackend types.String `tfsdk:"storage_backend"` - ClusterId types.String `tfsdk:"cluster_id"` - BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` - Id types.String `tfsdk:"id"` - ProjectId types.String `tfsdk:"project_id"` - OrganizationId types.String `tfsdk:"organization_id"` - Type types.String `tfsdk:"type"` - EvictionPolicy types.String `tfsdk:"eviction_policy"` - TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` - Replicas types.Int64 `tfsdk:"replicas"` - MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` - Flush types.Bool `tfsdk:"flush"` -} - -// Validate will split the IDs by a delimiter i.e. comma , in case a terraform import CLI is invoked. -// The format of the terraform import CLI would include the IDs as follows - -// `terraform import capella_bucket.new_bucket id=,cluster_id=,project_id=,organization_id=`. -func (b SampleBucket) Validate() (map[Attr]string, error) { - state := map[Attr]basetypes.StringValue{ - OrganizationId: b.OrganizationId, - ProjectId: b.ProjectId, - ClusterId: b.ClusterId, - Id: b.Id, - } - - IDs, err := validateSchemaState(state) - if err != nil { - return nil, fmt.Errorf("%s: %w", errors.ErrValidatingResource, err) - } - - return IDs, nil -} - -// Validate is used to verify that all the fields in the datasource -// have been populated. 
-func (b SampleBuckets) Validate() (clusterId, projectId, organizationId string, err error) { - if b.OrganizationId.IsNull() { - return "", "", "", errors.ErrOrganizationIdMissing - } - if b.ProjectId.IsNull() { - return "", "", "", errors.ErrProjectIdMissing - } - if b.ClusterId.IsNull() { - return "", "", "", errors.ErrClusterIdMissing - } - return b.ClusterId.ValueString(), b.ProjectId.ValueString(), b.OrganizationId.ValueString(), nil -} diff --git a/internal/schema/sample_bucket_test.go b/internal/schema/sample_bucket_test.go deleted file mode 100644 index 8025ac0d..00000000 --- a/internal/schema/sample_bucket_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" - - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" - "github.com/stretchr/testify/assert" -) - -func TestSampleBucketSchemaValidate(t *testing.T) { - type test struct { - expectedErr error - name string - expectedProjectId string - expectedOrganizationId string - expectedClusterId string - expectedBucketId string - input Bucket - } - - tests := []test{ - { - name: "[POSITIVE] project ID, organization ID, cluster ID, bucket ID are passed via terraform apply", - input: Bucket{ - Id: basetypes.NewStringValue("100"), - ClusterId: basetypes.NewStringValue("200"), - ProjectId: basetypes.NewStringValue("300"), - OrganizationId: basetypes.NewStringValue("400"), - }, - expectedBucketId: "100", - expectedClusterId: "200", - expectedProjectId: "300", - expectedOrganizationId: "400", - }, - { - name: "[POSITIVE] IDs are passed via terraform import", - input: Bucket{ - Id: basetypes.NewStringValue("id=100,cluster_id=200,project_id=300,organization_id=400"), - }, - expectedBucketId: "100", - expectedClusterId: "200", - expectedProjectId: "300", - expectedOrganizationId: "400", - }, - { - name: "[NEGATIVE] only bucket ID is passed via terraform apply", - input: Bucket{ - Id: 
basetypes.NewStringValue("100"),
-			},
-			expectedErr: errors.ErrInvalidImport,
-		},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			IDs, err := test.input.Validate()
-
-			if test.expectedErr != nil {
-				assert.ErrorContains(t, err, test.expectedErr.Error())
-				return
-			}
-
-			assert.Equal(t, test.expectedBucketId, IDs[Id])
-			assert.Equal(t, test.expectedClusterId, IDs[ClusterId])
-			assert.Equal(t, test.expectedProjectId, IDs[ProjectId])
-			assert.Equal(t, test.expectedOrganizationId, IDs[OrganizationId])
-		})
-	}
-}

From f74bbf8f1c6b34dce36560da97c83790406b7663 Mon Sep 17 00:00:00 2001
From: Laura Silaja
Date: Fri, 1 Mar 2024 11:50:08 +0000
Subject: [PATCH 04/15] Added some validation

---
 internal/errors/errors.go           |  3 +++
 internal/resources/sample_bucket.go | 39 +++++++++++++----------------
 2 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/internal/errors/errors.go b/internal/errors/errors.go
index 2f38e196..a569b672 100644
--- a/internal/errors/errors.go
+++ b/internal/errors/errors.go
@@ -158,4 +158,7 @@ var (
 
 	// ErrIfMatchCannotBeSetWhileCreate is returned when if_match is set during create operation.
 	ErrIfMatchCannotBeSetWhileCreate = errors.New("if_match attribute cannot be set during create operation")
+
+	// ErrInvalidSampleBucketName is returned when the sample bucket name is not one of the supported sample datasets.
+	ErrInvalidSampleBucketName = errors.New("sample bucket name can only be travel-sample, beer-sample, gamesim-sample")
 )
diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go
index 7c69c55c..f8526031 100644
--- a/internal/resources/sample_bucket.go
+++ b/internal/resources/sample_bucket.go
@@ -119,16 +119,14 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r
 		)
 		return
 	}
-	// Add validation on name. If name doesn't equal the sample names then you should throw an error here or
-	// you could just let the create throw an error for it. 
+ plan.Id = types.StringValue(BucketResponse.Id) - //diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, BucketResponse.Id)) diags = resp.State.Set(ctx, plan) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - // failed to retrieve the bucket + refreshedState, err := s.retrieveBucket(ctx, organizationId, projectId, clusterId, BucketResponse.Id) if err != nil { resp.Diagnostics.AddWarning( @@ -280,14 +278,18 @@ func (r *SampleBucket) validateCreateBucket(plan providerschema.Bucket) error { if plan.ClusterId.IsNull() { return errors.ErrClusterIdMissing } - return r.validateBucketAttributesTrimmed(plan) + return r.validateBucketName(plan) } -// Add extra validaiton for -func (r *SampleBucket) validateBucketAttributesTrimmed(plan providerschema.Bucket) error { +func (r *SampleBucket) validateBucketName(plan providerschema.Bucket) error { if (!plan.Name.IsNull() && !plan.Name.IsUnknown()) && !providerschema.IsTrimmed(plan.Name.ValueString()) { return fmt.Errorf("name %s", errors.ErrNotTrimmed) } + + if !isValidSampleName(plan.Name.ValueString()) { + return errors.ErrInvalidSampleBucketName + } + return nil } @@ -337,18 +339,13 @@ func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, proje return &refreshedState, nil } -// initializeBucketWithPlanAndId initializes an instance of providerschema.Bucket -// with the specified plan and ID. It marks all computed fields as null. -func initializeSampleBucketWithPlanAndId(plan providerschema.Bucket, id string) providerschema.Bucket { - plan.Id = types.StringValue(id) - // Do I need this? 
-	/*
-		if plan.StorageBackend.IsNull() || plan.StorageBackend.IsUnknown() {
-			plan.StorageBackend = types.StringNull()
-		}
-		if plan.EvictionPolicy.IsNull() || plan.EvictionPolicy.IsUnknown() {
-			plan.EvictionPolicy = types.StringNull()
-		}
-		plan.Stats = types.ObjectNull(providerschema.Stats{}.AttributeTypes())*/
-	return plan
+func isValidSampleName(category string) bool {
+	switch category {
+	case
+		"travel-sample",
+		"beer-sample",
+		"gamesim-sample":
+		return true
+	}
+	return false
+}

From 53568cb0ce79ed2fc129d707b5edf478cba97f61 Mon Sep 17 00:00:00 2001
From: Laura Silaja
Date: Fri, 1 Mar 2024 11:56:08 +0000
Subject: [PATCH 05/15] Sorted out formatting issue

---
 examples/sample_bucket/create_sample_bucket.tf | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/sample_bucket/create_sample_bucket.tf b/examples/sample_bucket/create_sample_bucket.tf
index 8d0bed58..d392a70d 100644
--- a/examples/sample_bucket/create_sample_bucket.tf
+++ b/examples/sample_bucket/create_sample_bucket.tf
@@ -7,8 +7,8 @@ output "samplebucket_id" {
 }
 
 resource "couchbase-capella_samplebucket" "new_samplebucket" {
-  name = var.samplebucket.name
-  organization_id = var.organization_id
-  project_id = var.project_id
-  cluster_id = var.cluster_id
+  name            = var.samplebucket.name
+  organization_id = var.organization_id
+  project_id      = var.project_id
+  cluster_id      = var.cluster_id
 }

From fb618096ef9068c7facd5b0ac30047239b9c7a0e Mon Sep 17 00:00:00 2001
From: Laura Silaja
Date: Fri, 1 Mar 2024 14:18:11 +0000
Subject: [PATCH 06/15] Added readme to the sample bucket example

---
 examples/sample_bucket/README.md | 611 +++++++++++++++++++++++++++++++
 1 file changed, 611 insertions(+)
 create mode 100644 examples/sample_bucket/README.md

diff --git a/examples/sample_bucket/README.md b/examples/sample_bucket/README.md
new file mode 100644
index 00000000..63f3ba8f
--- /dev/null
+++ b/examples/sample_bucket/README.md
@@ -0,0 +1,611 @@
+# Capella Sample Buckets Example
+This 
example shows how to create and manage sample Buckets in Capella. + +This creates a new bucket in the selected Capella cluster and lists existing sample buckets in the cluster. It uses the cluster ID to create and list buckets. + +To run, configure your Couchbase Capella provider as described in README in the root of this project. + + +# Example Walkthrough + +In this example, we are going to do the following. + +1. CREATE: Create a new sample bucket in Capella as stated in the `create_sample_bucket.tf` file. +2. UPDATE: Update the bucket configuration using Terraform. +3. LIST: List existing sample buckets in Capella as stated in the `list_sample_buckets.tf` file. +4. IMPORT: Import a bucket that exists in Capella but not in the terraform state file. +5. DELETE: Delete the newly created bucket from Capella. +c +If you check the `terraform.template.tfvars` file - Make sure you copy the file to `terraform.tfvars` and update the values of the variables as per the correct organization access. + + +## CREATE & LIST +### View the plan for the resources that Terraform will create + +Command: `terraform plan` + +Sample Output: + +``` +Terraform plan +╷ +│ Warning: Provider development overrides are in effect +│ +│ The following provider development overrides are set in the CLI configuration: +│ - couchbasecloud/couchbase-capella in /Users/$USER/workspace/code/Lagher0/terraform-provider-couchbase-capella/bin +│ +│ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. +╵ +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 0s + +Terraform used the selected providers to generate the following execution plan. 
Resource actions are indicated with the following symbols: + + create + +Terraform will perform the following actions: + + # couchbase-capella_samplebucket.new_samplebucket will be created + + resource "couchbase-capella_samplebucket" "new_samplebucket" { + + bucket_conflict_resolution = "seqno" + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + durability_level = "none" + + eviction_policy = (known after apply) + + flush = false + + id = (known after apply) + + memory_allocation_in_mb = 200 + + name = "gamesim-sample" + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + replicas = 1 + + stats = (known after apply) + + storage_backend = (known after apply) + + time_to_live_in_seconds = 0 + + type = "couchbase" + } + +Plan: 1 to add, 0 to change, 0 to destroy. + +Changes to Outputs: + + new_samplebucket = { + + bucket_conflict_resolution = "seqno" + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + durability_level = "none" + + eviction_policy = (known after apply) + + flush = false + + id = (known after apply) + + memory_allocation_in_mb = 200 + + name = "gamesim-sample" + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + replicas = 1 + + stats = (known after apply) + + storage_backend = (known after apply) + + time_to_live_in_seconds = 0 + + type = "couchbase" + } + + samplebucket_id = (known after apply) + + samplebuckets_list = { + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + data = null + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + } + +─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── + +Note: You didn't use the -out 
option to save this plan, so Terraform can't guarantee to take exactly these actions if you run "terraform apply" now. +``` + + +### Apply the Plan, in order to create a new sample Bucket + +Command: `terraform apply` + +``` +terraform apply +╷ +│ Warning: Provider development overrides are in effect +│ +│ The following provider development overrides are set in the CLI configuration: +│ - couchbasecloud/couchbase-capella in /Users/$USER/workspace/code/Lagher0/terraform-provider-couchbase-capella/bin +│ +│ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. +╵ +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s + +Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: + + create + +Terraform will perform the following actions: + + # couchbase-capella_samplebucket.new_samplebucket will be created + + resource "couchbase-capella_samplebucket" "new_samplebucket" { + + bucket_conflict_resolution = "seqno" + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + durability_level = "none" + + eviction_policy = (known after apply) + + flush = false + + id = (known after apply) + + memory_allocation_in_mb = 200 + + name = "gamesim-sample" + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + replicas = 1 + + stats = (known after apply) + + storage_backend = (known after apply) + + time_to_live_in_seconds = 0 + + type = "couchbase" + } + +Plan: 1 to add, 0 to change, 0 to destroy. 
+ +Changes to Outputs: + + new_samplebucket = { + + bucket_conflict_resolution = "seqno" + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + durability_level = "none" + + eviction_policy = (known after apply) + + flush = false + + id = (known after apply) + + memory_allocation_in_mb = 200 + + name = "gamesim-sample" + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + replicas = 1 + + stats = (known after apply) + + storage_backend = (known after apply) + + time_to_live_in_seconds = 0 + + type = "couchbase" + } + + samplebucket_id = (known after apply) + + samplebuckets_list = { + + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + data = null + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + } + +Do you want to perform these actions? + Terraform will perform the actions described above. + Only 'yes' will be accepted to approve. + + Enter a value: yes + +couchbase-capella_samplebucket.new_samplebucket: Creating... +couchbase-capella_samplebucket.new_samplebucket: Creation complete after 1s [id=Z2FtZXNpbS1zYW1wbGU=] + +Apply complete! Resources: 1 added, 0 changed, 0 destroyed. 
+ +Outputs: + +new_samplebucket = { + "bucket_conflict_resolution" = "seqno" + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "durability_level" = "none" + "eviction_policy" = "fullEviction" + "flush" = false + "id" = "Z2FtZXNpbS1zYW1wbGU=" + "memory_allocation_in_mb" = 200 + "name" = "gamesim-sample" + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "replicas" = 1 + "stats" = { + "disk_used_in_mib" = 0 + "item_count" = 0 + "memory_used_in_mib" = 0 + "ops_per_second" = 0 + } + "storage_backend" = "couchstore" + "time_to_live_in_seconds" = 0 + "type" = "couchbase" +} +samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" +samplebuckets_list = { + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "data" = tolist(null) /* of object */ + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" +} +``` + + + +### Note the Bucket ID for the new sample Bucket +Command: `terraform output new_bucket` + +Sample Output: +``` +terraform output new_samplebucket +{ + "bucket_conflict_resolution" = "seqno" + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "durability_level" = "none" + "eviction_policy" = "fullEviction" + "flush" = false + "id" = "Z2FtZXNpbS1zYW1wbGU=" + "memory_allocation_in_mb" = 200 + "name" = "gamesim-sample" + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "replicas" = 1 + "stats" = { + "disk_used_in_mib" = 0 + "item_count" = 0 + "memory_used_in_mib" = 0 + "ops_per_second" = 0 + } + "storage_backend" = "couchstore" + "time_to_live_in_seconds" = 0 + "type" = "couchbase" +} +``` + + +### List the resources that are present in the Terraform State file. 
+ +Command: `terraform state list` + +Sample Output: +``` + terraform state list +data.couchbase-capella_samplebuckets.existing_samplebuckets +couchbase-capella_samplebucket.new_samplebucket +``` + + +## IMPORT +### Remove the resource `new_samplebucket` from the Terraform State file + +Command: `terraform state rm couchbase-capella_samplebucket.new_samplebucket` + +Sample Output: + +``` +terraform state rm couchbase-capella_samplebucket.new_samplebucket +Removed couchbase-capella_samplebucket.new_samplebucket +Successfully removed 1 resource instance(s). +``` + + +### Now, let's import the resource in Terraform + +Command: `terraform import couchbase-capella_samplebucket.new_samplebucket id=,cluster_id=,project_id=,organization_id=` + +In this case, the complete command is: +`terraform import couchbase-capella_samplebucket.new_samplebucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025` + +``` +terraform import couchbase-capella_samplebucket.new_samplebucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025 +couchbase-capella_samplebucket.new_samplebucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... +couchbase-capella_samplebucket.new_samplebucket: Import prepared! + Prepared couchbase-capella_samplebucket for import +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +couchbase-capella_samplebucket.new_samplebucket: Refreshing state... 
[id=id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025] +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 2s + +Import successful! + +The resources that were imported are shown above. These resources are now in +your Terraform state and will henceforth be managed by Terraform. +``` + + +Here, we pass the IDs as a single comma-separated string. +The first ID in the string is the sample bucket ID i.e. the ID of the resource that we want to import. +The second ID is the cluster ID i.e. the ID of the cluster to which the sample bucket belongs. +The third ID is the project ID i.e. the ID of the project to which the cluster belongs. +The fourth ID is the organization ID i.e. the ID of the organization to which the project belongs. + +### Let's run a terraform plan to confirm that the import was successful and no resource states were impacted + +Command: `terraform plan` + +Sample Output: + +``` +terraform plan +╷ +│ Warning: Provider development overrides are in effect +│ +│ The following provider development overrides are set in the CLI configuration: +│ - couchbasecloud/couchbase-capella in /Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ +│ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. +╵ +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s + +No changes. Your infrastructure matches the configuration. + +Terraform has compared your real infrastructure against your configuration and found no differences, so no changes are needed. 
+``` + +## UPDATE +### Let us edit the terraform.tfvars file to change the bucket configuration settings. + +Sample buckets does not support update functionality. To update the terraform state it recreates the +sample bucket with the given changes + +Command: `terraform apply -var 'samplebucket={name="travel-sample"}'` + +Sample Output: + +``` + + terraform apply -var 'samplebucket={name="travel-sample"}' +╷ +│ Warning: Provider development overrides are in effect +│ +│ The following provider development overrides are set in the CLI configuration: +│ - couchbasecloud/couchbase-capella in /Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ +│ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. +╵ +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s + +Note: Objects have changed outside of Terraform + +Terraform detected the following changes made outside of Terraform since the last "terraform apply" which may have affected this plan: + + # couchbase-capella_samplebucket.new_samplebucket has changed + ~ resource "couchbase-capella_samplebucket" "new_samplebucket" { + id = "Z2FtZXNpbS1zYW1wbGU=" + name = "gamesim-sample" + ~ stats = { + ~ item_count = 196 -> 390 + ~ memory_used_in_mib = 20 -> 42 + # (2 unchanged attributes hidden) + } + # (12 unchanged attributes hidden) + } + + +Unless you have made equivalent changes to your configuration, or ignored the relevant attributes using ignore_changes, the following plan may include actions to undo or respond to these changes. 
+ +────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── + +Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: +-/+ destroy and then create replacement + +Terraform will perform the following actions: + + # couchbase-capella_samplebucket.new_samplebucket must be replaced +-/+ resource "couchbase-capella_samplebucket" "new_samplebucket" { + ~ eviction_policy = "fullEviction" -> (known after apply) + ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) + ~ name = "gamesim-sample" -> "travel-sample" # forces replacement + ~ stats = { + ~ disk_used_in_mib = 0 -> (known after apply) + ~ item_count = 390 -> (known after apply) + ~ memory_used_in_mib = 42 -> (known after apply) + ~ ops_per_second = 0 -> (known after apply) + } -> (known after apply) + ~ storage_backend = "couchstore" -> (known after apply) + # (10 unchanged attributes hidden) + } + +Plan: 1 to add, 0 to change, 1 to destroy. 
+ +Changes to Outputs: + ~ new_samplebucket = { + ~ eviction_policy = "fullEviction" -> (known after apply) + ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) + ~ name = "gamesim-sample" -> "travel-sample" + ~ stats = { + - disk_used_in_mib = 0 + - item_count = 196 + - memory_used_in_mib = 20 + - ops_per_second = 0 + } -> (known after apply) + ~ storage_backend = "couchstore" -> (known after apply) + # (10 unchanged attributes hidden) + } + ~ samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) + ~ samplebuckets_list = { + ~ data = [ + ~ { + ~ id = "dHJhdmVsLXNhbXBsZQ==" -> "Z2FtZXNpbS1zYW1wbGU=" + ~ name = "travel-sample" -> "gamesim-sample" + ~ stats = { + ~ disk_used_in_mib = 15 -> 0 + ~ item_count = 163 -> 390 + ~ memory_used_in_mib = 72 -> 42 + # (1 unchanged attribute hidden) + } + # (12 unchanged attributes hidden) + }, + ] + # (3 unchanged attributes hidden) + } + +Do you want to perform these actions? + Terraform will perform the actions described above. + Only 'yes' will be accepted to approve. + + Enter a value: yes + +couchbase-capella_samplebucket.new_samplebucket: Destroying... [id=Z2FtZXNpbS1zYW1wbGU=] +couchbase-capella_samplebucket.new_samplebucket: Destruction complete after 1s +couchbase-capella_samplebucket.new_samplebucket: Creating... +couchbase-capella_samplebucket.new_samplebucket: Creation complete after 0s [id=dHJhdmVsLXNhbXBsZQ==] + +Apply complete! Resources: 1 added, 0 changed, 1 destroyed. 
+ +Outputs: + +new_samplebucket = { + "bucket_conflict_resolution" = "seqno" + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "durability_level" = "none" + "eviction_policy" = "fullEviction" + "flush" = false + "id" = "dHJhdmVsLXNhbXBsZQ==" + "memory_allocation_in_mb" = 200 + "name" = "travel-sample" + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "replicas" = 1 + "stats" = { + "disk_used_in_mib" = 0 + "item_count" = 163 + "memory_used_in_mib" = 33 + "ops_per_second" = 0 + } + "storage_backend" = "couchstore" + "time_to_live_in_seconds" = 0 + "type" = "couchbase" +} +samplebucket_id = "dHJhdmVsLXNhbXBsZQ==" +samplebuckets_list = { + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "data" = tolist([ + { + "bucket_conflict_resolution" = "seqno" + "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "durability_level" = "none" + "eviction_policy" = "fullEviction" + "flush" = false + "id" = "Z2FtZXNpbS1zYW1wbGU=" + "memory_allocation_in_mb" = 200 + "name" = "gamesim-sample" + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "replicas" = 1 + "stats" = { + "disk_used_in_mib" = 0 + "item_count" = 390 + "memory_used_in_mib" = 42 + "ops_per_second" = 0 + } + "storage_backend" = "couchstore" + "time_to_live_in_seconds" = 0 + "type" = "couchbase" + }, + ]) + "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" +} +``` + +# DESTROY +### Finally, destroy the resources created by Terraform + +Command: `terraform destroy` + +Sample Output: + +``` +➜ sample_bucket git:(AV-70846_add_import_sample_data_set_apis) ✗ terraform destroy +╷ +│ Warning: Provider development overrides are in effect +│ +│ The following provider development overrides are set in the CLI configuration: +│ - couchbasecloud/couchbase-capella in 
/Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ +│ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. +╵ +data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... +couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=dHJhdmVsLXNhbXBsZQ==] +data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s + +Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: + - destroy + +Terraform will perform the following actions: + + # couchbase-capella_samplebucket.new_samplebucket will be destroyed + - resource "couchbase-capella_samplebucket" "new_samplebucket" { + - bucket_conflict_resolution = "seqno" -> null + - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" -> null + - durability_level = "none" -> null + - eviction_policy = "fullEviction" -> null + - flush = false -> null + - id = "dHJhdmVsLXNhbXBsZQ==" -> null + - memory_allocation_in_mb = 200 -> null + - name = "travel-sample" -> null + - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" -> null + - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" -> null + - replicas = 1 -> null + - stats = { + - disk_used_in_mib = 120 -> null + - item_count = 63288 -> null + - memory_used_in_mib = 165 -> null + - ops_per_second = 0 -> null + } -> null + - storage_backend = "couchstore" -> null + - time_to_live_in_seconds = 0 -> null + - type = "couchbase" -> null + } + +Plan: 0 to add, 0 to change, 1 to destroy. 
+ +Changes to Outputs: + - new_samplebucket = { + - bucket_conflict_resolution = "seqno" + - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - durability_level = "none" + - eviction_policy = "fullEviction" + - flush = false + - id = "dHJhdmVsLXNhbXBsZQ==" + - memory_allocation_in_mb = 200 + - name = "travel-sample" + - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + - replicas = 1 + - stats = { + - disk_used_in_mib = 0 + - item_count = 163 + - memory_used_in_mib = 33 + - ops_per_second = 0 + } + - storage_backend = "couchstore" + - time_to_live_in_seconds = 0 + - type = "couchbase" + } -> null + - samplebucket_id = "dHJhdmVsLXNhbXBsZQ==" -> null + - samplebuckets_list = { + - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - data = [ + - { + - bucket_conflict_resolution = "seqno" + - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - durability_level = "none" + - eviction_policy = "fullEviction" + - flush = false + - id = "dHJhdmVsLXNhbXBsZQ==" + - memory_allocation_in_mb = 200 + - name = "travel-sample" + - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + - replicas = 1 + - stats = { + - disk_used_in_mib = 120 + - item_count = 63288 + - memory_used_in_mib = 165 + - ops_per_second = 0 + } + - storage_backend = "couchstore" + - time_to_live_in_seconds = 0 + - type = "couchbase" + }, + ] + - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + } -> null + +Do you really want to destroy all resources? + Terraform will destroy all your managed infrastructure, as shown above. + There is no undo. Only 'yes' will be accepted to confirm. + + Enter a value: yes + +couchbase-capella_samplebucket.new_samplebucket: Destroying... [id=dHJhdmVsLXNhbXBsZQ==] +couchbase-capella_samplebucket.new_samplebucket: Destruction complete after 2s + +Destroy complete! 
Resources: 1 destroyed. +``` \ No newline at end of file From 292fb8855e3cfc7ccc83ae9c6162126b976ef295 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Fri, 1 Mar 2024 16:12:57 +0000 Subject: [PATCH 07/15] Generalised the path --- examples/sample_bucket/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/sample_bucket/README.md b/examples/sample_bucket/README.md index 63f3ba8f..75516ffc 100644 --- a/examples/sample_bucket/README.md +++ b/examples/sample_bucket/README.md @@ -310,7 +310,7 @@ terraform plan │ Warning: Provider development overrides are in effect │ │ The following provider development overrides are set in the CLI configuration: -│ - couchbasecloud/couchbase-capella in /Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ - couchbasecloud/couchbase-capella in $HOME/terraform-provider-couchbase-capella/bin │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ @@ -340,7 +340,7 @@ Sample Output: │ Warning: Provider development overrides are in effect │ │ The following provider development overrides are set in the CLI configuration: -│ - couchbasecloud/couchbase-capella in /Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ - couchbasecloud/couchbase-capella in $HOME/terraform-provider-couchbase-capella/bin │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. 
╵ @@ -505,7 +505,7 @@ Sample Output: │ Warning: Provider development overrides are in effect │ │ The following provider development overrides are set in the CLI configuration: -│ - couchbasecloud/couchbase-capella in /Users/laurasilaja/code/Lagher0/terraform-provider-couchbase-capella/bin +│ - couchbasecloud/couchbase-capella in $HOME/terraform-provider-couchbase-capella/bin │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ From c9e360808943930f963448f942cfbbf141a96575 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Tue, 5 Mar 2024 11:29:19 +0000 Subject: [PATCH 08/15] Added a separate schema for sample buckets and addressed various review comments --- examples/sample_bucket/README.md | 108 +++++++++--------- .../sample_bucket/create_sample_bucket.tf | 8 +- examples/sample_bucket/list_sample_buckets.tf | 4 +- .../sample_bucket/terraform.template.tfvars | 2 +- 4 files changed, 61 insertions(+), 61 deletions(-) diff --git a/examples/sample_bucket/README.md b/examples/sample_bucket/README.md index 75516ffc..5dd8a03e 100644 --- a/examples/sample_bucket/README.md +++ b/examples/sample_bucket/README.md @@ -36,16 +36,16 @@ Terraform plan │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 0s +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 0s Terraform used the selected providers to generate the following execution plan. 
Resource actions are indicated with the following symbols: + create Terraform will perform the following actions: - # couchbase-capella_samplebucket.new_samplebucket will be created - + resource "couchbase-capella_samplebucket" "new_samplebucket" { + # couchbase-capella_sample_bucket.new_sample_bucket will be created + + resource "couchbase-capella_sample_bucket" "new_sample_bucket" { + bucket_conflict_resolution = "seqno" + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + durability_level = "none" @@ -66,7 +66,7 @@ Terraform will perform the following actions: Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: - + new_samplebucket = { + + new_sample_bucket = { + bucket_conflict_resolution = "seqno" + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + durability_level = "none" @@ -111,16 +111,16 @@ terraform apply │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s Terraform used the selected providers to generate the following execution plan. 
Resource actions are indicated with the following symbols: + create Terraform will perform the following actions: - # couchbase-capella_samplebucket.new_samplebucket will be created - + resource "couchbase-capella_samplebucket" "new_samplebucket" { + # couchbase-capella_sample_bucket.new_sample_bucket will be created + + resource "couchbase-capella_sample_bucket" "new_sample_bucket" { + bucket_conflict_resolution = "seqno" + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + durability_level = "none" @@ -141,7 +141,7 @@ Terraform will perform the following actions: Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: - + new_samplebucket = { + + new_sample_bucket = { + bucket_conflict_resolution = "seqno" + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + durability_level = "none" @@ -172,14 +172,14 @@ Do you want to perform these actions? Enter a value: yes -couchbase-capella_samplebucket.new_samplebucket: Creating... -couchbase-capella_samplebucket.new_samplebucket: Creation complete after 1s [id=Z2FtZXNpbS1zYW1wbGU=] +couchbase-capella_sample_bucket.new_sample_bucket: Creating... +couchbase-capella_sample_bucket.new_sample_bucket: Creation complete after 1s [id=Z2FtZXNpbS1zYW1wbGU=] Apply complete! Resources: 1 added, 0 changed, 0 destroyed. 
Outputs: -new_samplebucket = { +new_sample_bucket = { "bucket_conflict_resolution" = "seqno" "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" "durability_level" = "none" @@ -213,11 +213,11 @@ samplebuckets_list = { ### Note the Bucket ID for the new sample Bucket -Command: `terraform output new_bucket` +Command: `terraform output new_sample_bucket` Sample Output: ``` -terraform output new_samplebucket +terraform output new_sample_bucket { "bucket_conflict_resolution" = "seqno" "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" @@ -250,40 +250,40 @@ Command: `terraform state list` Sample Output: ``` terraform state list -data.couchbase-capella_samplebuckets.existing_samplebuckets -couchbase-capella_samplebucket.new_samplebucket +data.couchbase-capella_sample_buckets.existing_sample_buckets +couchbase-capella_sample_bucket.new_sample_bucket ``` ## IMPORT -### Remove the resource `new_samplebucket` from the Terraform State file +### Remove the resource `new_sample_bucket` from the Terraform State file -Command: `terraform state rm couchbase-capella_samplebucket.new_samplebucket` +Command: `terraform state rm couchbase-capella_sample_bucket.new_sample_bucket` Sample Output: ``` -terraform state rm couchbase-capella_samplebucket.new_samplebucket -Removed couchbase-capella_samplebucket.new_samplebucket +terraform state rm couchbase-capella_sample_bucket.new_sample_bucket +Removed couchbase-capella_sample_bucket.new_sample_bucket Successfully removed 1 resource instance(s). 
``` ### Now, let's import the resource in Terraform -Command: `terraform import couchbase-capella_samplebucket.new_samplebucket id=,cluster_id=,project_id=,organization_id=` +Command: `terraform import couchbase-capella_sample_bucket.new_sample_bucket id=,cluster_id=,project_id=,organization_id=` In this case, the complete command is: -`terraform import couchbase-capella_samplebucket.new_samplebucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025` +`terraform import couchbase-capella_sample_bucket.new_sample_bucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025` ``` -terraform import couchbase-capella_samplebucket.new_samplebucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025 -couchbase-capella_samplebucket.new_samplebucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... -couchbase-capella_samplebucket.new_samplebucket: Import prepared! - Prepared couchbase-capella_samplebucket for import -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -couchbase-capella_samplebucket.new_samplebucket: Refreshing state... 
[id=id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025] -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 2s +terraform import couchbase-capella_sample_bucket.new_sample_bucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025 +couchbase-capella_sample_bucket.new_sample_bucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... +couchbase-capella_sample_bucket.new_sample_bucket: Import prepared! + Prepared couchbase-capella_sample_bucket for import +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025] +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 2s Import successful! @@ -314,9 +314,9 @@ terraform plan │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... 
[id=Z2FtZXNpbS1zYW1wbGU=] +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s No changes. Your infrastructure matches the configuration. @@ -344,16 +344,16 @@ Sample Output: │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s Note: Objects have changed outside of Terraform Terraform detected the following changes made outside of Terraform since the last "terraform apply" which may have affected this plan: - # couchbase-capella_samplebucket.new_samplebucket has changed - ~ resource "couchbase-capella_samplebucket" "new_samplebucket" { + # couchbase-capella_sample_bucket.new_sample_bucket has changed + ~ resource "couchbase-capella_sample_bucket" "new_sample_bucket" { id = "Z2FtZXNpbS1zYW1wbGU=" name = "gamesim-sample" ~ stats = { @@ -374,8 +374,8 @@ Terraform used the selected providers to generate the following execution plan. 
Terraform will perform the following actions: - # couchbase-capella_samplebucket.new_samplebucket must be replaced --/+ resource "couchbase-capella_samplebucket" "new_samplebucket" { + # couchbase-capella_sample_bucket.new_sample_bucket must be replaced +-/+ resource "couchbase-capella_sample_bucket" "new_sample_bucket" { ~ eviction_policy = "fullEviction" -> (known after apply) ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) ~ name = "gamesim-sample" -> "travel-sample" # forces replacement @@ -392,7 +392,7 @@ Terraform will perform the following actions: Plan: 1 to add, 0 to change, 1 to destroy. Changes to Outputs: - ~ new_samplebucket = { + ~ new_sample_bucket = { ~ eviction_policy = "fullEviction" -> (known after apply) ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) ~ name = "gamesim-sample" -> "travel-sample" @@ -429,16 +429,16 @@ Do you want to perform these actions? Enter a value: yes -couchbase-capella_samplebucket.new_samplebucket: Destroying... [id=Z2FtZXNpbS1zYW1wbGU=] -couchbase-capella_samplebucket.new_samplebucket: Destruction complete after 1s -couchbase-capella_samplebucket.new_samplebucket: Creating... -couchbase-capella_samplebucket.new_samplebucket: Creation complete after 0s [id=dHJhdmVsLXNhbXBsZQ==] +couchbase-capella_sample_bucket.new_sample_bucket: Destroying... [id=Z2FtZXNpbS1zYW1wbGU=] +couchbase-capella_sample_bucket.new_sample_bucket: Destruction complete after 1s +couchbase-capella_sample_bucket.new_sample_bucket: Creating... +couchbase-capella_sample_bucket.new_sample_bucket: Creation complete after 0s [id=dHJhdmVsLXNhbXBsZQ==] Apply complete! Resources: 1 added, 0 changed, 1 destroyed. 
Outputs: -new_samplebucket = { +new_sample_bucket = { "bucket_conflict_resolution" = "seqno" "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" "durability_level" = "none" @@ -509,17 +509,17 @@ Sample Output: │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. ╵ -data.couchbase-capella_samplebuckets.existing_samplebuckets: Reading... -couchbase-capella_samplebucket.new_samplebucket: Refreshing state... [id=dHJhdmVsLXNhbXBsZQ==] -data.couchbase-capella_samplebuckets.existing_samplebuckets: Read complete after 1s +data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=dHJhdmVsLXNhbXBsZQ==] +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: - destroy Terraform will perform the following actions: - # couchbase-capella_samplebucket.new_samplebucket will be destroyed - - resource "couchbase-capella_samplebucket" "new_samplebucket" { + # couchbase-capella_sample_bucket.new_sample_bucket will be destroyed + - resource "couchbase-capella_sample_bucket" "new_sample_bucket" { - bucket_conflict_resolution = "seqno" -> null - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" -> null - durability_level = "none" -> null @@ -545,7 +545,7 @@ Terraform will perform the following actions: Plan: 0 to add, 0 to change, 1 to destroy. Changes to Outputs: - - new_samplebucket = { + - new_sample_bucket = { - bucket_conflict_resolution = "seqno" - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" - durability_level = "none" @@ -604,8 +604,8 @@ Do you really want to destroy all resources? Enter a value: yes -couchbase-capella_samplebucket.new_samplebucket: Destroying... 
[id=dHJhdmVsLXNhbXBsZQ==] -couchbase-capella_samplebucket.new_samplebucket: Destruction complete after 2s +couchbase-capella_sample_bucket.new_sample_bucket: Destroying... [id=dHJhdmVsLXNhbXBsZQ==] +couchbase-capella_sample_bucket.new_sample_bucket: Destruction complete after 2s Destroy complete! Resources: 1 destroyed. ``` \ No newline at end of file diff --git a/examples/sample_bucket/create_sample_bucket.tf b/examples/sample_bucket/create_sample_bucket.tf index d392a70d..b58abeb0 100644 --- a/examples/sample_bucket/create_sample_bucket.tf +++ b/examples/sample_bucket/create_sample_bucket.tf @@ -1,12 +1,12 @@ -output "new_samplebucket" { - value = couchbase-capella_samplebucket.new_samplebucket +output "new_sample_bucket" { + value = couchbase-capella_sample_bucket.new_sample_bucket } output "samplebucket_id" { - value = couchbase-capella_samplebucket.new_samplebucket.id + value = couchbase-capella_sample_bucket.new_sample_bucket.id } -resource "couchbase-capella_samplebucket" "new_samplebucket" { +resource "couchbase-capella_sample_bucket" "new_sample_bucket" { name = var.samplebucket.name organization_id = var.organization_id project_id = var.project_id diff --git a/examples/sample_bucket/list_sample_buckets.tf b/examples/sample_bucket/list_sample_buckets.tf index 70214bed..c75d435d 100644 --- a/examples/sample_bucket/list_sample_buckets.tf +++ b/examples/sample_bucket/list_sample_buckets.tf @@ -1,8 +1,8 @@ output "samplebuckets_list" { - value = data.couchbase-capella_samplebuckets.existing_samplebuckets + value = data.couchbase-capella_sample_buckets.existing_sample_buckets } -data "couchbase-capella_samplebuckets" "existing_samplebuckets" { +data "couchbase-capella_sample_buckets" "existing_sample_buckets" { organization_id = var.organization_id project_id = var.project_id cluster_id = var.cluster_id diff --git a/examples/sample_bucket/terraform.template.tfvars b/examples/sample_bucket/terraform.template.tfvars index 84b7f668..8b225a0d 100644 --- 
a/examples/sample_bucket/terraform.template.tfvars +++ b/examples/sample_bucket/terraform.template.tfvars @@ -5,5 +5,5 @@ project_id = "" cluster_id = "" samplebucket = { - name = "new_terraform_samplebucket" + name = "new_terraform_sample_bucket" } From 26783156e5fc28401f92a7c462db7fb8f7190fda Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Tue, 5 Mar 2024 11:30:16 +0000 Subject: [PATCH 09/15] Addressed review comments --- internal/api/sample_bucket/sample_bucket.go | 2 +- internal/api/sample_bucket/stats.go | 2 +- internal/datasources/sample_buckets.go | 67 ++++---- internal/resources/sample_bucket.go | 155 ++++++++++------- internal/resources/sample_bucket_schema.go | 10 +- internal/schema/sample_bucket.go | 180 ++++++++++++++++++++ internal/schema/sample_bucket_test.go | 70 ++++++++ 7 files changed, 378 insertions(+), 108 deletions(-) create mode 100644 internal/schema/sample_bucket.go create mode 100644 internal/schema/sample_bucket_test.go diff --git a/internal/api/sample_bucket/sample_bucket.go b/internal/api/sample_bucket/sample_bucket.go index 095db154..0fac36fe 100644 --- a/internal/api/sample_bucket/sample_bucket.go +++ b/internal/api/sample_bucket/sample_bucket.go @@ -1,4 +1,4 @@ -package samplebucket +package sample_bucket // CreateSampleBucketRequest is the payload passed to V4 Capella Public API to create a bucket in a Capella cluster. // Creates a new sample bucket configuration under a cluster. diff --git a/internal/api/sample_bucket/stats.go b/internal/api/sample_bucket/stats.go index 2602bffb..f6006f3c 100644 --- a/internal/api/sample_bucket/stats.go +++ b/internal/api/sample_bucket/stats.go @@ -1,4 +1,4 @@ -package samplebucket +package sample_bucket // Stats are the bucket related statistics that are sent by the Capella V4 Public API for any existing bucket. 
type Stats struct { diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go index cea57a8a..75cb48c7 100644 --- a/internal/datasources/sample_buckets.go +++ b/internal/datasources/sample_buckets.go @@ -5,7 +5,7 @@ import ( "fmt" "net/http" - "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/bucket" + samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api" providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema" @@ -33,7 +33,7 @@ func NewSampleBuckets() datasource.DataSource { // Metadata returns the bucket data source type name. func (d *SampleBuckets) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_samplebuckets" + resp.TypeName = req.ProviderTypeName + "_sample_buckets" } // Schema defines the schema for the bucket data source. 
@@ -69,39 +69,30 @@ func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, re Computed: true, }, "type": schema.StringAttribute{ - Optional: true, Computed: true, }, "storage_backend": schema.StringAttribute{ - Optional: true, Computed: true, }, "memory_allocation_in_mb": schema.Int64Attribute{ - Optional: true, Computed: true, }, "bucket_conflict_resolution": schema.StringAttribute{ - Optional: true, Computed: true, }, "durability_level": schema.StringAttribute{ - Optional: true, Computed: true, }, "replicas": schema.Int64Attribute{ - Optional: true, Computed: true, }, "flush": schema.BoolAttribute{ - Optional: true, Computed: true, }, "time_to_live_in_seconds": schema.Int64Attribute{ - Optional: true, Computed: true, }, "eviction_policy": schema.StringAttribute{ - Optional: true, Computed: true, }, "stats": schema.SingleNestedAttribute{ @@ -130,7 +121,7 @@ func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, re // Read refreshes the Terraform state with the latest data of buckets. func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var state providerschema.Buckets + var state providerschema.SampleBuckets diags := req.Config.Get(ctx, &state) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { @@ -141,48 +132,52 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re if err != nil { resp.Diagnostics.AddError( "Error Reading SampleBuckets in Capella", - "Could not read Capella buckets in cluster "+clusterId+": "+err.Error(), + "Could not read Capella sample buckets in cluster "+clusterId+": "+err.Error(), ) return } url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets", d.HostURL, organizationId, projectId, clusterId) cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK} - - response, err := api.GetPaginated[[]bucket.GetBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById) + response, err := api.GetPaginated[[]samplebucketapi.GetSampleBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById) if err != nil { resp.Diagnostics.AddError( "Error Reading Capella SampleBuckets", - "Could not read buckets in cluster "+clusterId+": "+api.ParseError(err), + "Could not read sample buckets in cluster "+clusterId+": "+api.ParseError(err), ) return } // Map response body to model - for _, bucket := range response { - bucketState := providerschema.OneBucket{ - Id: types.StringValue(bucket.Id), - Name: types.StringValue(bucket.Name), - Type: types.StringValue(bucket.Type), + for _, sampleBucket := range response { + sampleStats := providerschema.NewStats(*sampleBucket.Stats) + sampleBucketStatsObj, diags := types.ObjectValueFrom(ctx, sampleStats.AttributeTypes(), sampleStats) + if diags.HasError() { + resp.Diagnostics.AddError( + "Error Reading Sample Bucket Info", + fmt.Sprintf("Could not read sample bucket info from record, unexpected error: %s", fmt.Errorf("error while sample bucket info conversion")), + ) + return + } + + sampleBucketState := providerschema.SampleBucket{ + Id: types.StringValue(sampleBucket.Id), + Name: types.StringValue(sampleBucket.Name), + Type: types.StringValue(sampleBucket.Type), OrganizationId: 
types.StringValue(organizationId), ProjectId: types.StringValue(projectId), ClusterId: types.StringValue(clusterId), - StorageBackend: types.StringValue(bucket.StorageBackend), - MemoryAllocationInMB: types.Int64Value(bucket.MemoryAllocationInMb), - BucketConflictResolution: types.StringValue(bucket.BucketConflictResolution), - DurabilityLevel: types.StringValue(bucket.DurabilityLevel), - Replicas: types.Int64Value(bucket.Replicas), - Flush: types.BoolValue(bucket.Flush), - TimeToLiveInSeconds: types.Int64Value(bucket.TimeToLiveInSeconds), - EvictionPolicy: types.StringValue(bucket.EvictionPolicy), - Stats: &providerschema.Stats{ - ItemCount: types.Int64Value(bucket.Stats.ItemCount), - OpsPerSecond: types.Int64Value(bucket.Stats.OpsPerSecond), - DiskUsedInMiB: types.Int64Value(bucket.Stats.DiskUsedInMib), - MemoryUsedInMiB: types.Int64Value(bucket.Stats.MemoryUsedInMib), - }, + StorageBackend: types.StringValue(sampleBucket.StorageBackend), + MemoryAllocationInMB: types.Int64Value(sampleBucket.MemoryAllocationInMb), + BucketConflictResolution: types.StringValue(sampleBucket.BucketConflictResolution), + DurabilityLevel: types.StringValue(sampleBucket.DurabilityLevel), + Replicas: types.Int64Value(sampleBucket.Replicas), + Flush: types.BoolValue(sampleBucket.Flush), + TimeToLiveInSeconds: types.Int64Value(sampleBucket.TimeToLiveInSeconds), + EvictionPolicy: types.StringValue(sampleBucket.EvictionPolicy), + Stats: sampleBucketStatsObj, } - state.Data = append(state.Data, bucketState) + state.Data = append(state.Data, sampleBucketState) } // Set state diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go index f8526031..cf92ead7 100644 --- a/internal/resources/sample_bucket.go +++ b/internal/resources/sample_bucket.go @@ -7,9 +7,10 @@ import ( "net/http" "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api" - samplebucket "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" 
+ samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema" + "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/types" @@ -23,22 +24,30 @@ var ( _ resource.ResourceWithImportState = &SampleBucket{} ) -// Samples is the samples resource implementation. +const errorMessageAfterSampleBucketCreation = "Sample bucket creation is successful, but encountered an error while checking the current" + + " state of the sample bucket. Please run `terraform plan` after 1-2 minutes to know the" + + " current sample bucket state. Additionally, run `terraform apply --refresh-only` to update" + + " the state from remote, unexpected error: " + +const errorMessageWhileSampleBucketCreation = "There is an error during sample bucket creation. Please check in Capella to see if any hanging resources" + + " have been created, unexpected error: " + +// SampleBucket is the sample bucket resource implementation. type SampleBucket struct { *providerschema.Data } -// NewSamples is a helper function to simplify the provider implementation. +// NewSampleBucket is a helper function to simplify the provider implementation. func NewSampleBucket() resource.Resource { return &SampleBucket{} } -// Metadata returns the samples resource type name. +// Metadata returns the SampleBucket resource type name. func (s *SampleBucket) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_samplebucket" + resp.TypeName = req.ProviderTypeName + "_sample_bucket" } -// Configure It adds the provider configured api to the project resource. 
+// Configure adds the configured client to the SampleBucket resource. func (s *SampleBucket) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { if req.ProviderData == nil { return @@ -58,19 +67,20 @@ func (s *SampleBucket) Configure(ctx context.Context, req resource.ConfigureRequ s.Data = data } -// ImportState imports a remote sample cluster that is not created by Terraform. +// ImportState imports a remote sample bucket that is not created by Terraform. func (s *SampleBucket) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { // Retrieve import ID and save to id attribute resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) } -// Schema defines the schema for the samples resource. +// Schema defines the schema for the SampleBucket resource. func (s *SampleBucket) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = SampleBucketSchema() } +// Create creates a new sample bucket func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var plan providerschema.Bucket + var plan providerschema.SampleBucket diags := req.Plan.Get(ctx, &plan) resp.Diagnostics.Append(diags...) 
@@ -78,7 +88,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r return } - BucketRequest := samplebucket.CreateSampleBucketRequest{ + sampleBucketRequest := samplebucketapi.CreateSampleBucketRequest{ Name: plan.Name.ValueString(), } if err := s.validateCreateBucket(plan); err != nil { @@ -98,40 +108,39 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r response, err := s.Client.ExecuteWithRetry( ctx, cfg, - BucketRequest, + sampleBucketRequest, s.Token, nil, ) if err != nil { resp.Diagnostics.AddError( - "Error creating bucket", - errorMessageWhileBucketCreation+api.ParseError(err), + "Error creating sample bucket", + errorMessageWhileSampleBucketCreation+api.ParseError(err), ) return } - BucketResponse := samplebucket.CreateSampleBucketResponse{} - err = json.Unmarshal(response.Body, &BucketResponse) + sampleBucketResponse := samplebucketapi.CreateSampleBucketResponse{} + err = json.Unmarshal(response.Body, &sampleBucketResponse) if err != nil { resp.Diagnostics.AddError( - "Error creating bucket", - errorMessageWhileBucketCreation+"error during unmarshalling: "+err.Error(), + "Error creating sample bucket", + errorMessageWhileSampleBucketCreation+"error during unmarshalling: "+err.Error(), ) return } - plan.Id = types.StringValue(BucketResponse.Id) - diags = resp.State.Set(ctx, plan) + diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, sampleBucketResponse.Id)) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return } - refreshedState, err := s.retrieveBucket(ctx, organizationId, projectId, clusterId, BucketResponse.Id) + refreshedState, err := s.retrieveSampleBucket(ctx, organizationId, projectId, clusterId, sampleBucketResponse.Id) if err != nil { resp.Diagnostics.AddWarning( - "Error creating bucket "+BucketResponse.Id, - errorMessageAfterBucketCreation+api.ParseError(err), + "Error creating sample bucket "+sampleBucketResponse.Id, + errorMessageAfterSampleBucketCreation+api.ParseError(err), ) return } @@ -144,8 +153,9 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r } } +// Read reads SampleBucket information. func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var state providerschema.Bucket + var state providerschema.SampleBucket diags := req.State.Get(ctx, &state) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { @@ -155,8 +165,8 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp IDs, err := state.Validate() if err != nil { resp.Diagnostics.AddError( - "Error Reading Bucket in Capella", - "Could not read Capella Bucket with ID "+state.Id.String()+": "+err.Error(), + "Error Reading SampleBucket in Capella", + "Could not read Capella sample Bucket with ID "+state.Id.String()+": "+err.Error(), ) return } @@ -168,7 +178,7 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp bucketId = IDs[providerschema.Id] ) - refreshedState, err := s.retrieveBucket(ctx, organizationId, projectId, clusterId, bucketId) + refreshedState, err := s.retrieveSampleBucket(ctx, organizationId, projectId, clusterId, bucketId) if err != nil { resourceNotFound, errString := api.CheckResourceNotFoundError(err) if resourceNotFound { @@ -177,8 +187,8 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp return } resp.Diagnostics.AddError( - "Error reading bucket", - 
"Could not read bucket with id "+state.Id.String()+": "+errString, + "Error reading Samplebucket", + "Could not read sample bucket with id "+state.Id.String()+": "+errString, ) return } @@ -192,7 +202,7 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp func (s *SampleBucket) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) { // Couchbase Capella's v4 does not support a PUT endpoint for sample buckets. - // Allowlists can only be created, read and deleted. + // SampleBuckets can only be created, read and deleted. // http://cbc-cp-api.s3-website-us-east-1.amazonaws.com/#tag/sampleBucket // // Note: In this situation, terraform apply will default to deleting and executing a new create. @@ -200,8 +210,9 @@ func (s *SampleBucket) Update(_ context.Context, _ resource.UpdateRequest, _ *re // https://developer.hashicorp.com/terraform/plugin/framework/resources/update } +// Delete deletes the SampleBucket func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var state providerschema.Bucket + var state providerschema.SampleBucket diags := req.State.Get(ctx, &state) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { @@ -210,8 +221,8 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.OrganizationId.IsNull() { resp.Diagnostics.AddError( - "Error creating bucket", - "Could not create bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), ) return } @@ -219,8 +230,8 @@ if state.ProjectId.IsNull() { resp.Diagnostics.AddError( - "Error creating bucket", - "Could not create bucket, unexpected error: "+errors.ErrProjectIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrProjectIdCannotBeEmpty.Error(), ) return } @@ -228,8 +239,8 @@ if state.ClusterId.IsNull() { resp.Diagnostics.AddError( - "Error creating bucket", - "Could not create bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return } @@ -237,8 +248,8 @@ if state.Id.IsNull() { resp.Diagnostics.AddError( - "Error creating bucket", - "Could not create bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return } @@ -261,14 +272,14 @@ return } resp.Diagnostics.AddError( - "Error Deleting the Bucket", - "Could not delete Bucket associated with cluster "+clusterId+": "+errString, + "Error Deleting the SampleBucket", 
"Could not delete sample Bucket associated with cluster "+clusterId+": "+errString, ) return } } -func (r *SampleBucket) validateCreateBucket(plan providerschema.Bucket) error { +func (r *SampleBucket) validateCreateBucket(plan providerschema.SampleBucket) error { if plan.OrganizationId.IsNull() { return errors.ErrOrganizationIdMissing } @@ -278,10 +289,10 @@ func (r *SampleBucket) validateCreateBucket(plan providerschema.Bucket) error { if plan.ClusterId.IsNull() { return errors.ErrClusterIdMissing } - return r.validateBucketName(plan) + return r.validateSampleBucketName(plan) } -func (r *SampleBucket) validateBucketName(plan providerschema.Bucket) error { +func (r *SampleBucket) validateSampleBucketName(plan providerschema.SampleBucket) error { if (!plan.Name.IsNull() && !plan.Name.IsUnknown()) && !providerschema.IsTrimmed(plan.Name.ValueString()) { return fmt.Errorf("name %s", errors.ErrNotTrimmed) } @@ -293,8 +304,8 @@ func (r *SampleBucket) validateBucketName(plan providerschema.Bucket) error { return nil } -// retrieveBucket retrieves bucket information for a specified organization, project, cluster and bucket ID. -func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, projectId, clusterId, bucketId string) (*providerschema.OneBucket, error) { +// retrieveSampleBucket retrieves sample bucket information for a specified organization, project, cluster and sample bucket ID. 
+func (s *SampleBucket) retrieveSampleBucket(ctx context.Context, organizationId, projectId, clusterId, bucketId string) (*providerschema.SampleBucket, error) { url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets/%s", s.HostURL, organizationId, projectId, clusterId, bucketId) cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK} response, err := s.Client.ExecuteWithRetry( @@ -308,32 +319,33 @@ func (s *SampleBucket) retrieveBucket(ctx context.Context, organizationId, proje return nil, fmt.Errorf("%s: %w", errors.ErrExecutingRequest, err) } - bucketResp := samplebucket.GetSampleBucketResponse{} - err = json.Unmarshal(response.Body, &bucketResp) + sampleBucketResp := samplebucketapi.GetSampleBucketResponse{} + err = json.Unmarshal(response.Body, &sampleBucketResp) if err != nil { return nil, fmt.Errorf("%s: %w", errors.ErrUnmarshallingResponse, err) } - refreshedState := providerschema.OneBucket{ - Id: types.StringValue(bucketResp.Id), - Name: types.StringValue(bucketResp.Name), + sampleStats := providerschema.NewStats(*sampleBucketResp.Stats) + sampleBucketStatsObj, diags := types.ObjectValueFrom(ctx, sampleStats.AttributeTypes(), sampleStats) + if diags.HasError() { + return nil, errors.ErrUnableToConvertAuditData + } + + refreshedState := providerschema.SampleBucket{ + Id: types.StringValue(sampleBucketResp.Id), + Name: types.StringValue(sampleBucketResp.Name), OrganizationId: types.StringValue(organizationId), ProjectId: types.StringValue(projectId), ClusterId: types.StringValue(clusterId), - Type: types.StringValue(bucketResp.Type), - StorageBackend: types.StringValue(bucketResp.StorageBackend), - MemoryAllocationInMB: types.Int64Value(bucketResp.MemoryAllocationInMb), - BucketConflictResolution: types.StringValue(bucketResp.BucketConflictResolution), - DurabilityLevel: types.StringValue(bucketResp.DurabilityLevel), - Replicas: types.Int64Value(bucketResp.Replicas), - Flush: 
types.BoolValue(bucketResp.Flush), - TimeToLiveInSeconds: types.Int64Value(bucketResp.TimeToLiveInSeconds), - EvictionPolicy: types.StringValue(bucketResp.EvictionPolicy), - Stats: &providerschema.Stats{ - ItemCount: types.Int64Value(bucketResp.Stats.ItemCount), - OpsPerSecond: types.Int64Value(bucketResp.Stats.OpsPerSecond), - DiskUsedInMiB: types.Int64Value(bucketResp.Stats.DiskUsedInMib), - MemoryUsedInMiB: types.Int64Value(bucketResp.Stats.MemoryUsedInMib), - }, + Type: types.StringValue(sampleBucketResp.Type), + StorageBackend: types.StringValue(sampleBucketResp.StorageBackend), + MemoryAllocationInMB: types.Int64Value(sampleBucketResp.MemoryAllocationInMb), + BucketConflictResolution: types.StringValue(sampleBucketResp.BucketConflictResolution), + DurabilityLevel: types.StringValue(sampleBucketResp.DurabilityLevel), + Replicas: types.Int64Value(sampleBucketResp.Replicas), + Flush: types.BoolValue(sampleBucketResp.Flush), + TimeToLiveInSeconds: types.Int64Value(sampleBucketResp.TimeToLiveInSeconds), + EvictionPolicy: types.StringValue(sampleBucketResp.EvictionPolicy), + Stats: sampleBucketStatsObj, } return &refreshedState, nil @@ -349,3 +361,16 @@ func isValidSampleName(category string) bool { } return false } + +// initializeSampleBucketWithPlanAndId initializes an instance of providerschema.SampleBucket +// with the specified plan and ID. It marks unknown computed fields as null. 
+func initializeSampleBucketWithPlanAndId(plan providerschema.SampleBucket, id string) providerschema.SampleBucket { + plan.Id = types.StringValue(id) + if plan.StorageBackend.IsNull() || plan.StorageBackend.IsUnknown() { + plan.StorageBackend = types.StringNull() + } + if plan.EvictionPolicy.IsNull() || plan.EvictionPolicy.IsUnknown() { + plan.EvictionPolicy = types.StringNull() + } + return plan +} diff --git a/internal/resources/sample_bucket_schema.go b/internal/resources/sample_bucket_schema.go index 6db4e23d..8e61eb85 100644 --- a/internal/resources/sample_bucket_schema.go +++ b/internal/resources/sample_bucket_schema.go @@ -21,12 +21,12 @@ func SampleBucketSchema() schema.Schema { "cluster_id": stringAttribute(required, requiresReplace), "type": stringDefaultAttribute("couchbase", optional, computed, requiresReplace, useStateForUnknown), "storage_backend": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), - "memory_allocation_in_mb": int64DefaultAttribute(200, optional, computed), + "memory_allocation_in_mb": int64DefaultAttribute(200, optional, computed, requiresReplace), "bucket_conflict_resolution": stringDefaultAttribute("seqno", optional, computed, requiresReplace, useStateForUnknown), - "durability_level": stringDefaultAttribute("none", optional, computed), - "replicas": int64DefaultAttribute(1, optional, computed), - "flush": boolDefaultAttribute(false, optional, computed), - "time_to_live_in_seconds": int64DefaultAttribute(0, optional, computed), + "durability_level": stringDefaultAttribute("none", optional, computed, requiresReplace), + "replicas": int64DefaultAttribute(1, optional, computed, requiresReplace), + "flush": boolDefaultAttribute(false, optional, computed, requiresReplace), + "time_to_live_in_seconds": int64DefaultAttribute(0, optional, computed, requiresReplace), "eviction_policy": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), "stats": schema.SingleNestedAttribute{ Computed: true, diff 
--git a/internal/schema/sample_bucket.go b/internal/schema/sample_bucket.go new file mode 100644 index 00000000..5aa72492 --- /dev/null +++ b/internal/schema/sample_bucket.go @@ -0,0 +1,180 @@ +package schema + +import ( + "fmt" + + samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" + + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +type SampleBucket struct { + // DurabilityLevel is the minimum level at which all writes to the bucket must occur. + // Default: "none" + // Enum: "none" "majority" "majorityAndPersistActive" "persistToMajority" + // + // The options for Durability level are as follows, according to the bucket type. + // + // For a Couchbase bucket: + // None + // Replicate to Majority + // Majority and Persist to Active + // Persist to Majority + // + //For an Ephemeral bucket: + // None + // Replicate to Majority + DurabilityLevel types.String `tfsdk:"durability_level"` + + // Stats has the bucket stats that are related to memory and disk consumption. + // itemCount: Number of documents in the bucket. + // opsPerSecond: Number of operations per second. + // diskUsedInMib: The amount of disk used (in MiB). + // memoryUsedInMib: The amount of memory used (in MiB). + Stats types.Object `tfsdk:"stats"` + + // Type defines the type of the bucket. + // Default: "couchbase" + // Enum: "couchbase" "ephemeral" + // If selected Ephemeral, it is not eligible for imports or App Endpoints creation. This field cannot be changed later. + // The options may also be referred to as Memory and Disk (Couchbase), Memory Only (Ephemeral) in the Couchbase documentation. 
+ // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket + Type types.String `tfsdk:"type"` + + // StorageBackend defines the storage engine that is used by the bucket. + // Default: "couchstore" + // Enum: "couchstore" "magma" + // + // Ephemeral buckets do not support StorageBackend, hence not applicable for Ephemeral buckets and throws an error if this field is added. + // This field is only applicable for a Couchbase bucket. The default value mentioned (Couchstore) is for Couchbase bucket. + // This field cannot be changed later. + // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html + StorageBackend types.String `tfsdk:"storage_backend"` + + // ClusterId is the ID of the cluster for which the database credential needs to be created. + ClusterId types.String `tfsdk:"cluster_id"` + + // BucketConflictResolution is the means by which conflicts are resolved during replication. + // Default: "seqno" + // Enum: "seqno" "lww" + // This field may be referred to as "conflict resolution" in the Couchbase documentation. + // seqno and lww may be referred to as "sequence number" and "timestamp" respectively. + // This field cannot be changed later. + // To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution + BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` + + // Name is the name of the bucket. + Name types.String `tfsdk:"name"` + + // ProjectId is the ID of the project to which the Capella cluster belongs. + // The database credential will be created for the cluster. + ProjectId types.String `tfsdk:"project_id"` + + // Id is the id of the created bucket. + Id types.String `tfsdk:"id"` + + // OrganizationId is the ID of the organization to which the Capella cluster belongs. + // The database credential will be created for the cluster. 
+ OrganizationId types.String `tfsdk:"organization_id"` + + // EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion. + // This may be also known as Ejection Policy in the Couchbase documentation. + // + // For Couchbase bucket, Eviction Policy is fullEviction by default. + // For Ephemeral buckets, Eviction Policy is a required field, and should be one of the following: + // noEviction + // nruEviction + // Default: "fullEviction" + // Enum: "fullEviction" "noEviction" "nruEviction" + // To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy + EvictionPolicy types.String `tfsdk:"eviction_policy"` + + // MemoryAllocationInMB is the amount of memory to allocate for the bucket memory in MiB. + // This is the maximum limit is dependent on the allocation of the KV service. For example, 80% of the allocation. + // Default: 100 + // + // The default value (100MiB) mentioned is for Couchbase type buckets with Couchstore as the Storage Backend. + // + // For Couchbase buckets, the default and minimum memory allocation changes according to the Storage Backend type as follows: + // For Couchstore, the default and minimum memory allocation is 100 MiB. + // For Magma, the default and minimum memory allocation is 1024 MiB. + // For Ephemeral buckets, the default and minimum memory allocation is 100 MiB. + MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` + + // TimeToLiveInSeconds specifies the time to live (TTL) value in seconds. + // This is the maximum time to live for items in the bucket. + // Default is 0, that means TTL is disabled. This is a non-negative value. + TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` + + // Replicas is the number of replicas for the bucket. + // Default: 1 + // Enum: 1 2 3 + Replicas types.Int64 `tfsdk:"replicas"` + + // Flush determines whether flushing is enabled on the bucket. 
+ // Enable Flush to delete all items in this bucket at the earliest opportunity. + // Disable Flush to avoid inadvertent data loss. + // Default: false + Flush types.Bool `tfsdk:"flush"` +} + +// SampleBuckets defines attributes for the LIST buckets response received from V4 Capella Public API. +type SampleBuckets struct { + // OrganizationId The organizationId of the capella. + OrganizationId types.String `tfsdk:"organization_id"` + + // ProjectId is the projectId of the capella tenant. + ProjectId types.String `tfsdk:"project_id"` + + // ClusterId is the clusterId of the capella tenant. + ClusterId types.String `tfsdk:"cluster_id"` + + // Data It contains the list of resources. + Data []SampleBucket `tfsdk:"data"` +} + +// Validate will split the IDs by a delimiter i.e. comma , in case a terraform import CLI is invoked. +// The format of the terraform import CLI would include the IDs as follows - +// `terraform import capella_bucket.new_bucket id=,cluster_id=,project_id=,organization_id=`. +func (b SampleBucket) Validate() (map[Attr]string, error) { + state := map[Attr]basetypes.StringValue{ + OrganizationId: b.OrganizationId, + ProjectId: b.ProjectId, + ClusterId: b.ClusterId, + Id: b.Id, + } + + IDs, err := validateSchemaState(state) + if err != nil { + return nil, fmt.Errorf("%s: %w", errors.ErrValidatingResource, err) + } + + return IDs, nil +} + +// Validate is used to verify that all the fields in the datasource +// have been populated. +func (b SampleBuckets) Validate() (clusterId, projectId, organizationId string, err error) { + if b.OrganizationId.IsNull() { + return "", "", "", errors.ErrOrganizationIdMissing + } + if b.ProjectId.IsNull() { + return "", "", "", errors.ErrProjectIdMissing + } + if b.ClusterId.IsNull() { + return "", "", "", errors.ErrClusterIdMissing + } + return b.ClusterId.ValueString(), b.ProjectId.ValueString(), b.OrganizationId.ValueString(), nil +} + +// NewStats creates a new Stats data object. 
+func NewStats(stats samplebucketapi.Stats) Stats { + return Stats{ + ItemCount: types.Int64Value(stats.ItemCount), + OpsPerSecond: types.Int64Value(stats.OpsPerSecond), + DiskUsedInMiB: types.Int64Value(stats.DiskUsedInMib), + MemoryUsedInMiB: types.Int64Value(stats.MemoryUsedInMib), + } +} diff --git a/internal/schema/sample_bucket_test.go b/internal/schema/sample_bucket_test.go new file mode 100644 index 00000000..3c565d7c --- /dev/null +++ b/internal/schema/sample_bucket_test.go @@ -0,0 +1,70 @@ +package schema + +import ( + "testing" + + "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" + + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/stretchr/testify/assert" +) + +func TestSampleBucketSchemaValidate(t *testing.T) { + type test struct { + expectedErr error + name string + expectedProjectId string + expectedOrganizationId string + expectedClusterId string + expectedBucketId string + input SampleBucket + } + + tests := []test{ + { + name: "[POSITIVE] project ID, organization ID, cluster ID, bucket ID are passed via terraform apply", + input: SampleBucket{ + Id: basetypes.NewStringValue("100"), + ClusterId: basetypes.NewStringValue("200"), + ProjectId: basetypes.NewStringValue("300"), + OrganizationId: basetypes.NewStringValue("400"), + }, + expectedBucketId: "100", + expectedClusterId: "200", + expectedProjectId: "300", + expectedOrganizationId: "400", + }, + { + name: "[POSITIVE] IDs are passed via terraform import", + input: SampleBucket{ + Id: basetypes.NewStringValue("id=100,cluster_id=200,project_id=300,organization_id=400"), + }, + expectedBucketId: "100", + expectedClusterId: "200", + expectedProjectId: "300", + expectedOrganizationId: "400", + }, + { + name: "[NEGATIVE] only bucket ID is passed via terraform apply", + input: SampleBucket{ + Id: basetypes.NewStringValue("200"), + }, + expectedErr: errors.ErrInvalidImport, + }, + } + for _, test := range tests { + t.Run(test.name, func(t 
*testing.T) { + IDs, err := test.input.Validate() + + if test.expectedErr != nil { + assert.ErrorContains(t, err, test.expectedErr.Error()) + return + } + + assert.Equal(t, test.expectedBucketId, IDs[Id]) + assert.Equal(t, test.expectedClusterId, IDs[ClusterId]) + assert.Equal(t, test.expectedProjectId, IDs[ProjectId]) + assert.Equal(t, test.expectedOrganizationId, IDs[OrganizationId]) + }) + } +} From 8c45d10583f34091165ca53f2969bd82b1665540 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Tue, 5 Mar 2024 11:44:26 +0000 Subject: [PATCH 10/15] Docstring format change --- internal/resources/sample_bucket.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go index cf92ead7..15f6a4cc 100644 --- a/internal/resources/sample_bucket.go +++ b/internal/resources/sample_bucket.go @@ -78,7 +78,7 @@ func (s *SampleBucket) Schema(_ context.Context, _ resource.SchemaRequest, resp resp.Schema = SampleBucketSchema() } -// Create creates a new sample bucket +// Create creates a new sample bucket. func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var plan providerschema.SampleBucket diags := req.Plan.Get(ctx, &plan) @@ -210,7 +210,7 @@ func (s *SampleBucket) Update(_ context.Context, _ resource.UpdateRequest, _ *re // https://developer.hashicorp.com/terraform/plugin/framework/resources/update } -// Delete deletes the SampleBucket +// Delete deletes the SampleBucket. 
func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { var state providerschema.SampleBucket diags := req.State.Get(ctx, &state) From 50bbab359ffd6471c89c43663a4f81c969d8b2fc Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Fri, 8 Mar 2024 10:16:40 +0000 Subject: [PATCH 11/15] Updated the sample bucket schema --- internal/datasources/sample_buckets.go | 3 +-- internal/resources/sample_bucket_schema.go | 18 +++++++++--------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go index 75cb48c7..92103a07 100644 --- a/internal/datasources/sample_buckets.go +++ b/internal/datasources/sample_buckets.go @@ -5,9 +5,8 @@ import ( "fmt" "net/http" - samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" - "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api" + samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket" providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema" "github.com/hashicorp/terraform-plugin-framework/datasource" diff --git a/internal/resources/sample_bucket_schema.go b/internal/resources/sample_bucket_schema.go index 8e61eb85..275033ba 100644 --- a/internal/resources/sample_bucket_schema.go +++ b/internal/resources/sample_bucket_schema.go @@ -19,15 +19,15 @@ func SampleBucketSchema() schema.Schema { "organization_id": stringAttribute(required, requiresReplace), "project_id": stringAttribute(required, requiresReplace), "cluster_id": stringAttribute(required, requiresReplace), - "type": stringDefaultAttribute("couchbase", optional, computed, requiresReplace, useStateForUnknown), - "storage_backend": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), - "memory_allocation_in_mb": int64DefaultAttribute(200, optional, computed, 
requiresReplace), - "bucket_conflict_resolution": stringDefaultAttribute("seqno", optional, computed, requiresReplace, useStateForUnknown), - "durability_level": stringDefaultAttribute("none", optional, computed, requiresReplace), - "replicas": int64DefaultAttribute(1, optional, computed, requiresReplace), - "flush": boolDefaultAttribute(false, optional, computed, requiresReplace), - "time_to_live_in_seconds": int64DefaultAttribute(0, optional, computed, requiresReplace), - "eviction_policy": stringAttribute(optional, computed, requiresReplace, useStateForUnknown), + "type": stringAttribute(computed), + "storage_backend": stringAttribute(computed), + "memory_allocation_in_mb": int64Attribute(computed), + "bucket_conflict_resolution": stringAttribute(computed), + "durability_level": stringAttribute(computed), + "replicas": int64Attribute(computed), + "flush": boolAttribute(computed), + "time_to_live_in_seconds": int64Attribute(computed), + "eviction_policy": stringAttribute(computed), "stats": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ From b7ecd6ab7168cd80fb5b4b3595c46e45686a5181 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Fri, 8 Mar 2024 10:53:39 +0000 Subject: [PATCH 12/15] Updated the example sample bucket readme --- examples/sample_bucket/README.md | 237 +++++++++++++++++-------------- 1 file changed, 131 insertions(+), 106 deletions(-) diff --git a/examples/sample_bucket/README.md b/examples/sample_bucket/README.md index 5dd8a03e..6e96217c 100644 --- a/examples/sample_bucket/README.md +++ b/examples/sample_bucket/README.md @@ -35,9 +35,8 @@ Terraform plan │ - couchbasecloud/couchbase-capella in /Users/$USER/workspace/code/Lagher0/terraform-provider-couchbase-capella/bin │ │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. -╵ -data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... 
-data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 0s +╵data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: + create @@ -46,46 +45,46 @@ Terraform will perform the following actions: # couchbase-capella_sample_bucket.new_sample_bucket will be created + resource "couchbase-capella_sample_bucket" "new_sample_bucket" { - + bucket_conflict_resolution = "seqno" - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" - + durability_level = "none" + + bucket_conflict_resolution = (known after apply) + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + + durability_level = (known after apply) + eviction_policy = (known after apply) - + flush = false + + flush = (known after apply) + id = (known after apply) - + memory_allocation_in_mb = 200 + + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - + replicas = 1 + + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) - + time_to_live_in_seconds = 0 - + type = "couchbase" + + time_to_live_in_seconds = (known after apply) + + type = (known after apply) } Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: - + new_sample_bucket = { - + bucket_conflict_resolution = "seqno" - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" - + durability_level = "none" + + new_sample_bucket = { + + bucket_conflict_resolution = (known after apply) + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + + durability_level = (known after apply) + eviction_policy = (known after apply) - + flush = false + + flush = (known after apply) + id = (known after apply) - + memory_allocation_in_mb = 200 + + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - + replicas = 1 + + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) - + time_to_live_in_seconds = 0 - + type = "couchbase" + + time_to_live_in_seconds = (known after apply) + + type = (known after apply) } + samplebucket_id = (known after apply) + samplebuckets_list = { - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + data = null + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" @@ -121,46 +120,46 @@ Terraform will perform the following actions: # couchbase-capella_sample_bucket.new_sample_bucket will be created + resource "couchbase-capella_sample_bucket" "new_sample_bucket" { - + bucket_conflict_resolution = "seqno" - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" - + durability_level = "none" + + bucket_conflict_resolution = (known after apply) + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + + durability_level = (known after apply) + eviction_policy = (known after apply) - + flush = false + + flush = (known after apply) + id = (known after apply) - + memory_allocation_in_mb = 200 + + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" + organization_id = 
"6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - + replicas = 1 + + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) - + time_to_live_in_seconds = 0 - + type = "couchbase" + + time_to_live_in_seconds = (known after apply) + + type = (known after apply) } Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: - + new_sample_bucket = { - + bucket_conflict_resolution = "seqno" - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" - + durability_level = "none" + + new_sample_bucket = { + + bucket_conflict_resolution = (known after apply) + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + + durability_level = (known after apply) + eviction_policy = (known after apply) - + flush = false + + flush = (known after apply) + id = (known after apply) - + memory_allocation_in_mb = 200 + + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - + replicas = 1 + + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) - + time_to_live_in_seconds = 0 - + type = "couchbase" + + time_to_live_in_seconds = (known after apply) + + type = (known after apply) } + samplebucket_id = (known after apply) + samplebuckets_list = { - + cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + data = null + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" @@ -173,7 +172,7 @@ Do you want to perform these actions? Enter a value: yes couchbase-capella_sample_bucket.new_sample_bucket: Creating... 
-couchbase-capella_sample_bucket.new_sample_bucket: Creation complete after 1s [id=Z2FtZXNpbS1zYW1wbGU=] +couchbase-capella_sample_bucket.new_sample_bucket: Creation complete after 0s [id=Z2FtZXNpbS1zYW1wbGU=] Apply complete! Resources: 1 added, 0 changed, 0 destroyed. @@ -181,7 +180,7 @@ Outputs: new_sample_bucket = { "bucket_conflict_resolution" = "seqno" - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "durability_level" = "none" "eviction_policy" = "fullEviction" "flush" = false @@ -193,8 +192,8 @@ new_sample_bucket = { "replicas" = 1 "stats" = { "disk_used_in_mib" = 0 - "item_count" = 0 - "memory_used_in_mib" = 0 + "item_count" = 209 + "memory_used_in_mib" = 20 "ops_per_second" = 0 } "storage_backend" = "couchstore" @@ -203,11 +202,12 @@ new_sample_bucket = { } samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" samplebuckets_list = { - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "data" = tolist(null) /* of object */ "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" } + ``` @@ -220,20 +220,20 @@ Sample Output: terraform output new_sample_bucket { "bucket_conflict_resolution" = "seqno" - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "durability_level" = "none" "eviction_policy" = "fullEviction" "flush" = false "id" = "Z2FtZXNpbS1zYW1wbGU=" - "memory_allocation_in_mb" = 200 + "memory_allocation_in_mb" = 250 "name" = "gamesim-sample" "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" "replicas" = 1 "stats" = { - "disk_used_in_mib" = 0 - "item_count" = 0 - "memory_used_in_mib" = 0 + "disk_used_in_mib" = 19 + "item_count" = 586 + "memory_used_in_mib" = 62 "ops_per_second" = 0 } "storage_backend" = "couchstore" @@ -274,10 +274,10 @@ Successfully removed 1 resource 
instance(s). Command: `terraform import couchbase-capella_sample_bucket.new_sample_bucket id=,cluster_id=,project_id=,organization_id=` In this case, the complete command is: -`terraform import couchbase-capella_sample_bucket.new_sample_bucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025` +`terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034` ``` -terraform import couchbase-capella_sample_bucket.new_sample_bucket id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025 +terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034 couchbase-capella_sample_bucket.new_sample_bucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... couchbase-capella_sample_bucket.new_sample_bucket: Import prepared! Prepared couchbase-capella_sample_bucket for import @@ -326,8 +326,7 @@ Terraform has compared your real infrastructure against your configuration and f ## UPDATE ### Let us edit the terraform.tfvars file to change the bucket configuration settings. -Sample buckets does not support update functionality. To update the terraform state it recreates the -sample bucket with the given changes +Sample buckets does not support update functionality. 
We can only change the sample bucket name which destroys the current sample bucket and creates a new one. Command: `terraform apply -var 'samplebucket={name="travel-sample"}'` @@ -357,9 +356,10 @@ Terraform detected the following changes made outside of Terraform since the las id = "Z2FtZXNpbS1zYW1wbGU=" name = "gamesim-sample" ~ stats = { - ~ item_count = 196 -> 390 - ~ memory_used_in_mib = 20 -> 42 - # (2 unchanged attributes hidden) + ~ disk_used_in_mib = 0 -> 19 + ~ item_count = 209 -> 586 + ~ memory_used_in_mib = 20 -> 62 + # (1 unchanged attribute hidden) } # (12 unchanged attributes hidden) } @@ -367,7 +367,7 @@ Terraform detected the following changes made outside of Terraform since the las Unless you have made equivalent changes to your configuration, or ignored the relevant attributes using ignore_changes, the following plan may include actions to undo or respond to these changes. -────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── +─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── Terraform used the selected providers to generate the following execution plan. 
Resource actions are indicated with the following symbols: -/+ destroy and then create replacement @@ -376,48 +376,73 @@ Terraform will perform the following actions: # couchbase-capella_sample_bucket.new_sample_bucket must be replaced -/+ resource "couchbase-capella_sample_bucket" "new_sample_bucket" { + ~ bucket_conflict_resolution = "seqno" -> (known after apply) + ~ durability_level = "none" -> (known after apply) ~ eviction_policy = "fullEviction" -> (known after apply) + ~ flush = false -> (known after apply) ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) + ~ memory_allocation_in_mb = 200 -> (known after apply) ~ name = "gamesim-sample" -> "travel-sample" # forces replacement + ~ replicas = 1 -> (known after apply) ~ stats = { - ~ disk_used_in_mib = 0 -> (known after apply) - ~ item_count = 390 -> (known after apply) - ~ memory_used_in_mib = 42 -> (known after apply) + ~ disk_used_in_mib = 19 -> (known after apply) + ~ item_count = 586 -> (known after apply) + ~ memory_used_in_mib = 62 -> (known after apply) ~ ops_per_second = 0 -> (known after apply) } -> (known after apply) ~ storage_backend = "couchstore" -> (known after apply) - # (10 unchanged attributes hidden) + ~ time_to_live_in_seconds = 0 -> (known after apply) + ~ type = "couchbase" -> (known after apply) + # (3 unchanged attributes hidden) } Plan: 1 to add, 0 to change, 1 to destroy. 
Changes to Outputs: - ~ new_sample_bucket = { + ~ new_sample_bucket = { + ~ bucket_conflict_resolution = "seqno" -> (known after apply) + ~ durability_level = "none" -> (known after apply) ~ eviction_policy = "fullEviction" -> (known after apply) + ~ flush = false -> (known after apply) ~ id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) + ~ memory_allocation_in_mb = 200 -> (known after apply) ~ name = "gamesim-sample" -> "travel-sample" + ~ replicas = 1 -> (known after apply) ~ stats = { - disk_used_in_mib = 0 - - item_count = 196 + - item_count = 209 - memory_used_in_mib = 20 - ops_per_second = 0 } -> (known after apply) ~ storage_backend = "couchstore" -> (known after apply) - # (10 unchanged attributes hidden) + ~ time_to_live_in_seconds = 0 -> (known after apply) + ~ type = "couchbase" -> (known after apply) + # (3 unchanged attributes hidden) } ~ samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" -> (known after apply) ~ samplebuckets_list = { - ~ data = [ - ~ { - ~ id = "dHJhdmVsLXNhbXBsZQ==" -> "Z2FtZXNpbS1zYW1wbGU=" - ~ name = "travel-sample" -> "gamesim-sample" - ~ stats = { - ~ disk_used_in_mib = 15 -> 0 - ~ item_count = 163 -> 390 - ~ memory_used_in_mib = 72 -> 42 - # (1 unchanged attribute hidden) + ~ data = null -> [ + + { + + bucket_conflict_resolution = "seqno" + + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + + durability_level = "none" + + eviction_policy = "fullEviction" + + flush = false + + id = "Z2FtZXNpbS1zYW1wbGU=" + + memory_allocation_in_mb = 200 + + name = "gamesim-sample" + + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" + + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + replicas = 1 + + stats = { + + disk_used_in_mib = 19 + + item_count = 586 + + memory_used_in_mib = 62 + + ops_per_second = 0 } - # (12 unchanged attributes hidden) + + storage_backend = "couchstore" + + time_to_live_in_seconds = 0 + + type = "couchbase" }, ] # (3 unchanged attributes hidden) @@ -430,7 +455,7 @@ Do you want to perform these actions? 
Enter a value: yes couchbase-capella_sample_bucket.new_sample_bucket: Destroying... [id=Z2FtZXNpbS1zYW1wbGU=] -couchbase-capella_sample_bucket.new_sample_bucket: Destruction complete after 1s +couchbase-capella_sample_bucket.new_sample_bucket: Destruction complete after 2s couchbase-capella_sample_bucket.new_sample_bucket: Creating... couchbase-capella_sample_bucket.new_sample_bucket: Creation complete after 0s [id=dHJhdmVsLXNhbXBsZQ==] @@ -440,7 +465,7 @@ Outputs: new_sample_bucket = { "bucket_conflict_resolution" = "seqno" - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "durability_level" = "none" "eviction_policy" = "fullEviction" "flush" = false @@ -452,8 +477,8 @@ new_sample_bucket = { "replicas" = 1 "stats" = { "disk_used_in_mib" = 0 - "item_count" = 163 - "memory_used_in_mib" = 33 + "item_count" = 0 + "memory_used_in_mib" = 0 "ops_per_second" = 0 } "storage_backend" = "couchstore" @@ -462,11 +487,11 @@ new_sample_bucket = { } samplebucket_id = "dHJhdmVsLXNhbXBsZQ==" samplebuckets_list = { - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "data" = tolist([ { "bucket_conflict_resolution" = "seqno" - "cluster_id" = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "durability_level" = "none" "eviction_policy" = "fullEviction" "flush" = false @@ -477,9 +502,9 @@ samplebuckets_list = { "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" "replicas" = 1 "stats" = { - "disk_used_in_mib" = 0 - "item_count" = 390 - "memory_used_in_mib" = 42 + "disk_used_in_mib" = 19 + "item_count" = 586 + "memory_used_in_mib" = 62 "ops_per_second" = 0 } "storage_backend" = "couchstore" @@ -510,8 +535,8 @@ Sample Output: │ The behavior may therefore not match any released version of the provider and applying changes may cause the state to become incompatible with published releases. 
╵ data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... -couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=dHJhdmVsLXNhbXBsZQ==] -data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 1s +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=Z2FtZXNpbS1zYW1wbGU=] +data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 0s Terraform used the selected providers to generate the following execution plan. Resource actions are indicated with the following symbols: - destroy @@ -521,20 +546,20 @@ Terraform will perform the following actions: # couchbase-capella_sample_bucket.new_sample_bucket will be destroyed - resource "couchbase-capella_sample_bucket" "new_sample_bucket" { - bucket_conflict_resolution = "seqno" -> null - - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" -> null + - cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" -> null - durability_level = "none" -> null - eviction_policy = "fullEviction" -> null - flush = false -> null - - id = "dHJhdmVsLXNhbXBsZQ==" -> null + - id = "Z2FtZXNpbS1zYW1wbGU=" -> null - memory_allocation_in_mb = 200 -> null - - name = "travel-sample" -> null + - name = "gamesim-sample" -> null - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" -> null - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" -> null - replicas = 1 -> null - stats = { - - disk_used_in_mib = 120 -> null - - item_count = 63288 -> null - - memory_used_in_mib = 165 -> null + - disk_used_in_mib = 0 -> null + - item_count = 586 -> null + - memory_used_in_mib = 62 -> null - ops_per_second = 0 -> null } -> null - storage_backend = "couchstore" -> null @@ -545,48 +570,48 @@ Terraform will perform the following actions: Plan: 0 to add, 0 to change, 1 to destroy. 
Changes to Outputs: - - new_sample_bucket = { + - new_sample_bucket = { - bucket_conflict_resolution = "seqno" - - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" - durability_level = "none" - eviction_policy = "fullEviction" - flush = false - - id = "dHJhdmVsLXNhbXBsZQ==" + - id = "Z2FtZXNpbS1zYW1wbGU=" - memory_allocation_in_mb = 200 - - name = "travel-sample" + - name = "gamesim-sample" - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - replicas = 1 - stats = { - disk_used_in_mib = 0 - - item_count = 163 - - memory_used_in_mib = 33 + - item_count = 586 + - memory_used_in_mib = 62 - ops_per_second = 0 } - storage_backend = "couchstore" - time_to_live_in_seconds = 0 - type = "couchbase" } -> null - - samplebucket_id = "dHJhdmVsLXNhbXBsZQ==" -> null + - samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" -> null - samplebuckets_list = { - - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" - data = [ - { - bucket_conflict_resolution = "seqno" - - cluster_id = "17619f3c-08f5-40a3-8c0c-d2e5b263a025" + - cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" - durability_level = "none" - eviction_policy = "fullEviction" - flush = false - - id = "dHJhdmVsLXNhbXBsZQ==" + - id = "Z2FtZXNpbS1zYW1wbGU=" - memory_allocation_in_mb = 200 - - name = "travel-sample" + - name = "gamesim-sample" - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" - replicas = 1 - stats = { - - disk_used_in_mib = 120 - - item_count = 63288 - - memory_used_in_mib = 165 + - disk_used_in_mib = 0 + - item_count = 586 + - memory_used_in_mib = 62 - ops_per_second = 0 } - storage_backend = "couchstore" @@ -604,7 +629,7 @@ Do you really want to destroy all resources? Enter a value: yes -couchbase-capella_sample_bucket.new_sample_bucket: Destroying... 
[id=dHJhdmVsLXNhbXBsZQ==] +couchbase-capella_sample_bucket.new_sample_bucket: Destroying... [id=Z2FtZXNpbS1zYW1wbGU=] couchbase-capella_sample_bucket.new_sample_bucket: Destruction complete after 2s Destroy complete! Resources: 1 destroyed. From 389d01912150d65bfa8b9b554831b6768f00c505 Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Tue, 12 Mar 2024 08:47:20 +0000 Subject: [PATCH 13/15] Added nil pointer handler and renamed docstrings/errors --- examples/sample_bucket/README.md | 76 +++++++++---------- .../sample_bucket/terraform.template.tfvars | 2 +- internal/api/sample_bucket/sample_bucket.go | 6 +- internal/api/sample_bucket/stats.go | 2 +- internal/datasources/sample_buckets.go | 19 +++-- internal/resources/sample_bucket.go | 71 ++++++++--------- internal/schema/sample_bucket.go | 64 ++++++---------- 7 files changed, 112 insertions(+), 128 deletions(-) diff --git a/examples/sample_bucket/README.md b/examples/sample_bucket/README.md index 6e96217c..88bbc911 100644 --- a/examples/sample_bucket/README.md +++ b/examples/sample_bucket/README.md @@ -53,8 +53,8 @@ Terraform will perform the following actions: + id = (known after apply) + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) @@ -74,8 +74,8 @@ Changes to Outputs: + id = (known after apply) + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" + replicas = (known after apply) + stats = (known after 
apply) + storage_backend = (known after apply) @@ -86,8 +86,8 @@ Changes to Outputs: + samplebuckets_list = { + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + data = null - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" } ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── @@ -128,8 +128,8 @@ Terraform will perform the following actions: + id = (known after apply) + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) @@ -149,8 +149,8 @@ Changes to Outputs: + id = (known after apply) + memory_allocation_in_mb = (known after apply) + name = "gamesim-sample" - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" + replicas = (known after apply) + stats = (known after apply) + storage_backend = (known after apply) @@ -161,8 +161,8 @@ Changes to Outputs: + samplebuckets_list = { + cluster_id = "4ce95e43-106c-427f-b260-49fdb2c8b034" + data = null - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = 
"c1fade1a-9f27-4a3c-ki90-g1b2301890e4" } Do you want to perform these actions? @@ -187,8 +187,8 @@ new_sample_bucket = { "id" = "Z2FtZXNpbS1zYW1wbGU=" "memory_allocation_in_mb" = 200 "name" = "gamesim-sample" - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" "replicas" = 1 "stats" = { "disk_used_in_mib" = 0 @@ -204,8 +204,8 @@ samplebucket_id = "Z2FtZXNpbS1zYW1wbGU=" samplebuckets_list = { "cluster_id" = "4ce95e43-106c-427f-b260-49fdb2c8b034" "data" = tolist(null) /* of object */ - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" } ``` @@ -227,8 +227,8 @@ terraform output new_sample_bucket "id" = "Z2FtZXNpbS1zYW1wbGU=" "memory_allocation_in_mb" = 250 "name" = "gamesim-sample" - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" "replicas" = 1 "stats" = { "disk_used_in_mib" = 19 @@ -274,15 +274,15 @@ Successfully removed 1 resource instance(s). 
Command: `terraform import couchbase-capella_sample_bucket.new_sample_bucket id=,cluster_id=,project_id=,organization_id=` In this case, the complete command is: -`terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034` +`terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c2d-c957-b521585c16d0,project_id=c1fade1a-9f27-4a3c-ki90-g1b2301890e4,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034` ``` -terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034 -couchbase-capella_sample_bucket.new_sample_bucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... +terraform import couchbase-capella_sample_bucket.new_sample_bucket id=YmVlci1zYW1wbGU=,organization_id=6af08c0a-8cab-4c2d-c957-b521585c16d0,project_id=c1fade1a-9f27-4a3c-ki90-g1b2301890e4,cluster_id=4ce95e43-106c-427f-b260-49fdb2c8b034 +couchbase-capella_sample_bucket.new_sample_bucket: Importing from ID "id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c2d-c957-b521585c16d0,project_id=c1fade1a-9f27-4a3c-ki90-g1b2301890e4,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025"... couchbase-capella_sample_bucket.new_sample_bucket: Import prepared! Prepared couchbase-capella_sample_bucket for import data.couchbase-capella_sample_buckets.existing_sample_buckets: Reading... -couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... 
[id=id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c1c-b257-b521575c16d0,project_id=c1fade1a-9f27-4a3c-af73-d1b2301890e3,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025] +couchbase-capella_sample_bucket.new_sample_bucket: Refreshing state... [id=id=Z2FtZXNpbS1zYW1wbGU=,organization_id=6af08c0a-8cab-4c2d-c957-b521585c16d0,project_id=c1fade1a-9f27-4a3c-ki90-g1b2301890e4,cluster_id=17619f3c-08f5-40a3-8c0c-d2e5b263a025] data.couchbase-capella_sample_buckets.existing_sample_buckets: Read complete after 2s Import successful! @@ -431,8 +431,8 @@ Changes to Outputs: + id = "Z2FtZXNpbS1zYW1wbGU=" + memory_allocation_in_mb = 200 + name = "gamesim-sample" - + organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - + project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + + organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + + project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" + replicas = 1 + stats = { + disk_used_in_mib = 19 @@ -472,8 +472,8 @@ new_sample_bucket = { "id" = "dHJhdmVsLXNhbXBsZQ==" "memory_allocation_in_mb" = 200 "name" = "travel-sample" - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" "replicas" = 1 "stats" = { "disk_used_in_mib" = 0 @@ -498,8 +498,8 @@ samplebuckets_list = { "id" = "Z2FtZXNpbS1zYW1wbGU=" "memory_allocation_in_mb" = 200 "name" = "gamesim-sample" - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" "replicas" = 1 "stats" = { "disk_used_in_mib" = 19 @@ -512,8 +512,8 @@ samplebuckets_list = { "type" = "couchbase" }, ]) - "organization_id" = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - "project_id" = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + "organization_id" = 
"6af08c0a-8cab-4c2d-c957-b521585c16d0" + "project_id" = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" } ``` @@ -553,8 +553,8 @@ Terraform will perform the following actions: - id = "Z2FtZXNpbS1zYW1wbGU=" -> null - memory_allocation_in_mb = 200 -> null - name = "gamesim-sample" -> null - - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" -> null - - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" -> null + - organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" -> null + - project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" -> null - replicas = 1 -> null - stats = { - disk_used_in_mib = 0 -> null @@ -579,8 +579,8 @@ Changes to Outputs: - id = "Z2FtZXNpbS1zYW1wbGU=" - memory_allocation_in_mb = 200 - name = "gamesim-sample" - - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + - organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + - project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" - replicas = 1 - stats = { - disk_used_in_mib = 0 @@ -605,8 +605,8 @@ Changes to Outputs: - id = "Z2FtZXNpbS1zYW1wbGU=" - memory_allocation_in_mb = 200 - name = "gamesim-sample" - - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + - organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + - project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" - replicas = 1 - stats = { - disk_used_in_mib = 0 @@ -619,8 +619,8 @@ Changes to Outputs: - type = "couchbase" }, ] - - organization_id = "6af08c0a-8cab-4c1c-b257-b521575c16d0" - - project_id = "c1fade1a-9f27-4a3c-af73-d1b2301890e3" + - organization_id = "6af08c0a-8cab-4c2d-c957-b521585c16d0" + - project_id = "c1fade1a-9f27-4a3c-ki90-g1b2301890e4" } -> null Do you really want to destroy all resources? 
diff --git a/examples/sample_bucket/terraform.template.tfvars b/examples/sample_bucket/terraform.template.tfvars index 8b225a0d..5bef9f27 100644 --- a/examples/sample_bucket/terraform.template.tfvars +++ b/examples/sample_bucket/terraform.template.tfvars @@ -5,5 +5,5 @@ project_id = "" cluster_id = "" samplebucket = { - name = "new_terraform_sample_bucket" + name = "gamesim-sample" } diff --git a/internal/api/sample_bucket/sample_bucket.go b/internal/api/sample_bucket/sample_bucket.go index 0fac36fe..5b540ee9 100644 --- a/internal/api/sample_bucket/sample_bucket.go +++ b/internal/api/sample_bucket/sample_bucket.go @@ -1,7 +1,7 @@ package sample_bucket -// CreateSampleBucketRequest is the payload passed to V4 Capella Public API to create a bucket in a Capella cluster. -// Creates a new sample bucket configuration under a cluster. +// CreateSampleBucketRequest is the payload passed to V4 Capella Public API to load a sample bucket in a Capella cluster. +// Loads a new sample bucket configuration under a cluster. // // To learn more about bucket configuration, see https://docs.couchbase.com/server/current/manage/manage-settings/install-sample-buckets.html. // @@ -21,7 +21,7 @@ type CreateSampleBucketRequest struct { Name string `json:"name"` } -// CreateBucketSampleResponse is the response received from Capella V4 Public API on requesting to create a new bucket. +// CreateBucketSampleResponse is the response received from Capella V4 Public API on requesting to load a new sample bucket. // Common response codes: 201, 403, 422, 429, 500. type CreateSampleBucketResponse struct { // Id is unique ID of the bucket created. diff --git a/internal/api/sample_bucket/stats.go b/internal/api/sample_bucket/stats.go index f6006f3c..9633347e 100644 --- a/internal/api/sample_bucket/stats.go +++ b/internal/api/sample_bucket/stats.go @@ -1,6 +1,6 @@ package sample_bucket -// Stats are the bucket related statistics that are sent by the Capella V4 Public API for any existing bucket. 
+// Stats are the sample bucket related statistics that are sent by the Capella V4 Public API for any existing sample bucket. type Stats struct { ItemCount int64 `json:"itemCount"` OpsPerSecond int64 `json:"opsPerSecond"` diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go index 92103a07..58057c5d 100644 --- a/internal/datasources/sample_buckets.go +++ b/internal/datasources/sample_buckets.go @@ -20,7 +20,7 @@ var ( _ datasource.DataSourceWithConfigure = &SampleBuckets{} ) -// Sample buckets is the bucket data source implementation. +// Sample buckets is the sample bucket data source implementation. type SampleBuckets struct { *providerschema.Data } @@ -30,12 +30,12 @@ func NewSampleBuckets() datasource.DataSource { return &SampleBuckets{} } -// Metadata returns the bucket data source type name. +// Metadata returns the sample bucket data source type name. func (d *SampleBuckets) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = req.ProviderTypeName + "_sample_buckets" } -// Schema defines the schema for the bucket data source. +// Schema defines the schema for the sample bucket data source. func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Attributes: map[string]schema.Attribute{ @@ -118,7 +118,7 @@ func (s *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, re } } -// Read refreshes the Terraform state with the latest data of buckets. +// Read refreshes the Terraform state with the latest data of sample buckets. 
func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { var state providerschema.SampleBuckets diags := req.Config.Get(ctx, &state) @@ -130,7 +130,7 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re clusterId, projectId, organizationId, err := state.Validate() if err != nil { resp.Diagnostics.AddError( - "Error Reading SampleBuckets in Capella", + "Error Reading Sample Buckets in Capella", "Could not read Capella sample buckets in cluster "+clusterId+": "+err.Error(), ) return @@ -141,7 +141,7 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re response, err := api.GetPaginated[[]samplebucketapi.GetSampleBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById) if err != nil { resp.Diagnostics.AddError( - "Error Reading Capella SampleBuckets", + "Error Reading Capella Sample Buckets", "Could not read sample buckets in cluster "+clusterId+": "+api.ParseError(err), ) return @@ -149,7 +149,12 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re // Map response body to model for _, sampleBucket := range response { - sampleStats := providerschema.NewStats(*sampleBucket.Stats) + var sampleStats providerschema.Stats + + if sampleBucket.Stats != nil { + sampleStats = providerschema.NewStats(*sampleBucket.Stats) + } + sampleBucketStatsObj, diags := types.ObjectValueFrom(ctx, sampleStats.AttributeTypes(), sampleStats) if diags.HasError() { resp.Diagnostics.AddError( diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go index 15f6a4cc..81cb4947 100644 --- a/internal/resources/sample_bucket.go +++ b/internal/resources/sample_bucket.go @@ -24,13 +24,13 @@ var ( _ resource.ResourceWithImportState = &SampleBucket{} ) -const errorMessageAfterSampleBucketCreation = "Sample bucket creation is successful, but encountered an error while checking the current" + +const 
errorMessageAfterSampleBucketCreation = "Sample bucket loading is successful, but encountered an error while checking the current" + " state of the sample bucket. Please run `terraform plan` after 1-2 minutes to know the" + " current sample bucket state. Additionally, run `terraform apply --refresh-only` to update" + " the state from remote, unexpected error: " -const errorMessageWhileSampleBucketCreation = "There is an error during sample bucket creation. Please check in Capella to see if any hanging resources" + - " have been created, unexpected error: " +const errorMessageWhileSampleBucketCreation = "There is an error during sample bucket loading. Please check in Capella to see if any hanging resources" + + " have been loaded, unexpected error: " // SampleBucket is the sample bucket resource implementation. type SampleBucket struct { @@ -78,7 +78,7 @@ func (s *SampleBucket) Schema(_ context.Context, _ resource.SchemaRequest, resp resp.Schema = SampleBucketSchema() } -// Create creates a new sample bucket. +// Create loads a new sample bucket. 
func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { var plan providerschema.SampleBucket diags := req.Plan.Get(ctx, &plan) @@ -91,10 +91,10 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r sampleBucketRequest := samplebucketapi.CreateSampleBucketRequest{ Name: plan.Name.ValueString(), } - if err := s.validateCreateBucket(plan); err != nil { + if err := s.validateCreateSampleBucket(plan); err != nil { resp.Diagnostics.AddError( - "Error creating bucket", - "Could not create bucket, unexpected error: "+err.Error(), + "Error loading sample bucket", + "Could not load sample bucket, unexpected error: "+err.Error(), ) return } @@ -114,7 +114,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r ) if err != nil { resp.Diagnostics.AddError( - "Error creating sample bucket", + "Error loading sample bucket", errorMessageWhileSampleBucketCreation+api.ParseError(err), ) return @@ -124,13 +124,14 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r err = json.Unmarshal(response.Body, &sampleBucketResponse) if err != nil { resp.Diagnostics.AddError( - "Error creating sample bucket", + "Error loading sample bucket", errorMessageWhileSampleBucketCreation+"error during unmarshalling: "+err.Error(), ) return } - diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, sampleBucketResponse.Id)) + plan.Id = types.StringValue(sampleBucketResponse.Id) + diags = resp.State.Set(ctx, plan) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return @@ -139,7 +140,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r refreshedState, err := s.retrieveSampleBucket(ctx, organizationId, projectId, clusterId, sampleBucketResponse.Id) if err != nil { resp.Diagnostics.AddWarning( - "Error creating sample bucket "+sampleBucketResponse.Id, + "Error loading sample bucket "+sampleBucketResponse.Id, errorMessageAfterSampleBucketCreation+api.ParseError(err), ) return @@ -165,8 +166,8 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp IDs, err := state.Validate() if err != nil { resp.Diagnostics.AddError( - "Error Reading SampleBucket in Capella", - "Could not read Capella sample Bucket with ID "+state.Id.String()+": "+err.Error(), + "Error Reading Sample Bucket in Capella", + "Could not read Capella sample bucket with ID "+state.Id.String()+": "+err.Error(), ) return } @@ -187,7 +188,7 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp return } resp.Diagnostics.AddError( - "Error reading Samplebucket", + "Error reading Sample Bucket", "Could not read sample bucket with id "+state.Id.String()+": "+errString, ) return @@ -221,8 +222,8 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.OrganizationId.IsNull() { resp.Diagnostics.AddError( - "Error creating sample bucket", - "Could not create sample bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), ) return } @@ -230,8 +231,8 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.ProjectId.IsNull() { resp.Diagnostics.AddError( - "Error creating sample bucket", - "Could not create sample bucket, unexpected error: "+errors.ErrProjectIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete 
sample bucket, unexpected error: "+errors.ErrProjectIdCannotBeEmpty.Error(), ) return } @@ -239,8 +240,8 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.ClusterId.IsNull() { resp.Diagnostics.AddError( - "Error creating sample bucket", - "Could not create sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return } @@ -248,8 +249,8 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.Id.IsNull() { resp.Diagnostics.AddError( - "Error creating sample bucket", - "Could not create sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), + "Error deleting sample bucket", + "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return } @@ -272,14 +273,14 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r return } resp.Diagnostics.AddError( - "Error Deleting the SampleBucket", - "Could not delete sample Bucket associated with cluster "+clusterId+": "+errString, + "Error Deleting the Sample Bucket", + "Could not delete sample bucket associated with cluster "+clusterId+": "+errString, ) return } } -func (r *SampleBucket) validateCreateBucket(plan providerschema.SampleBucket) error { +func (r *SampleBucket) validateCreateSampleBucket(plan providerschema.SampleBucket) error { if plan.OrganizationId.IsNull() { return errors.ErrOrganizationIdMissing } @@ -324,7 +325,12 @@ func (s *SampleBucket) retrieveSampleBucket(ctx context.Context, organizationId, if err != nil { return nil, fmt.Errorf("%s: %w", errors.ErrUnmarshallingResponse, err) } - sampleStats := providerschema.NewStats(*sampleBucketResp.Stats) + + var sampleStats providerschema.Stats + if sampleBucketResp.Stats != nil { + sampleStats = providerschema.NewStats(*sampleBucketResp.Stats) + 
} + sampleBucketStatsObj, diags := types.ObjectValueFrom(ctx, sampleStats.AttributeTypes(), sampleStats) if diags.HasError() { return nil, errors.ErrUnableToConvertAuditData @@ -361,16 +367,3 @@ func isValidSampleName(category string) bool { } return false } - -// initializeBucketWithPlanAndId initializes an instance of providerschema.Bucket -// with the specified plan and ID. It marks all computed fields as null. -func initializeSampleBucketWithPlanAndId(plan providerschema.SampleBucket, id string) providerschema.SampleBucket { - plan.Id = types.StringValue(id) - if plan.StorageBackend.IsNull() || plan.StorageBackend.IsUnknown() { - plan.StorageBackend = types.StringNull() - } - if plan.EvictionPolicy.IsNull() || plan.EvictionPolicy.IsUnknown() { - plan.EvictionPolicy = types.StringNull() - } - return plan -} diff --git a/internal/schema/sample_bucket.go b/internal/schema/sample_bucket.go index 5aa72492..21d4f392 100644 --- a/internal/schema/sample_bucket.go +++ b/internal/schema/sample_bucket.go @@ -11,44 +11,38 @@ import ( ) type SampleBucket struct { - // DurabilityLevel is the minimum level at which all writes to the bucket must occur. + // DurabilityLevel is the minimum level at which all writes to the sample bucket must occur. // Default: "none" // Enum: "none" "majority" "majorityAndPersistActive" "persistToMajority" // - // The options for Durability level are as follows, according to the bucket type. + // The options for Durability level are as follows, according to the sample bucket type. // - // For a Couchbase bucket: + // For a Couchbase sample bucket: // None // Replicate to Majority // Majority and Persist to Active // Persist to Majority - // - //For an Ephemeral bucket: - // None - // Replicate to Majority DurabilityLevel types.String `tfsdk:"durability_level"` - // Stats has the bucket stats that are related to memory and disk consumption. - // itemCount: Number of documents in the bucket. 
+ // Stats has the sample bucket stats that are related to memory and disk consumption. + // itemCount: Number of documents in the sample bucket. // opsPerSecond: Number of operations per second. // diskUsedInMib: The amount of disk used (in MiB). // memoryUsedInMib: The amount of memory used (in MiB). Stats types.Object `tfsdk:"stats"` - // Type defines the type of the bucket. + // Type defines the type of the sample bucket. // Default: "couchbase" - // Enum: "couchbase" "ephemeral" - // If selected Ephemeral, it is not eligible for imports or App Endpoints creation. This field cannot be changed later. + // + // This field for sample buckets is always the default and cannot be changed. // The options may also be referred to as Memory and Disk (Couchbase), Memory Only (Ephemeral) in the Couchbase documentation. // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket Type types.String `tfsdk:"type"` - // StorageBackend defines the storage engine that is used by the bucket. + // StorageBackend defines the storage engine that is used by the sample bucket. // Default: "couchstore" - // Enum: "couchstore" "magma" // - // Ephemeral buckets do not support StorageBackend, hence not applicable for Ephemeral buckets and throws an error if this field is added. - // This field is only applicable for a Couchbase bucket. The default value mentioned (Couchstore) is for Couchbase bucket. + // This field for sample buckets is always the default and cannot be changed. // This field cannot be changed later. // To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html StorageBackend types.String `tfsdk:"storage_backend"` @@ -58,21 +52,23 @@ type SampleBucket struct { // BucketConflictResolution is the means by which conflicts are resolved during replication. // Default: "seqno" - // Enum: "seqno" "lww" + // + // This field for sample buckets is always the default and cannot be changed. 
// This field may be referred to as "conflict resolution" in the Couchbase documentation. - // seqno and lww may be referred to as "sequence number" and "timestamp" respectively. + // seqno may be referred to as "sequence number". // This field cannot be changed later. // To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution BucketConflictResolution types.String `tfsdk:"bucket_conflict_resolution"` - // Name is the name of the bucket. + // Name is the name of the sample bucket. + // Enum: "travel-sample", "beer-sample", "gamesim-sample" Name types.String `tfsdk:"name"` // ProjectId is the ID of the project to which the Capella cluster belongs. // The database credential will be created for the cluster. ProjectId types.String `tfsdk:"project_id"` - // Id is the id of the created bucket. + // Id is the id of the created sample bucket. Id types.String `tfsdk:"id"` // OrganizationId is the ID of the organization to which the Capella cluster belongs. @@ -82,39 +78,29 @@ type SampleBucket struct { // EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion. // This may be also known as Ejection Policy in the Couchbase documentation. // - // For Couchbase bucket, Eviction Policy is fullEviction by default. - // For Ephemeral buckets, Eviction Policy is a required field, and should be one of the following: - // noEviction - // nruEviction - // Default: "fullEviction" - // Enum: "fullEviction" "noEviction" "nruEviction" + // For Couchbase sample bucket, Eviction Policy is fullEviction by default and cannot be changed // To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy EvictionPolicy types.String `tfsdk:"eviction_policy"` - // MemoryAllocationInMB is the amount of memory to allocate for the bucket memory in MiB. + // MemoryAllocationInMB is the amount of memory to allocate for the sample bucket memory in MiB. 
// This is the maximum limit is dependent on the allocation of the KV service. For example, 80% of the allocation. - // Default: 100 - // - // The default value (100MiB) mentioned is for Couchbase type buckets with Couchstore as the Storage Backend. + // Default: 200 // - // For Couchbase buckets, the default and minimum memory allocation changes according to the Storage Backend type as follows: - // For Couchstore, the default and minimum memory allocation is 100 MiB. - // For Magma, the default and minimum memory allocation is 1024 MiB. - // For Ephemeral buckets, the default and minimum memory allocation is 100 MiB. + // For Couchbase sample buckets, the default and minimum memory allocation is different. The minimum allocation is 100MiB MemoryAllocationInMB types.Int64 `tfsdk:"memory_allocation_in_mb"` // TimeToLiveInSeconds specifies the time to live (TTL) value in seconds. - // This is the maximum time to live for items in the bucket. + // This is the maximum time to live for items in the sample bucket. // Default is 0, that means TTL is disabled. This is a non-negative value. TimeToLiveInSeconds types.Int64 `tfsdk:"time_to_live_in_seconds"` - // Replicas is the number of replicas for the bucket. + // Replicas is the number of replicas for the sample bucket. // Default: 1 // Enum: 1 2 3 Replicas types.Int64 `tfsdk:"replicas"` - // Flush determines whether flushing is enabled on the bucket. - // Enable Flush to delete all items in this bucket at the earliest opportunity. + // Flush determines whether flushing is enabled on the sample bucket. + // Enable Flush to delete all items in this sample bucket at the earliest opportunity. // Disable Flush to avoid inadvertent data loss. // Default: false Flush types.Bool `tfsdk:"flush"` @@ -137,7 +123,7 @@ type SampleBuckets struct { // Validate will split the IDs by a delimiter i.e. comma , in case a terraform import CLI is invoked. 
// The format of the terraform import CLI would include the IDs as follows - -// `terraform import capella_bucket.new_bucket id=,cluster_id=,project_id=,organization_id=`. +// `terraform import couchbase-capella_sample_bucket.new_sample_bucket id=,cluster_id=,project_id=,organization_id=`. func (b SampleBucket) Validate() (map[Attr]string, error) { state := map[Attr]basetypes.StringValue{ OrganizationId: b.OrganizationId, From 82cd3997543a39bbd7713481d805c33e70d4786c Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Tue, 12 Mar 2024 09:01:39 +0000 Subject: [PATCH 14/15] Updated docstring and updated the error capitalisation --- internal/api/sample_bucket/sample_bucket.go | 11 ++++------- internal/datasources/sample_buckets.go | 4 ++-- internal/resources/sample_bucket.go | 20 ++++++++++---------- 3 files changed, 16 insertions(+), 19 deletions(-) diff --git a/internal/api/sample_bucket/sample_bucket.go b/internal/api/sample_bucket/sample_bucket.go index 5b540ee9..b1941174 100644 --- a/internal/api/sample_bucket/sample_bucket.go +++ b/internal/api/sample_bucket/sample_bucket.go @@ -12,19 +12,16 @@ package sample_bucket // Project Manager // To learn more, see https://docs.couchbase.com/cloud/organizations/organization-projects-overview.html type CreateSampleBucketRequest struct { - // Name is the name of the bucket (up to 100 characters). - // This field cannot be changed later. The name should be according to the following rules: - // Characters used for the name should be in the ranges of A-Z, a-z, and 0-9; plus the underscore, period, dash, and percent characters. - // The name can be a maximum of 100 characters in length. - // The name cannot have 0 characters or empty. Minimum length of name is 1. - // The name cannot start with a . (period). + // Name is the name of the sample bucket (up to 100 characters). + // Enum: travel-sample, beer-sample, gamesim-sample + // This field cannot be changed later. 
Name string `json:"name"` } // CreateBucketSampleResponse is the response received from Capella V4 Public API on requesting to load a new sample bucket. // Common response codes: 201, 403, 422, 429, 500. type CreateSampleBucketResponse struct { - // Id is unique ID of the bucket created. + // Id is unique ID of the sample bucket created. Id string `json:"bucketId"` // Name is the name of the cluster (up to 100 characters). diff --git a/internal/datasources/sample_buckets.go b/internal/datasources/sample_buckets.go index 58057c5d..3f0bbb3d 100644 --- a/internal/datasources/sample_buckets.go +++ b/internal/datasources/sample_buckets.go @@ -131,7 +131,7 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re if err != nil { resp.Diagnostics.AddError( "Error Reading Sample Buckets in Capella", - "Could not read Capella sample buckets in cluster "+clusterId+": "+err.Error(), + "Could not read sample buckets in cluster "+clusterId+": "+err.Error(), ) return } @@ -141,7 +141,7 @@ func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, re response, err := api.GetPaginated[[]samplebucketapi.GetSampleBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById) if err != nil { resp.Diagnostics.AddError( - "Error Reading Capella Sample Buckets", + "Error Reading Sample Buckets in Capella", "Could not read sample buckets in cluster "+clusterId+": "+api.ParseError(err), ) return diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go index 81cb4947..16f09267 100644 --- a/internal/resources/sample_bucket.go +++ b/internal/resources/sample_bucket.go @@ -93,7 +93,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r } if err := s.validateCreateSampleBucket(plan); err != nil { resp.Diagnostics.AddError( - "Error loading sample bucket", + "Error Loading Sample Bucket", "Could not load sample bucket, unexpected error: "+err.Error(), ) return @@ -114,7 +114,7 @@ func (s 
*SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r ) if err != nil { resp.Diagnostics.AddError( - "Error loading sample bucket", + "Error Loading Sample Bucket", errorMessageWhileSampleBucketCreation+api.ParseError(err), ) return @@ -124,7 +124,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r err = json.Unmarshal(response.Body, &sampleBucketResponse) if err != nil { resp.Diagnostics.AddError( - "Error loading sample bucket", + "Error Loading Sample Bucket", errorMessageWhileSampleBucketCreation+"error during unmarshalling: "+err.Error(), ) return @@ -140,7 +140,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r refreshedState, err := s.retrieveSampleBucket(ctx, organizationId, projectId, clusterId, sampleBucketResponse.Id) if err != nil { resp.Diagnostics.AddWarning( - "Error loading sample bucket "+sampleBucketResponse.Id, + "Error Loading Sample Bucket "+sampleBucketResponse.Id, errorMessageAfterSampleBucketCreation+api.ParseError(err), ) return @@ -188,7 +188,7 @@ func (s *SampleBucket) Read(ctx context.Context, req resource.ReadRequest, resp return } resp.Diagnostics.AddError( - "Error reading Sample Bucket", + "Error Reading Sample Bucket in Capella", "Could not read sample bucket with id "+state.Id.String()+": "+errString, ) return @@ -222,7 +222,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.OrganizationId.IsNull() { resp.Diagnostics.AddError( - "Error deleting sample bucket", + "Error Deleting Sample Bucket", "Could not delete sample bucket, unexpected error: "+errors.ErrOrganizationIdCannotBeEmpty.Error(), ) return @@ -231,7 +231,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.ProjectId.IsNull() { resp.Diagnostics.AddError( - "Error deleting sample bucket", + "Error Deleting Sample Bucket", "Could not delete sample bucket, unexpected error: 
"+errors.ErrProjectIdCannotBeEmpty.Error(), ) return @@ -240,7 +240,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.ClusterId.IsNull() { resp.Diagnostics.AddError( - "Error deleting sample bucket", + "Error Deleting Sample Bucket", "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return @@ -249,7 +249,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r if state.Id.IsNull() { resp.Diagnostics.AddError( - "Error deleting sample bucket", + "Error Deleting Sample Bucket", "Could not delete sample bucket, unexpected error: "+errors.ErrClusterIdCannotBeEmpty.Error(), ) return @@ -273,7 +273,7 @@ func (s *SampleBucket) Delete(ctx context.Context, req resource.DeleteRequest, r return } resp.Diagnostics.AddError( - "Error Deleting the Sample Bucket", + "Error Deleting Sample Bucket", "Could not delete sample bucket associated with cluster "+clusterId+": "+errString, ) return From ec672c5a1c77890dbc8dd0de5a5627043131e05b Mon Sep 17 00:00:00 2001 From: Laura Silaja Date: Wed, 13 Mar 2024 14:26:21 +0000 Subject: [PATCH 15/15] Added space to inports and initialised nul values for plan --- internal/resources/sample_bucket.go | 30 +++++++++++++++++++++++++-- internal/schema/sample_bucket_test.go | 1 + 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/internal/resources/sample_bucket.go b/internal/resources/sample_bucket.go index 16f09267..9e93abea 100644 --- a/internal/resources/sample_bucket.go +++ b/internal/resources/sample_bucket.go @@ -130,8 +130,7 @@ func (s *SampleBucket) Create(ctx context.Context, req resource.CreateRequest, r return } - plan.Id = types.StringValue(sampleBucketResponse.Id) - diags = resp.State.Set(ctx, plan) + diags = resp.State.Set(ctx, initializeSampleBucketWithPlanAndId(plan, sampleBucketResponse.Id)) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return @@ -367,3 +366,30 @@ func isValidSampleName(category string) bool { } return false } + +// initializeSampleBucketWithPlanAndId initializes an instance of providerschema.SampleBucket +// with the specified plan and ID. It marks all computed fields as null. +func initializeSampleBucketWithPlanAndId(plan providerschema.SampleBucket, id string) providerschema.SampleBucket { + plan.Id = types.StringValue(id) + + plan.Type = types.StringNull() + + plan.StorageBackend = types.StringNull() + + plan.MemoryAllocationInMB = types.Int64Null() + + plan.BucketConflictResolution = types.StringNull() + + plan.DurabilityLevel = types.StringNull() + + plan.Replicas = types.Int64Null() + + plan.Flush = types.BoolNull() + + plan.TimeToLiveInSeconds = types.Int64Null() + + plan.EvictionPolicy = types.StringNull() + + plan.Stats = types.ObjectNull(providerschema.Stats{}.AttributeTypes()) + return plan +} diff --git a/internal/schema/sample_bucket_test.go b/internal/schema/sample_bucket_test.go index 3c565d7c..d5f96a1c 100644 --- a/internal/schema/sample_bucket_test.go +++ b/internal/schema/sample_bucket_test.go @@ -6,6 +6,7 @@ import ( "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/errors" "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/stretchr/testify/assert" )