Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[AV-70846] Import sample buckets #156

Merged
merged 15 commits into from
Mar 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
636 changes: 636 additions & 0 deletions examples/sample_bucket/README.md

Large diffs are not rendered by default.

14 changes: 14 additions & 0 deletions examples/sample_bucket/create_sample_bucket.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Load the sample dataset named in var.samplebucket into the target Capella cluster.
resource "couchbase-capella_sample_bucket" "new_sample_bucket" {
  organization_id = var.organization_id
  project_id      = var.project_id
  cluster_id      = var.cluster_id
  name            = var.samplebucket.name
}

# Expose the full sample bucket resource for inspection after apply.
output "new_sample_bucket" {
  value = couchbase-capella_sample_bucket.new_sample_bucket
}

# Expose only the server-generated ID of the sample bucket.
output "samplebucket_id" {
  value = couchbase-capella_sample_bucket.new_sample_bucket.id
}
9 changes: 9 additions & 0 deletions examples/sample_bucket/list_sample_buckets.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Read all sample buckets that already exist in the target Capella cluster.
data "couchbase-capella_sample_buckets" "existing_sample_buckets" {
  organization_id = var.organization_id
  project_id      = var.project_id
  cluster_id      = var.cluster_id
}

# Expose the fetched list of sample buckets.
output "samplebuckets_list" {
  value = data.couchbase-capella_sample_buckets.existing_sample_buckets
}
12 changes: 12 additions & 0 deletions examples/sample_bucket/main.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Require the Couchbase Capella provider from the public Terraform registry.
terraform {
  required_providers {
    couchbase-capella = {
      source = "couchbasecloud/couchbase-capella"
    }
  }
}

# Authenticate the provider with a Capella v4 API key (see variables.tf).
provider "couchbase-capella" {
  authentication_token = var.auth_token
}

9 changes: 9 additions & 0 deletions examples/sample_bucket/terraform.template.tfvars
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Capella v4 API key secret used to authenticate the provider.
auth_token = "<v4-api-key-secret>"

# IDs identifying where the sample bucket will be loaded.
organization_id = "<organization_id>"
project_id = "<project_id>"
cluster_id = "<cluster_id>"

# Sample dataset to load; name must be one of the Capella-provided samples
# (travel-sample, beer-sample, gamesim-sample).
samplebucket = {
  name = "gamesim-sample"
}
24 changes: 24 additions & 0 deletions examples/sample_bucket/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Capella organization that owns the project and cluster.
variable "organization_id" {
  description = "Capella Organization ID"
  type        = string
}

# v4 API key secret; marked sensitive so Terraform redacts it from output.
variable "auth_token" {
  description = "Authentication API Key"
  type        = string
  sensitive   = true
}

# Capella project containing the target cluster.
variable "project_id" {
  description = "Capella Project ID"
  type        = string
}

# Cluster into which the sample bucket is loaded.
variable "cluster_id" {
  description = "Capella Cluster ID"
  type        = string
}

# Sample bucket configuration; name must be one of the Capella-provided
# sample datasets (travel-sample, beer-sample, gamesim-sample).
variable "samplebucket" {
  description = "Bucket configuration details useful for creation"

  type = object({
    name = string
  })
}
83 changes: 83 additions & 0 deletions internal/api/sample_bucket/sample_bucket.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
package sample_bucket

// CreateSampleBucketRequest is the payload passed to the V4 Capella Public API to load a sample bucket in a Capella cluster.
// Loads a new sample bucket configuration under a cluster.
//
// To learn more about bucket configuration, see https://docs.couchbase.com/server/current/manage/manage-settings/install-sample-buckets.html.
//
// In order to access this endpoint, the provided API key must have at least one of the following roles:
//
//	Organization Owner
//	Project Owner
//	Project Manager
//
// To learn more, see https://docs.couchbase.com/cloud/organizations/organization-projects-overview.html
type CreateSampleBucketRequest struct {
	// Name is the name of the sample bucket (up to 100 characters).
	// Enum: travel-sample, beer-sample, gamesim-sample.
	// This field cannot be changed later.
	Name string `json:"name"`
}

// CreateSampleBucketResponse is the response received from the Capella V4 Public API on requesting to load a new sample bucket.
// Common response codes: 201, 403, 422, 429, 500.
type CreateSampleBucketResponse struct {
	// Id is the unique ID of the sample bucket created.
	Id string `json:"bucketId"`

	// Name is the name of the sample bucket (up to 100 characters).
	Name string `json:"name"`
}

// GetSampleBucketResponse is the response received from the Capella V4 Public API on requesting information about an existing sample bucket.
//
// In order to access this endpoint, the provided API key must have at least one of the following roles:
//
//	Organization Owner
//	Project Owner
//	Project Manager
//	Project Viewer
//	Database Data Reader/Writer
//	Database Data Reader
//
// To learn more, see https://docs.couchbase.com/cloud/organizations/organization-projects-overview.html
type GetSampleBucketResponse struct {
	// Stats holds the bucket-level statistics reported by the API; may be nil if absent.
	Stats *Stats `json:"stats"`

	// Id is the ID of the bucket created.
	Id string `json:"id"`

	// Name is the name of the sample bucket (up to 100 characters).
	Name string `json:"name"`

	// Type represents the sample bucket type.
	// To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket
	Type string `json:"type"`

	// StorageBackend represents the storage engine used for the sample bucket.
	// To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/storage-engines.html
	StorageBackend string `json:"storageBackend"`

	// BucketConflictResolution is the means by which conflicts are resolved during replication.
	// To learn more, see https://docs.couchbase.com/cloud/clusters/xdcr/xdcr.html#conflict-resolution
	BucketConflictResolution string `json:"bucketConflictResolution"`

	// DurabilityLevel is the minimum level at which all writes to the sample bucket must occur.
	// To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket
	DurabilityLevel string `json:"durabilityLevel"`

	// EvictionPolicy is the policy which Capella adopts to prevent data loss due to memory exhaustion.
	// To learn more, see https://docs.couchbase.com/server/current/rest-api/rest-bucket-create.html#evictionpolicy
	EvictionPolicy string `json:"evictionPolicy"`

	// MemoryAllocationInMb is the amount of memory to allocate for the sample bucket memory in MiB.
	MemoryAllocationInMb int64 `json:"memoryAllocationInMb"`

	// Replicas states the number of replica nodes for the sample bucket.
	// To learn more, see https://docs.couchbase.com/cloud/clusters/data-service/manage-buckets.html#add-bucket
	Replicas int64 `json:"replicas"`

	// TimeToLiveInSeconds specifies the time to live (TTL) value in seconds.
	TimeToLiveInSeconds int64 `json:"timeToLiveInSeconds"`

	// Flush determines whether flushing is enabled on the sample bucket.
	Flush bool `json:"flush"`
}
9 changes: 9 additions & 0 deletions internal/api/sample_bucket/stats.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
package sample_bucket

// Stats are the sample bucket related statistics that are sent by the Capella V4 Public API for any existing sample bucket.
type Stats struct {
	// ItemCount is the number of documents currently stored in the bucket.
	ItemCount int64 `json:"itemCount"`
	// OpsPerSecond is the current rate of operations against the bucket.
	OpsPerSecond int64 `json:"opsPerSecond"`
	// DiskUsedInMib is the disk space used by the bucket, in MiB.
	DiskUsedInMib int64 `json:"diskUsedInMib"`
	// MemoryUsedInMib is the memory used by the bucket, in MiB.
	MemoryUsedInMib int64 `json:"memoryUsedInMib"`
}
213 changes: 213 additions & 0 deletions internal/datasources/sample_buckets.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,213 @@
package datasources

import (
"context"
"fmt"
"net/http"

"github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api"
samplebucketapi "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/api/sample_bucket"
providerschema "github.com/couchbasecloud/terraform-provider-couchbase-capella/internal/schema"

"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
"github.com/hashicorp/terraform-plugin-framework/types"
)

// Ensure the implementation satisfies the expected interfaces.
var (
	_ datasource.DataSource               = &SampleBuckets{}
	_ datasource.DataSourceWithConfigure  = &SampleBuckets{}
)

// SampleBuckets is the sample bucket data source implementation.
// It embeds the shared provider data (HTTP client, host URL, auth token).
type SampleBuckets struct {
	*providerschema.Data
}

// NewSampleBuckets is a helper function to simplify the provider implementation.
// The returned data source is configured later via Configure.
func NewSampleBuckets() datasource.DataSource {
	s := &SampleBuckets{}
	return s
}

// Metadata returns the sample bucket data source type name, composed of the
// provider type name plus the "_sample_buckets" suffix.
func (d *SampleBuckets) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = req.ProviderTypeName + "_sample_buckets"
}

// Schema defines the schema for the sample bucket data source.
// The caller supplies organization/project/cluster IDs; everything under
// "data" is computed from the Capella API response.
// Receiver renamed from "s" to "d" for consistency with the other methods
// on *SampleBuckets (Metadata, Read, Configure).
func (d *SampleBuckets) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Attributes: map[string]schema.Attribute{
			"organization_id": schema.StringAttribute{
				Required: true,
			},
			"project_id": schema.StringAttribute{
				Required: true,
			},
			"cluster_id": schema.StringAttribute{
				Required: true,
			},
			"data": schema.ListNestedAttribute{
				Computed: true,
				NestedObject: schema.NestedAttributeObject{
					Attributes: map[string]schema.Attribute{
						"id": schema.StringAttribute{
							Computed: true,
						},
						"name": schema.StringAttribute{
							Computed: true,
						},
						"organization_id": schema.StringAttribute{
							Computed: true,
						},
						"project_id": schema.StringAttribute{
							Computed: true,
						},
						"cluster_id": schema.StringAttribute{
							Computed: true,
						},
						"type": schema.StringAttribute{
							Computed: true,
						},
						"storage_backend": schema.StringAttribute{
							Computed: true,
						},
						"memory_allocation_in_mb": schema.Int64Attribute{
							Computed: true,
						},
						"bucket_conflict_resolution": schema.StringAttribute{
							Computed: true,
						},
						"durability_level": schema.StringAttribute{
							Computed: true,
						},
						"replicas": schema.Int64Attribute{
							Computed: true,
						},
						"flush": schema.BoolAttribute{
							Computed: true,
						},
						"time_to_live_in_seconds": schema.Int64Attribute{
							Computed: true,
						},
						"eviction_policy": schema.StringAttribute{
							Computed: true,
						},
						"stats": schema.SingleNestedAttribute{
							Computed: true,
							Attributes: map[string]schema.Attribute{
								"item_count": schema.Int64Attribute{
									Computed: true,
								},
								"ops_per_second": schema.Int64Attribute{
									Computed: true,
								},
								"disk_used_in_mib": schema.Int64Attribute{
									Computed: true,
								},
								"memory_used_in_mib": schema.Int64Attribute{
									Computed: true,
								},
							},
						},
					},
				},
			},
		},
	}
}

// Read refreshes the Terraform state with the latest data of sample buckets.
// It validates the configured IDs, pages through the Capella
// /sampleBuckets endpoint, and maps each entry into the state model.
func (d *SampleBuckets) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
	var state providerschema.SampleBuckets
	diags := req.Config.Get(ctx, &state)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}

	// Validate ensures organization, project, and cluster IDs are all present.
	clusterId, projectId, organizationId, err := state.Validate()
	if err != nil {
		resp.Diagnostics.AddError(
			"Error Reading Sample Buckets in Capella",
			"Could not read sample buckets in cluster "+clusterId+": "+err.Error(),
		)
		return
	}

	url := fmt.Sprintf("%s/v4/organizations/%s/projects/%s/clusters/%s/sampleBuckets", d.HostURL, organizationId, projectId, clusterId)
	cfg := api.EndpointCfg{Url: url, Method: http.MethodGet, SuccessStatus: http.StatusOK}
	response, err := api.GetPaginated[[]samplebucketapi.GetSampleBucketResponse](ctx, d.Client, d.Token, cfg, api.SortById)
	if err != nil {
		resp.Diagnostics.AddError(
			"Error Reading Sample Buckets in Capella",
			"Could not read sample buckets in cluster "+clusterId+": "+api.ParseError(err),
		)
		return
	}

	// Map response body to model.
	for _, sampleBucket := range response {
		var sampleStats providerschema.Stats
		if sampleBucket.Stats != nil {
			sampleStats = providerschema.NewStats(*sampleBucket.Stats)
		}

		sampleBucketStatsObj, objDiags := types.ObjectValueFrom(ctx, sampleStats.AttributeTypes(), sampleStats)
		if objDiags.HasError() {
			// Surface the real conversion diagnostics (previously discarded) and
			// use a summary without the duplicated "Error Error" wording.
			resp.Diagnostics.Append(objDiags...)
			resp.Diagnostics.AddError(
				"Error Reading Sample Bucket Info",
				"Could not read sample bucket info from record, unexpected error during stats conversion",
			)
			return
		}

		sampleBucketState := providerschema.SampleBucket{
			Id:                       types.StringValue(sampleBucket.Id),
			Name:                     types.StringValue(sampleBucket.Name),
			Type:                     types.StringValue(sampleBucket.Type),
			OrganizationId:           types.StringValue(organizationId),
			ProjectId:                types.StringValue(projectId),
			ClusterId:                types.StringValue(clusterId),
			StorageBackend:           types.StringValue(sampleBucket.StorageBackend),
			MemoryAllocationInMB:     types.Int64Value(sampleBucket.MemoryAllocationInMb),
			BucketConflictResolution: types.StringValue(sampleBucket.BucketConflictResolution),
			DurabilityLevel:          types.StringValue(sampleBucket.DurabilityLevel),
			Replicas:                 types.Int64Value(sampleBucket.Replicas),
			Flush:                    types.BoolValue(sampleBucket.Flush),
			TimeToLiveInSeconds:      types.Int64Value(sampleBucket.TimeToLiveInSeconds),
			EvictionPolicy:           types.StringValue(sampleBucket.EvictionPolicy),
			Stats:                    sampleBucketStatsObj,
		}
		state.Data = append(state.Data, sampleBucketState)
	}

	// Set state.
	diags = resp.State.Set(ctx, &state)
	resp.Diagnostics.Append(diags...)
	if resp.Diagnostics.HasError() {
		return
	}
}

// Configure adds the provider configured client to the bucket data source.
// ProviderData is nil during early framework calls, in which case there is
// nothing to configure yet.
func (d *SampleBuckets) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	if req.ProviderData == nil {
		return
	}

	if data, ok := req.ProviderData.(*providerschema.Data); ok {
		d.Data = data
		return
	}

	resp.Diagnostics.AddError(
		"Unexpected Data Source Configure Type",
		fmt.Sprintf("Expected *ProviderSourceData, got: %T. Please report this issue to the provider developers.", req.ProviderData),
	)
}
3 changes: 3 additions & 0 deletions internal/errors/errors.go
Original file line number Diff line number Diff line change
Expand Up @@ -158,4 +158,7 @@ var (

// ErrIfMatchCannotBeSetWhileCreate is returned when if_match is set during create operation.
ErrIfMatchCannotBeSetWhileCreate = errors.New("if_match attribute cannot be set during create operation")

	// ErrInvalidSampleBucketName is returned when the sample bucket name is not one of the supported sample datasets.
ErrInvalidSampleBucketName = errors.New("sample bucket name can only be travel-sample, beer-sample, gamesim-sample")
)
Loading
Loading