diff --git a/main.tf b/main.tf index 32271b60..d743310e 100644 --- a/main.tf +++ b/main.tf @@ -210,16 +210,20 @@ module "security-group" { module "s3-storage" { - source = "app.terraform.io/indico/indico-aws-buckets/mod" - version = "3.3.1" - force_destroy = true # allows terraform to destroy non-empty buckets. - label = var.label - kms_key_arn = module.kms_key.key.arn - submission_expiry = var.submission_expiry - uploads_expiry = var.uploads_expiry - include_rox = var.include_rox - enable_backup = var.enable_s3_backup - enable_access_logging = var.enable_s3_access_logging + source = "app.terraform.io/indico/indico-aws-buckets/mod" + version = "4.0.0" + force_destroy = true # allows terraform to destroy non-empty buckets. + label = var.label + kms_key_arn = module.kms_key.key.arn + submission_expiry = var.submission_expiry + uploads_expiry = var.uploads_expiry + include_rox = var.include_rox + enable_backup = var.enable_s3_backup + enable_access_logging = var.enable_s3_access_logging + bucket_type = var.bucket_type + data_s3_bucket_name = var.data_s3_bucket_name + api_models_s3_bucket_name = var.api_models_s3_bucket_name + pgbackup_s3_bucket_name = var.pgbackup_s3_bucket_name } diff --git a/outputs.tf b/outputs.tf index bc5bd026..fb0b04e6 100644 --- a/outputs.tf +++ b/outputs.tf @@ -9,12 +9,16 @@ output "data_s3_bucket_name" { value = module.s3-storage.data_s3_bucket_name } +output "pgbackup_s3_bucket_name" { + description = "Name of the pgbackup s3 bucket" + value = module.s3-storage.pgbackup_s3_bucket_name +} + output "s3_role_id" { description = "ID of the S3 role" value = module.cluster.s3_role_id } - output "efs_filesystem_id" { description = "ID of the EFS filesystem" value = var.include_efs == true ? 
module.efs-storage[0].efs_filesystem_id : "" diff --git a/variables.tf b/variables.tf index 463c087e..48731db6 100644 --- a/variables.tf +++ b/variables.tf @@ -824,50 +824,50 @@ variable "harness_mount_path" { } variable "lambda_sns_forwarder_enabled" { - type = bool - default = false + type = bool + default = false description = "If enabled a lamda will be provisioned to forward sns messages to an external endpoint." } variable "lambda_sns_forwarder_destination_endpoint" { - type = string - default = "" + type = string + default = "" description = "destination URL for the lambda sns forwarder" } variable "lambda_sns_forwarder_topic_arn" { - type = string - default = "" + type = string + default = "" description = "SNS topic to triger lambda forwarder." } variable "lambda_sns_forwarder_github_organization" { - type = string - default = "IndicoDataSolutions" + type = string + default = "IndicoDataSolutions" description = "The github organization containing the lambda_sns_forwarder code to use" } variable "lambda_sns_forwarder_github_repository" { - type = string - default = "" + type = string + default = "" description = "The github repository containing the lambda_sns_forwarder code to use" } variable "lambda_sns_forwarder_github_branch" { - type = string - default = "main" + type = string + default = "main" description = "The github branch / tag containing the lambda_sns_forwarder code to use" } variable "lambda_sns_forwarder_github_zip_path" { - type = string - default = "zip/lambda.zip" + type = string + default = "zip/lambda.zip" description = "Full path to the lambda zip file" } variable "lambda_sns_forwarder_function_variables" { - type = map - default = {} + type = map(any) + default = {} description = "A map of variables for the lambda_sns_forwarder code to use" } @@ -1043,3 +1043,30 @@ variable "instance_volume_type" { default = "gp2" description = "The type of EBS volume to attach to the cluster nodes" } + +variable "bucket_type" { + type = string + 
default = "create" + validation { + condition = var.bucket_type == "create" || var.bucket_type == "load" + error_message = "Invalid bucket_type ${var.bucket_type}: must be either create or load." + } +} + +variable "data_s3_bucket_name" { + type = string + default = "" + description = "The name of the existing S3 bucket to be loaded and used as the data bucket" +} + +variable "api_models_s3_bucket_name" { + type = string + default = "" + description = "The name of the existing S3 bucket to be loaded and used as the API model bucket" +} + +variable "pgbackup_s3_bucket_name" { + type = string + default = "" + description = "The name of the existing S3 bucket to be loaded and used as the postgres backup bucket" +}