From 6e89ea8d911e2d408eeaa892ca24056732a8b207 Mon Sep 17 00:00:00 2001 From: nick gondek Date: Wed, 28 Feb 2024 23:08:42 +0000 Subject: [PATCH] update access policy for s3_from_gcp role, to reflect new bucket name using var instead of hardcoded --- .deployment/tofu/modules/burn_backend/main.tf | 2 +- .deployment/tofu/modules/static_io/main.tf | 127 +----------------- src/routers/upload/shapefile_zip.py | 2 +- src/util/cloud_static_io.py | 2 +- 4 files changed, 4 insertions(+), 129 deletions(-) diff --git a/.deployment/tofu/modules/burn_backend/main.tf b/.deployment/tofu/modules/burn_backend/main.tf index 4222a8b..4e0853d 100644 --- a/.deployment/tofu/modules/burn_backend/main.tf +++ b/.deployment/tofu/modules/burn_backend/main.tf @@ -121,7 +121,7 @@ resource "google_cloud_run_v2_service" "tf-rest-burn-severity" { ## TODO [#24]: self-referential endpoint, will be solved by refactoring out titiler and/or making fully static env { name = "GCP_CLOUD_RUN_ENDPOINT" - value = "https://tf-rest-burn-severity-ohi6r6qs2a-uc.a.run.app" + value = "${terraform.workspace}" == "prod" ? 
"https://tf-rest-burn-severity-ohi6r6qs2a-uc.a.run.app" : "https://tf-rest-burn-severity-dev-ohi6r6qs2a-uc.a.run.app" } env { name = "CPL_VSIL_CURL_ALLOWED_EXTENSIONS" diff --git a/.deployment/tofu/modules/static_io/main.tf b/.deployment/tofu/modules/static_io/main.tf index 98dbe56..b60cd11 100644 --- a/.deployment/tofu/modules/static_io/main.tf +++ b/.deployment/tofu/modules/static_io/main.tf @@ -87,131 +87,6 @@ resource "aws_s3_bucket_object" "assets" { source = "../assets/${each.value}" } -# Then, the user for the server, allowing it access to Transfer Family - -# data "aws_iam_policy_document" "assume_role" { -# statement { -# effect = "Allow" - -# principals { -# type = "Service" -# identifiers = ["transfer.amazonaws.com"] -# } - -# actions = ["sts:AssumeRole"] -# } -# } - -# resource "aws_iam_role" "admin" { -# name = "tf-sftp-admin-iam-role" -# assume_role_policy = data.aws_iam_policy_document.assume_role.json -# } - -# data "aws_iam_policy_document" "s3_policy" { -# statement { -# sid = "ReadWriteS3" -# effect = "Allow" -# actions = [ -# "s3:ListBucket", -# ] -# resources = [ -# "arn:aws:s3:::burn-severity-backend", -# ] -# } - -# statement { -# effect = "Allow" -# actions = [ -# "s3:PutObject", -# "s3:GetObject", -# "s3:GetObjectTagging", -# "s3:DeleteObject", -# "s3:DeleteObjectVersion", -# "s3:GetObjectVersion", -# "s3:GetObjectVersionTagging", -# "s3:GetObjectACL", -# "s3:PutObjectACL", -# ] -# resources = [ -# "arn:aws:s3:::burn-severity-backend/*", -# ] -# } -# } - -# # Create the s3_policy -# resource "aws_iam_policy" "s3_admin_policy" { -# name = "s3_admin_policy" -# description = "S3 policy for admin user" -# policy = data.aws_iam_policy_document.s3_policy.json -# } - -# # Attach the policy to the role -# resource "aws_iam_role_policy_attachment" "s3_policy_attachment" { -# role = aws_iam_role.admin.name -# policy_arn = aws_iam_policy.s3_admin_policy.arn -# } - -# # Add the necessary session policy to the user -# data "aws_iam_policy_document" 
"session_policy" { -# statement { -# sid = "AllowListingOfUserFolder" -# effect = "Allow" -# actions = [ -# "s3:ListBucket", -# ] -# resources = [ -# "arn:aws:s3:::burn-severity-backend", -# ] -# condition { -# test = "StringLike" -# variable = "s3:prefix" -# values = [ -# "/public/*", -# "/public", -# "/" -# ] -# } -# } - -# statement { -# sid = "HomeDirObjectAccess" -# effect = "Allow" -# actions = [ -# "s3:PutObject", -# "s3:GetObject", -# "s3:DeleteObject", -# "s3:GetObjectVersion", -# ] -# resources = [ -# "arn:aws:s3:::burn-severity-backend/*", -# ] -# } -# } - -# # Finally, create the user within Transfer Family -# resource "aws_transfer_user" "tf-sftp-burn-severity" { -# server_id = aws_transfer_server.tf-sftp-burn-severity.id -# user_name = "admin" -# role = aws_iam_role.admin.arn -# home_directory_mappings { -# entry = "/" -# target = "/burn-severity-backend/public" -# } -# home_directory_type = "LOGICAL" -# policy = data.aws_iam_policy_document.session_policy.json -# } - -# resource "aws_transfer_ssh_key" "sftp_ssh_key_public" { -# depends_on = [aws_transfer_user.tf-sftp-burn-severity] -# server_id = aws_transfer_server.tf-sftp-burn-severity.id -# user_name = "admin" -# body = var.ssh_pairs["SSH_KEY_ADMIN_PUBLIC"] -# } - - -## TODO [#4]: This is OIDC stuff, which is not yet working -# Set up STS to allow the GCP server to assume a role for AWS secrets - # Defines who can assume the role. 
# Confusing string mapping for the OIDC provider URL (https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_iam-condition-keys.html#ck_aud) # example payload of our token looks like:/ @@ -282,7 +157,7 @@ data "aws_iam_policy_document" "session_policy" { "s3:GetObjectVersion", ] resources = [ - "arn:aws:s3:::burn-severity-backend/*", + "arn:aws:s3:::${aws_s3_bucket.burn-severity-backend.id}/*", ] } } diff --git a/src/routers/upload/shapefile_zip.py b/src/routers/upload/shapefile_zip.py index 33c1f75..3952262 100644 --- a/src/routers/upload/shapefile_zip.py +++ b/src/routers/upload/shapefile_zip.py @@ -66,7 +66,7 @@ async def upload_shapefile( __shp_paths, geojson = valid_shp[0] user_uploaded_s3_path = ( - "public/{affiliation}/{fire_event_name}/user_uploaded_{file.filename}" + f"public/{affiliation}/{fire_event_name}/user_uploaded_{file.filename}" ) # Upload the zip and a geojson to s3 cloud_static_io_client.upload( diff --git a/src/util/cloud_static_io.py b/src/util/cloud_static_io.py index 9dbc48d..7f22019 100644 --- a/src/util/cloud_static_io.py +++ b/src/util/cloud_static_io.py @@ -90,7 +90,7 @@ def impersonate_service_account(self): None """ # Load the credentials of the user - source_credentials, project = google.auth.default() + source_credentials, __project = google.auth.default() # Define the scopes of the impersonated credentials target_scopes = ["https://www.googleapis.com/auth/cloud-platform"]