jSpeciale
jSpeciale

Reputation: 47

Rename Volume mounted in Google Cloud Function using Terraform

Is there a way to mount a .env file (from Secret Manager) as a volume within a Cloud Function using Terraform ?

I'm switching to Terraform from plain bash gcloud cli commands and still don't know if it's possible. I have a .env file in a Secret at Secret Manager, and was previously using a command like that:

FUNCTION_DEPLOY_COMMAND="gcloud functions deploy"
SECRETS=/etc/secrets/.env=some-secret-name-$GCF_ENVIRONMENT:latest
$FUNCTION_DEPLOY_COMMAND some-function-name-$GCF_ENVIRONMENT --gen2 --region=$REGION --entry-point=$ENTRY_POINT --runtime=$RUNTIME \
--set-secrets=$SECRETS \
--run-service-account=$SERVICE_ACCOUNT --source=some-source --update-labels=$LABELS \
--trigger-event-filters="type=google.cloud.audit.log.v1.written" \
--trigger-event-filters="serviceName=storage.googleapis.com" \
--trigger-event-filters="methodName=storage.objects.create" \
--trigger-event-filters-path-pattern="resourceName=/projects/_/buckets/some-bucket-name/objects/account/**/*.json"

So a .env file was mounted within the cloud function container. I wonder if it's possible to do the same thing using Terraform.

I'm using the secret_volumes block to mount the volume , but the file keeps the original secret name.

service_config {
    service_account_email = local.function_invoker_email
    secret_volumes {
      mount_path  = "/etc/secrets"
      project_id  = local.project_id
      secret      = local.secret_name
    }
  }

Edit: So if I have a secret on Secret Manager named my-secret-name. The volume is mounted as /etc/secrets/my-secret-name instead of /etc/secrets/.env.

Any thoughts ?

Sharing full terraform script.

terraform {
  required_providers {
    google = {
      source  = "hashicorp/google"
      version = "5.21.0"
    }
  }

  backend "gcs" {
    bucket = "terraform-datalake-firestore"
  }
}

provider "google" {
  project = local.project_id
  region  = local.datalake_firestore_region
}

resource "google_storage_bucket" "terraform_state" {
  name                        = local.datalake_firestore_terraform_state
  location                    = local.datalake_firestore_location
  uniform_bucket_level_access = true
  force_destroy               = false
  public_access_prevention    = "enforced"
  versioning {
    enabled = true
  }
}

resource "random_id" "default" {
  byte_length = 8
}

resource "google_storage_bucket" "function_source" {
  name                        = local.datalake_firestore_source
  location                    = local.datalake_firestore_location
  uniform_bucket_level_access = true
}

resource "null_resource" "prepare_function_folder" {
  for_each = toset(var.datalake_firestore_functions)

  provisioner "local-exec" {
    command = "bash ${local.datalake_firestore_code_path}/copy_function_dependencies.sh ${each.key}"
  }

  triggers = {
    always_run = timestamp()
  }
}

data "archive_file" "insbuc0305" {
  type        = "zip"
  output_path = "/tmp/insbuc0305-function-source.zip"
  source_dir  = "${local.datalake_firestore_code_path}/insbuc0305"
  excludes    = [ "*.lst" ]
  depends_on  = [ null_resource.prepare_function_folder ]
}

resource "google_storage_bucket_object" "insbuc0305_object" {
  name = "insbuc0305-function-source-${data.archive_file.insbuc0305.output_md5}.zip"
  content_type = "application/zip"
  bucket = local.datalake_firestore_source
  source = data.archive_file.insbuc0305.output_path
}

resource "google_cloudfunctions2_function" "insbuc0305rfcfpy" {
  name        = local.insbuc0305rfcfpy
  location    = local.datalake_firestore_region
  description = "A function Event Triggered by Files Created on /event/client to..."
  
  labels = {
    "environment"   = "${var.environment}"
    "project"       = "datalake"
    "service"       = "datalake-etl-event"
    "team"          = "data"
    "layer"         = "file-storage"
    "sublayer"      = "raw"
  }

  build_config {
    runtime     = "python310"
    entry_point = "start_function"

    source {
      storage_source {
        bucket = local.datalake_firestore_source
        object = google_storage_bucket_object.insbuc0305_object.name
      }
    }
  }

  service_config {
    service_account_email = "[email protected]"
    available_cpu         = "167m"
    available_memory      = "128Mi"
    secret_volumes {
      project_id = local.project_id
      secret     = local.secret_name
      mount_path = "/etc/secrets/.env"
    }
  }

  event_trigger {
    event_type  = "google.cloud.audit.log.v1.written"

    event_filters {
      attribute = "serviceName"
      value     = "storage.googleapis.com"
    }
    event_filters {
      attribute = "methodName"
      value     = "storage.objects.create"
    }
    event_filters {
      attribute = "resourceName"
      value     = "/projects/_/buckets/name_dl_raw/objects/event/client/*.json"
      operator  = "match-path-pattern"
    }
  }

  depends_on = [
    data.archive_file.insbuc0305,
    google_storage_bucket.function_source,
    google_storage_bucket_object.insbuc0305_object
  ]
}

Upvotes: 0

Views: 37

Answers (0)

Related Questions