google_cloudfunctions2_function

A Cloud Function that contains user computation executed in response to an event.

To get more information about function, see the official Cloud Functions documentation at https://cloud.google.com/functions/docs.

Example Usage - Cloudfunctions2 Basic

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudfunctions2_function" "function" {
  name = "function-v2"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point 
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60
  }
}

output "function_uri" { 
  value = google_cloudfunctions2_function.function.service_config[0].uri
}

Example Usage - Cloudfunctions2 Full

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_service_account" "account" {
  account_id = "gcf-sa"
  display_name = "Test Service Account"
}

resource "google_pubsub_topic" "topic" {
  name = "functions2-topic"
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudfunctions2_function" "function" {
  name = "gcf-function"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloPubSub"  # Set the entry point 
    environment_variables = {
      BUILD_CONFIG_TEST = "build_test"
    }
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count               = 3
    min_instance_count               = 1
    available_memory                 = "4Gi"
    timeout_seconds                  = 60
    max_instance_request_concurrency = 80
    available_cpu                    = "4"
    environment_variables = {
      SERVICE_CONFIG_TEST = "config_test"
    }
    ingress_settings               = "ALLOW_INTERNAL_ONLY"
    all_traffic_on_latest_revision = true
    service_account_email          = google_service_account.account.email
  }

  event_trigger {
    trigger_region = "us-central1"
    event_type = "google.cloud.pubsub.topic.v1.messagePublished"
    pubsub_topic = google_pubsub_topic.topic.id
    retry_policy = "RETRY_POLICY_RETRY"
  }
}

Example Usage - Cloudfunctions2 Scheduler Auth

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_service_account" "account" {
  account_id   = "gcf-sa"
  display_name = "Test Service Account"
}

resource "google_storage_bucket" "bucket" {
  name                        = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location                    = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudfunctions2_function" "function" {
  name        = "gcf-function" # name should use kebab-case so generated Cloud Run service name will be the same
  location    = "us-central1"
  description = "a new function"

  build_config {
    runtime     = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    min_instance_count    = 1
    available_memory      = "256M"
    timeout_seconds       = 60
    service_account_email = google_service_account.account.email
  }
}

resource "google_cloudfunctions2_function_iam_member" "invoker" {
  project        = google_cloudfunctions2_function.function.project
  location       = google_cloudfunctions2_function.function.location
  cloud_function = google_cloudfunctions2_function.function.name
  role           = "roles/cloudfunctions.invoker"
  member         = "serviceAccount:${google_service_account.account.email}"
}

resource "google_cloud_run_service_iam_member" "cloud_run_invoker" {
  project  = google_cloudfunctions2_function.function.project
  location = google_cloudfunctions2_function.function.location
  service  = google_cloudfunctions2_function.function.name
  role     = "roles/run.invoker"
  member   = "serviceAccount:${google_service_account.account.email}"
}

resource "google_cloud_scheduler_job" "invoke_cloud_function" {
  name        = "invoke-gcf-function"
  description = "Schedule the HTTPS trigger for cloud function"
  schedule    = "0 0 * * *" # every day at midnight
  project     = google_cloudfunctions2_function.function.project
  region      = google_cloudfunctions2_function.function.location

  http_target {
    uri         = google_cloudfunctions2_function.function.service_config[0].uri
    http_method = "POST"
    oidc_token {
      audience              = "${google_cloudfunctions2_function.function.service_config[0].uri}/"
      service_account_email = google_service_account.account.email
    }
  }
}

Example Usage - Cloudfunctions2 Basic Gcs

resource "google_storage_bucket" "source-bucket" {
  name     = "gcf-source-bucket"
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.source-bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_storage_bucket" "trigger-bucket" {
  name     = "gcf-trigger-bucket"
  location = "us-central1" # The trigger must be in the same location as the bucket
  uniform_bucket_level_access = true
}

data "google_storage_project_service_account" "gcs_account" {
}

# To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
resource "google_project_iam_member" "gcs-pubsub-publishing" {
  project = "my-project-name"
  role    = "roles/pubsub.publisher"
  member  = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}"
}

resource "google_service_account" "account" {
  account_id   = "gcf-sa"
  display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test"
}

# Permissions on the service account used by the function and Eventarc trigger
resource "google_project_iam_member" "invoking" {
  project = "my-project-name"
  role    = "roles/run.invoker"
  member  = "serviceAccount:${google_service_account.account.email}"
  depends_on = [google_project_iam_member.gcs-pubsub-publishing]
}

resource "google_project_iam_member" "event-receiving" {
  project = "my-project-name"
  role    = "roles/eventarc.eventReceiver"
  member  = "serviceAccount:${google_service_account.account.email}"
  depends_on = [google_project_iam_member.invoking]
}

resource "google_project_iam_member" "artifactregistry-reader" {
  project = "my-project-name"
  role     = "roles/artifactregistry.reader"
  member   = "serviceAccount:${google_service_account.account.email}"
  depends_on = [google_project_iam_member.event-receiving]
}

resource "google_cloudfunctions2_function" "function" {
  depends_on = [
    google_project_iam_member.event-receiving,
    google_project_iam_member.artifactregistry-reader,
  ]
  name = "gcf-function"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime     = "nodejs12"
    entry_point = "entryPoint" # Set the entry point in the code
    environment_variables = {
      BUILD_CONFIG_TEST = "build_test"
    }
    source {
      storage_source {
        bucket = google_storage_bucket.source-bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count = 3
    min_instance_count = 1
    available_memory   = "256M"
    timeout_seconds    = 60
    environment_variables = {
      SERVICE_CONFIG_TEST = "config_test"
    }
    ingress_settings               = "ALLOW_INTERNAL_ONLY"
    all_traffic_on_latest_revision = true
    service_account_email          = google_service_account.account.email
  }

  event_trigger {
    event_type = "google.cloud.storage.object.v1.finalized"
    retry_policy = "RETRY_POLICY_RETRY"
    service_account_email = google_service_account.account.email
    event_filters {
      attribute = "bucket"
      value = google_storage_bucket.trigger-bucket.name
    }
  }
}

Example Usage - Cloudfunctions2 Basic Auditlogs

# This example follows the examples shown in this Google Cloud Community blog post
# https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
# and the docs:
# https://cloud.google.com/eventarc/docs/path-patterns

resource "google_storage_bucket" "source-bucket" {
  name     = "gcf-source-bucket"
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.source-bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_service_account" "account" {
  account_id   = "gcf-sa"
  display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test"
}

# Note: the recommended way to listen for Cloud Storage events is a Cloud Storage trigger.
# This example monitors the bucket through Audit Logs instead, because Audit Log events
# support path patterns, which the google_cloudfunctions2_function below demonstrates.
resource "google_storage_bucket" "audit-log-bucket" {
  name     = "gcf-auditlog-bucket"
  location = "us-central1"  # The trigger must be in the same location as the bucket
  uniform_bucket_level_access = true
}

# Permissions on the service account used by the function and Eventarc trigger
resource "google_project_iam_member" "invoking" {
  project = "my-project-name"
  role    = "roles/run.invoker"
  member  = "serviceAccount:${google_service_account.account.email}"
}

resource "google_project_iam_member" "event-receiving" {
  project = "my-project-name"
  role    = "roles/eventarc.eventReceiver"
  member  = "serviceAccount:${google_service_account.account.email}"
  depends_on = [google_project_iam_member.invoking]
}

resource "google_project_iam_member" "artifactregistry-reader" {
  project = "my-project-name"
  role     = "roles/artifactregistry.reader"
  member   = "serviceAccount:${google_service_account.account.email}"
  depends_on = [google_project_iam_member.event-receiving]
}

resource "google_cloudfunctions2_function" "function" {
  depends_on = [
    google_project_iam_member.event-receiving,
    google_project_iam_member.artifactregistry-reader,
  ]
  name = "gcf-function"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime     = "nodejs12"
    entry_point = "entryPoint" # Set the entry point in the code
    environment_variables = {
      BUILD_CONFIG_TEST = "build_test"
    }
    source {
      storage_source {
        bucket = google_storage_bucket.source-bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count = 3
    min_instance_count = 1
    available_memory   = "256M"
    timeout_seconds    = 60
    environment_variables = {
      SERVICE_CONFIG_TEST = "config_test"
    }
    ingress_settings               = "ALLOW_INTERNAL_ONLY"
    all_traffic_on_latest_revision = true
    service_account_email          = google_service_account.account.email
  }

  event_trigger {
    trigger_region = "us-central1" # The trigger must be in the same location as the bucket
    event_type = "google.cloud.audit.log.v1.written"
    retry_policy = "RETRY_POLICY_RETRY"
    service_account_email = google_service_account.account.email
    event_filters {
      attribute = "serviceName"
      value = "storage.googleapis.com"
    }
    event_filters {
      attribute = "methodName"
      value = "storage.objects.create"
    }
    event_filters {
      attribute = "resourceName"
      value = "/projects/_/buckets/${google_storage_bucket.audit-log-bucket.name}/objects/*.txt" # Path pattern selects all .txt files in the bucket
      operator = "match-path-pattern" # This allows path patterns to be used in the value field
    }
  }
}

Example Usage - Cloudfunctions2 Basic Builder

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_service_account" "account" {
  account_id = "gcf-sa"
  display_name = "Test Service Account"
}

resource "google_project_iam_member" "log_writer" {
  project = google_service_account.account.project
  role    = "roles/logging.logWriter"
  member  = "serviceAccount:${google_service_account.account.email}"
}

resource "google_project_iam_member" "artifact_registry_writer" {
  project = google_service_account.account.project
  role    = "roles/artifactregistry.writer"
  member  = "serviceAccount:${google_service_account.account.email}"
}

resource "google_project_iam_member" "storage_object_admin" {
  project = google_service_account.account.project
  role    = "roles/storage.objectAdmin"
  member  = "serviceAccount:${google_service_account.account.email}"
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

# The builder service account's permissions need time to stabilize before the build can pull the source zip
resource "time_sleep" "wait_60s" {
  create_duration = "60s"

  depends_on = [
    google_project_iam_member.log_writer,
    google_project_iam_member.artifact_registry_writer,
    google_project_iam_member.storage_object_admin,
  ]
}

resource "google_cloudfunctions2_function" "function" {
  name = "function-v2"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point 
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
    service_account = google_service_account.account.id
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60
  }

  depends_on = [time_sleep.wait_60s]
}

output "function_uri" { 
  value = google_cloudfunctions2_function.function.service_config[0].uri
}

Example Usage - Cloudfunctions2 Secret Env

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudfunctions2_function" "function" {
  name = "function-secret"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point 
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60

    secret_environment_variables {
      key        = "TEST"
      project_id = local.project
      secret     = google_secret_manager_secret.secret.secret_id
      version    = "latest"
    }
  }
  depends_on = [google_secret_manager_secret_version.secret]
}

resource "google_secret_manager_secret" "secret" {
  secret_id = "secret"

  replication {
    user_managed {
      replicas {
        location = "us-central1"
      }
    }
  }  
}

resource "google_secret_manager_secret_version" "secret" {
  secret = google_secret_manager_secret.secret.name

  secret_data = "secret"
  enabled = true
}

Example Usage - Cloudfunctions2 Secret Volume

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudfunctions2_function" "function" {
  name = "function-secret"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point 
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60

    secret_volumes {
      mount_path = "/etc/secrets"
      project_id = local.project
      secret     = google_secret_manager_secret.secret.secret_id
    }
  }
  depends_on = [google_secret_manager_secret_version.secret]
}

resource "google_secret_manager_secret" "secret" {
  secret_id = "secret"

  replication {
    user_managed {
      replicas {
        location = "us-central1"
      }
    }
  }  
}

resource "google_secret_manager_secret_version" "secret" {
  secret = google_secret_manager_secret.secret.name

  secret_data = "secret"
  enabled = true
}

Example Usage - Cloudfunctions2 Private Workerpool

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

resource "google_storage_bucket" "bucket" {
  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_cloudbuild_worker_pool" "pool" {
  name = "workerpool"
  location = "us-central1"
  worker_config {
    disk_size_gb = 100
    machine_type = "e2-standard-8"
    no_external_ip = false
  }
}

resource "google_cloudfunctions2_function" "function" {
  name = "function-workerpool"
  location = "us-central1"
  description = "a new function"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point 
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
    worker_pool = google_cloudbuild_worker_pool.pool.id
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60
  }
}

Example Usage - Cloudfunctions2 Cmek Docs

locals {
  project = "my-project-name" # Google Cloud Platform Project ID
}

data "google_project" "project" {
  provider = google-beta
}

resource "google_storage_bucket" "bucket" {
  provider = google-beta

  name     = "${local.project}-gcf-source"  # Every bucket name must be globally unique
  location = "US"
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "object" {
  provider = google-beta

  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = "function-source.zip"  # Add path to the zipped function source code
}

resource "google_project_service_identity" "ea_sa" {
  provider = google-beta

  project = data.google_project.project.project_id
  service = "eventarc.googleapis.com"
}

resource "google_artifact_registry_repository" "unencoded-ar-repo" {
  provider = google-beta

  repository_id = "ar-repo"
  location = "us-central1"
  format = "DOCKER"
}

resource "google_artifact_registry_repository_iam_binding" "binding" {
  provider = google-beta

  location = google_artifact_registry_repository.encoded-ar-repo.location
  repository = google_artifact_registry_repository.encoded-ar-repo.name
  role = "roles/artifactregistry.admin"
  members = [
    "serviceAccount:service-${data.google_project.project.number}@gcf-admin-robot.iam.gserviceaccount.com",
  ]
}

resource "google_kms_crypto_key_iam_binding" "gcf_cmek_keyuser" {
  provider = google-beta

  crypto_key_id = "cmek-key"
  role          = "roles/cloudkms.cryptoKeyEncrypterDecrypter"

  members = [
    "serviceAccount:service-${data.google_project.project.number}@gcf-admin-robot.iam.gserviceaccount.com",
    "serviceAccount:service-${data.google_project.project.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com",
    "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com",
    "serviceAccount:service-${data.google_project.project.number}@serverless-robot-prod.iam.gserviceaccount.com",
    "serviceAccount:${google_project_service_identity.ea_sa.email}",
  ]

  depends_on = [
    google_project_service_identity.ea_sa
  ]
}

resource "google_artifact_registry_repository" "encoded-ar-repo" {
  provider = google-beta

  location = "us-central1"
  repository_id = "cmek-repo"
  format = "DOCKER"
  kms_key_name = "cmek-key"
  depends_on = [
    google_kms_crypto_key_iam_binding.gcf_cmek_keyuser
  ]
}

resource "google_cloudfunctions2_function" "function" {
  provider = google-beta

  name = "function-cmek"
  location = "us-central1"
  description = "CMEK function"
  kms_key_name = "cmek-key"

  build_config {
    runtime = "nodejs16"
    entry_point = "helloHttp"  # Set the entry point
    docker_repository = google_artifact_registry_repository.encoded-ar-repo.id

    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.object.name
      }
    }
  }

  service_config {
    max_instance_count  = 1
    available_memory    = "256M"
    timeout_seconds     = 60
  }

  depends_on = [
    google_kms_crypto_key_iam_binding.gcf_cmek_keyuser
  ]

}

Argument Reference

The following arguments are supported:
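As a quick orientation before the block-level reference, the sketch below maps the top-level arguments to the nested blocks documented in the subsections that follow. It is a minimal outline built only from arguments shown in the examples above, not an exhaustive schema:

resource "google_cloudfunctions2_function" "example" {
  name        = "my-function"  # Required; kebab-case keeps the generated Cloud Run service name identical
  location    = "us-central1"  # Required; the region the function is deployed in
  description = "a new function"

  build_config {  # How the source is built; see the build_config block below
    runtime     = "nodejs16"
    entry_point = "helloHttp"
    source {  # See the source and storage_source blocks below
      storage_source {
        bucket = "my-source-bucket"  # Hypothetical bucket name
        object = "function-source.zip"
      }
    }
  }

  service_config {  # Runtime settings; see the service_config block below
    max_instance_count = 1
    available_memory   = "256M"
    timeout_seconds    = 60
  }
}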


The build_config block supports:

The source block supports:

The storage_source block supports:

The repo_source block supports:

The service_config block supports:

The secret_environment_variables block supports:

The secret_volumes block supports:

The versions block supports:

The event_trigger block supports:

The event_filters block supports:

Attributes Reference

In addition to the arguments listed above, the following computed attributes are exported:
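For example, computed attributes can be read from outputs once the function is created. A minimal sketch, assuming the exported attributes include the function state alongside the service_config[0].uri used in the examples above:

output "function_state" {
  value = google_cloudfunctions2_function.function.state # e.g. "ACTIVE" once deployed
}

output "function_uri" {
  value = google_cloudfunctions2_function.function.service_config[0].uri
}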

Timeouts

This resource provides the following Timeouts configuration options:

- create - Default is 60 minutes.
- update - Default is 60 minutes.
- delete - Default is 60 minutes.
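If an operation needs longer than the defaults (for example, a large build), the timeouts can be overridden inside the resource. A minimal sketch:

resource "google_cloudfunctions2_function" "function" {
  # ... name, location, build_config, service_config as in the examples above ...

  timeouts {
    create = "90m" # Allow extra time for the initial build and deploy
    update = "90m"
    delete = "30m"
  }
}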

Import

function can be imported using any of these accepted formats:

- projects/{{project}}/locations/{{location}}/functions/{{name}}
- {{project}}/{{location}}/{{name}}
- {{location}}/{{name}}

In Terraform v1.5.0 and later, use an import block to import function using one of the formats above. For example:

import {
  id = "projects/{{project}}/locations/{{location}}/functions/{{name}}"
  to = google_cloudfunctions2_function.default
}

When using the terraform import command, function can be imported using one of the formats above. For example:

$ terraform import google_cloudfunctions2_function.default projects/{{project}}/locations/{{location}}/functions/{{name}}
$ terraform import google_cloudfunctions2_function.default {{project}}/{{location}}/{{name}}
$ terraform import google_cloudfunctions2_function.default {{location}}/{{name}}

User Project Overrides

This resource supports User Project Overrides.
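A minimal sketch of enabling the override on the provider, using the hypothetical project ID from the examples above, so that API quota is attributed to the billing project rather than the project associated with the credentials:

provider "google" {
  user_project_override = true
  billing_project       = "my-project-name" # Project that absorbs quota and billing for API calls
}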