google_data_loss_prevention_job_trigger

A job trigger configuration. A job trigger schedules Cloud Data Loss Prevention (DLP) inspection jobs to run against a configured storage target.

To get more information about JobTrigger, see the JobTrigger REST API reference (https://cloud.google.com/dlp/docs/reference/rest/v2/projects.jobTriggers) and the official guide to creating job triggers (https://cloud.google.com/dlp/docs/creating-job-triggers).

Example Usage - Dlp Job Trigger Basic

resource "google_data_loss_prevention_job_trigger" "basic" {
    parent = "projects/my-project-name"
    description = "Description"
    display_name = "Displayname"

    triggers {
        schedule {
            recurrence_period_duration = "86400s"
        }
    }

    inspect_job {
        inspect_template_name = "fake"
        actions {
            save_findings {
                output_config {
                    table {
                        project_id = "project"
                        dataset_id = "dataset"
                    }
                }
            }
        }
        storage_config {
            cloud_storage_options {
                file_set {
                    url = "gs://mybucket/directory/"
                }
            }
        }
    }
}

Example Usage - Dlp Job Trigger Bigquery Row Limit

resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit" {
    parent = "projects/my-project-name"
    description = "Description"
    display_name = "Displayname"

    triggers {
        schedule {
            recurrence_period_duration = "86400s"
        }
    }

    inspect_job {
        inspect_template_name = "fake"
        actions {
            save_findings {
                output_config {
                    table {
                        project_id = "project"
                        dataset_id = "dataset"
                    }
                }
            }
        }
        storage_config {
            big_query_options {
                table_reference {
                    project_id = "project"
                    dataset_id = "dataset"
                    table_id = "table_to_scan"
                }

                rows_limit = 1000
                sample_method = "RANDOM_START"
            }
        }
    }
}

Example Usage - Dlp Job Trigger Bigquery Row Limit Percentage

resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit_percentage" {
    parent = "projects/my-project-name"
    description = "Description"
    display_name = "Displayname"

    triggers {
        schedule {
            recurrence_period_duration = "86400s"
        }
    }

    inspect_job {
        inspect_template_name = "fake"
        actions {
            save_findings {
                output_config {
                    table {
                        project_id = "project"
                        dataset_id = "dataset"
                    }
                }
            }
        }
        storage_config {
            big_query_options {
                table_reference {
                    project_id = "project"
                    dataset_id = "dataset"
                    table_id = "table_to_scan"
                }

                rows_limit_percent = 50
                sample_method = "RANDOM_START"
            }
        }
    }
}

Example Usage - Dlp Job Trigger Job Notification Emails

resource "google_data_loss_prevention_job_trigger" "job_notification_emails" {
  parent       = "projects/my-project-name"
  description  = "Description for the job_trigger created by terraform"
  display_name = "TerraformDisplayName"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "sample-inspect-template"
    actions {
      job_notification_emails {}
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
  }
}

Example Usage - Dlp Job Trigger Deidentify

resource "google_data_loss_prevention_job_trigger" "deidentify" {
  parent       = "projects/my-project-name"
  description  = "Description for the job_trigger created by terraform"
  display_name = "TerraformDisplayName"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "sample-inspect-template"
    actions {
      deidentify {
        cloud_storage_output    = "gs://samplebucket/dir/"
        file_types_to_transform = ["CSV", "TSV"]
        transformation_details_storage_config {
          table {
            project_id = "my-project-name"
            dataset_id = google_bigquery_dataset.default.dataset_id
            table_id   = google_bigquery_table.default.table_id
          }
        }
        transformation_config {
          deidentify_template            = "sample-deidentify-template"
          image_redact_template          = "sample-image-redact-template"
          structured_deidentify_template = "sample-structured-deidentify-template"
        }
      }
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
  }
}

resource "google_bigquery_dataset" "default" {
  dataset_id                  = "tf_test"
  friendly_name               = "terraform-test"
  description                 = "Description for the dataset created by terraform"
  location                    = "US"
  default_table_expiration_ms = 3600000

  labels = {
    env = "default"
  }
}

resource "google_bigquery_table" "default" {
  dataset_id          = google_bigquery_dataset.default.dataset_id
  table_id            = "tf_test"
  deletion_protection = false

  time_partitioning {
    type = "DAY"
  }

  labels = {
    env = "default"
  }

  schema = <<EOF
    [
    {
      "name": "quantity",
      "type": "NUMERIC",
      "mode": "NULLABLE",
      "description": "The quantity"
    },
    {
      "name": "name",
      "type": "STRING",
      "mode": "NULLABLE",
      "description": "Name of the object"
    }
    ]
  EOF
}

Example Usage - Dlp Job Trigger Hybrid

resource "google_data_loss_prevention_job_trigger" "hybrid_trigger" {
  parent = "projects/my-project-name"

  triggers {
    manual {}
  }

  inspect_job {
    inspect_template_name = "fake"
    actions {
      save_findings {
        output_config {
          table {
            project_id = "project"
            dataset_id = "dataset"
          }
        }
      }
    }
    storage_config {
      hybrid_options {
        description = "Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings"
        required_finding_label_keys = [
          "appointment-bookings-comments"
        ]
        labels = {
          env = "prod"
        }
        table_options {
          identifying_fields {
            name = "booking_id"
          }
        }
      }
    }
  }
}

Example Usage - Dlp Job Trigger Inspect

resource "google_data_loss_prevention_job_trigger" "inspect" {
  parent = "projects/my-project-name"
  description = "Description"
  display_name = "Displayname"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "fake"
    actions {
      save_findings {
        output_config {
          table {
            project_id = "project"
            dataset_id = "dataset"
          }
        }
      }
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
    inspect_config {
      custom_info_types {
        info_type {
          name = "MY_CUSTOM_TYPE"
        }

        likelihood = "UNLIKELY"

        regex {
          pattern = "test*"
        }
      }

      info_types {
        name = "EMAIL_ADDRESS"
      }

      min_likelihood = "UNLIKELY"
      rule_set {
        info_types {
          name = "EMAIL_ADDRESS"
        }
        rules {
          exclusion_rule {
            regex {
              pattern = ".+@example.com"
            }
            matching_type = "MATCHING_TYPE_FULL_MATCH"
          }
        }
      }

      rule_set {
        info_types {
          name = "MY_CUSTOM_TYPE"
        }
        rules {
          hotword_rule {
            hotword_regex {
              pattern = "example*"
            }
            proximity {
              window_before = 50
            }
            likelihood_adjustment {
              fixed_likelihood = "VERY_LIKELY"
            }
          }
        }
      }

      limits {
        max_findings_per_item    = 10
        max_findings_per_request = 50
      }
    }
  }
}

Example Usage - Dlp Job Trigger Publish To Stackdriver

resource "google_data_loss_prevention_job_trigger" "publish_to_stackdriver" {
  parent       = "projects/my-project-name"
  description  = "Description for the job_trigger created by terraform"
  display_name = "TerraformDisplayName"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "sample-inspect-template"
    actions {
      publish_to_stackdriver {}
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
  }
}

Example Usage - Dlp Job Trigger With Id

resource "google_data_loss_prevention_job_trigger" "with_trigger_id" {
  parent = "projects/my-project-name"
  description = "Starting description"
  display_name = "display"
  trigger_id = "id-"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "fake"
    actions {
      save_findings {
        output_config {
          table {
            project_id = "project"
            dataset_id = "dataset123"
          }
        }
      }
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
  }
}

Example Usage - Dlp Job Trigger Multiple Actions

resource "google_data_loss_prevention_job_trigger" "basic" {
    parent = "projects/my-project-name"
    description = "Description"
    display_name = "Displayname"

    triggers {
        schedule {
            recurrence_period_duration = "86400s"
        }
    }

    inspect_job {
        inspect_template_name = "fake"

        actions {
            save_findings {
                output_config {
                    table {
                        project_id = "project"
                        dataset_id = "dataset"
                    }
                }
            }
        }

        actions {
            pub_sub {
                topic = "projects/project/topics/topic-name"
            }
        }

        storage_config {
            cloud_storage_options {
                file_set {
                    url = "gs://mybucket/directory/"
                }
            }
        }
    }
}

Example Usage - Dlp Job Trigger Cloud Storage Optional Timespan Autopopulation

resource "google_data_loss_prevention_job_trigger" "basic" {
    parent = "projects/my-project-name"
    description = "Description"
    display_name = "Displayname"

    triggers {
        schedule {
            recurrence_period_duration = "86400s"
        }
    }

    inspect_job {
        inspect_template_name = "fake"
        actions {
            save_findings {
                output_config {
                    table {
                        project_id = "project"
                        dataset_id = "dataset"
                    }
                }
            }
        }
        storage_config {
            timespan_config {
                enable_auto_population_of_timespan_config = true
            }

            cloud_storage_options {
                file_set {
                    url = "gs://mybucket/directory/"
                }
            }
        }
    }
}

Argument Reference

The following arguments are supported:

The triggers block supports:

The schedule block supports:


The inspect_job block supports:

The inspect_config block supports:

The limits block supports:

The max_findings_per_info_type block supports:

The info_type block supports:

The sensitivity_score block supports:
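Although the full argument lists are not reproduced here, the limits block can also cap findings per info type. The following is a hedged sketch, assuming the provider mirrors the DLP FindingLimits message (a max_findings_per_info_type block with a nested info_type and a max_findings count); it is placed inside inspect_config:

limits {
  max_findings_per_item    = 10
  max_findings_per_request = 50

  # Cap findings for a specific info type.
  max_findings_per_info_type {
    info_type {
      name = "EMAIL_ADDRESS"
    }
    max_findings = 5
  }
}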

The info_types block supports:

The sensitivity_score block supports:

The rule_set block supports:

The info_types block supports:

The sensitivity_score block supports:

The rules block supports:

The hotword_rule block supports:

The hotword_regex block supports:

The proximity block supports:

The likelihood_adjustment block supports:

The exclusion_rule block supports:

The dictionary block supports:

The word_list block supports:

The cloud_storage_path block supports:

The regex block supports:

The exclude_info_types block supports:

The info_types block supports:

The sensitivity_score block supports:

The exclude_by_hotword block supports:

The hotword_regex block supports:

The proximity block supports:
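As a sketch of how these blocks combine, an exclusion_rule can also suppress findings with a dictionary instead of a regex. The field names follow the DLP ExclusionRule message and the word list is illustrative; the fragment goes inside inspect_config next to the rule_set examples above:

rule_set {
  info_types {
    name = "EMAIL_ADDRESS"
  }
  rules {
    exclusion_rule {
      # Exclude findings that exactly match a known allowlist.
      dictionary {
        word_list {
          words = ["alice@example.com", "bob@example.com"]
        }
      }
      matching_type = "MATCHING_TYPE_FULL_MATCH"
    }
  }
}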

The custom_info_types block supports:

The info_type block supports:

The sensitivity_score block supports:

The sensitivity_score block supports:

The regex block supports:

The dictionary block supports:

The word_list block supports:

The cloud_storage_path block supports:

The stored_type block supports:
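For illustration, a custom info type can also be backed by a dictionary rather than a regex. This is a hedged sketch assuming the dictionary/word_list shape of the DLP CustomInfoType message; the type name and words are placeholders:

custom_info_types {
  info_type {
    name = "PROJECT_CODENAME" # hypothetical custom info type name
  }

  likelihood = "LIKELY"

  dictionary {
    word_list {
      words = ["zeus", "apollo", "athena"] # placeholder terms
    }
  }
}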

The storage_config block supports:

The timespan_config block supports:

The timestamp_field block supports:
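Beyond the auto-population flag shown earlier, a fixed scan window can also be configured. The sketch below assumes start_time, end_time, and timestamp_field follow the DLP TimespanConfig message; the column name is hypothetical. It goes inside storage_config:

timespan_config {
  start_time = "2023-01-01T00:00:00Z"
  end_time   = "2023-12-31T23:59:59Z"

  # Column that records when each row was last modified (hypothetical name).
  timestamp_field {
    name = "updated_at"
  }
}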

The datastore_options block supports:

The partition_id block supports:

The kind block supports:
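None of the examples above scan Datastore, so as a hedged sketch (argument names mirror the DLP DatastoreOptions message; the namespace and kind are placeholders), a storage_config for Datastore might look like:

storage_config {
  datastore_options {
    partition_id {
      project_id   = "project"
      namespace_id = "bookings" # hypothetical namespace
    }
    kind {
      name = "Appointment" # hypothetical Datastore kind
    }
  }
}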

The cloud_storage_options block supports:

The file_set block supports:

The regex_file_set block supports:
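As an illustrative alternative to a plain url, a Cloud Storage file set can be described by bucket and regex. This assumes regex_file_set nests under file_set as in the DLP CloudStorageRegexFileSet message; the patterns are placeholders:

cloud_storage_options {
  file_set {
    regex_file_set {
      bucket_name   = "mybucket"
      include_regex = ["directory/.*\\.csv"]   # scan only CSV files under directory/
      exclude_regex = ["directory/archive/.*"] # skip archived objects
    }
  }
}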

The big_query_options block supports:

The table_reference block supports:

The identifying_fields block supports:

The included_fields block supports:

The excluded_fields block supports:
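To limit a BigQuery scan to specific columns, an included_fields (or excluded_fields) block can be added alongside table_reference. A hedged sketch, assuming these blocks take a single name field, with a placeholder column:

big_query_options {
  table_reference {
    project_id = "project"
    dataset_id = "dataset"
    table_id   = "table_to_scan"
  }

  # Only inspect the free-text column (hypothetical name).
  included_fields {
    name = "comments"
  }
}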

The hybrid_options block supports:

The table_options block supports:

The identifying_fields block supports:

The actions block supports:

The save_findings block supports:

The output_config block supports:

The table block supports:

The pub_sub block supports:

The deidentify block supports:

The transformation_config block supports:

The transformation_details_storage_config block supports:

The table block supports:

Attributes Reference

In addition to the arguments listed above, the following computed attributes are exported:
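For example, assuming the server-assigned name is among the exported attributes, other configuration can reference it:

output "dlp_job_trigger_name" {
  # The job trigger's resource name, assigned by the server on creation.
  value = google_data_loss_prevention_job_trigger.basic.name
}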

Timeouts

This resource provides the following Timeouts configuration options:
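As a sketch of how the timeouts options are applied (the 20-minute durations below are illustrative, not documented defaults), a timeouts block sits directly inside the resource:

resource "google_data_loss_prevention_job_trigger" "with_timeouts" {
  parent = "projects/my-project-name"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "fake"
    actions {
      save_findings {
        output_config {
          table {
            project_id = "project"
            dataset_id = "dataset"
          }
        }
      }
    }
    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://mybucket/directory/"
        }
      }
    }
  }

  # Operation timeouts; adjust the durations to your environment.
  timeouts {
    create = "20m"
    update = "20m"
    delete = "20m"
  }
}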

Import

JobTrigger can be imported using any of these accepted formats:

* {{parent}}/jobTriggers/{{name}}
* {{parent}}/{{name}}

In Terraform v1.5.0 and later, use an import block to import JobTrigger using one of the formats above. For example:

import {
  id = "{{parent}}/jobTriggers/{{name}}"
  to = google_data_loss_prevention_job_trigger.default
}

When using the terraform import command, JobTrigger can be imported using one of the formats above. For example:

$ terraform import google_data_loss_prevention_job_trigger.default {{parent}}/jobTriggers/{{name}}
$ terraform import google_data_loss_prevention_job_trigger.default {{parent}}/{{name}}