google_biglake_table

Represents a BigLake Table. A Table is contained within a Database, which in turn is contained within a Catalog.

To get more information about Table, see the API documentation.

Example Usage - Biglake Table

resource "google_biglake_catalog" "catalog" {
    name = "my_catalog"
    location = "US"
}

resource "google_storage_bucket" "bucket" {
  name                        = "my_bucket"
  location                    = "US"
  force_destroy               = true
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "metadata_folder" {
  name    = "metadata/"
  content = " "
  bucket  = google_storage_bucket.bucket.name
}


resource "google_storage_bucket_object" "data_folder" {
  name    = "data/"
  content = " "
  bucket  = google_storage_bucket.bucket.name
}

resource "google_biglake_database" "database" {
    name = "my_database"
    catalog = google_biglake_catalog.catalog.id
    type = "HIVE"
    hive_options {
        location_uri = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.metadata_folder.name}"
        parameters = {
          "owner" = "Alex"
        }
    }
}

resource "google_biglake_table" "table" {
    name = "my_table"
    database = google_biglake_database.database.id
    type = "HIVE"
    hive_options {
      table_type = "MANAGED_TABLE"
      storage_descriptor {
        location_uri = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.data_folder.name}"
        input_format  = "org.apache.hadoop.mapred.SequenceFileInputFormat"
        output_format = "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
      }
      # Some Example Parameters.
      parameters = {
        "spark.sql.create.version" = "3.1.3"
        "spark.sql.sources.schema.numParts" = "1"
        "transient_lastDdlTime" = "1680894197"
        "spark.sql.partitionProvider" = "catalog"
        "owner" = "John Doe"
        "spark.sql.sources.schema.part.0"= "{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"age\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}"
        "spark.sql.sources.provider" = "iceberg"
        "provider" = "iceberg"
      }
  }
}

Argument Reference

The following arguments are supported:

* name - (Required) Name of the Table.
* database - (Optional) The id of the parent database.
* type - (Optional) The table type.
* hive_options - (Optional) Options of a Hive table. Structure is documented below.

The hive_options block supports:

* parameters - (Optional) Stores user supplied Hive table parameters. An object containing a list of "key": value pairs.
* table_type - (Optional) Hive table type. For example, MANAGED_TABLE or EXTERNAL_TABLE.
* storage_descriptor - (Optional) Stores physical storage information on the data. Structure is documented below.

The storage_descriptor block supports:

* location_uri - (Optional) Cloud Storage folder URI where the table data is stored, starting with "gs://".
* input_format - (Optional) The fully qualified Java class name of the input format.
* output_format - (Optional) The fully qualified Java class name of the output format.

Attributes Reference

In addition to the arguments listed above, the following computed attributes are exported:

* id - An identifier for the resource with format {{database}}/tables/{{name}}
* create_time - Output only. The creation time of the table. A timestamp in RFC3339 UTC "Zulu" format.
* update_time - Output only. The last modification time of the table.
* delete_time - Output only. The deletion time of the table. Only set after the table is deleted.
* expire_time - Output only. The time when this table is considered expired. Only set after the table is deleted.
* etag - The checksum of a table object computed by the server based on the value of other fields. It may be sent on update requests to ensure the client has an up-to-date value before proceeding.
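
As a minimal sketch, the exported attributes can be referenced like any other resource attribute. This assumes the google_biglake_table.table resource from the example above:

# Expose computed attributes of the example table as outputs.
output "biglake_table_id" {
  description = "Identifier of the BigLake table."
  value       = google_biglake_table.table.id
}

output "biglake_table_create_time" {
  description = "Creation timestamp of the BigLake table."
  value       = google_biglake_table.table.create_time
}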

Timeouts

This resource provides the following Timeouts configuration options:

* create - Default is 20 minutes.
* update - Default is 20 minutes.
* delete - Default is 20 minutes.
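
For example, a sketch of overriding the defaults with a custom timeouts block inside the resource (the 30-minute values are illustrative, not defaults):

resource "google_biglake_table" "table" {
  # ... arguments as in the example above ...

  # Custom operation timeouts for this resource.
  timeouts {
    create = "30m"
    update = "30m"
    delete = "30m"
  }
}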

Import

Table can be imported using any of these accepted formats:

* {{database}}/tables/{{name}}

In Terraform v1.5.0 and later, use an import block to import Table using one of the formats above. For example:

import {
  id = "{{database}}/tables/{{name}}"
  to = google_biglake_table.default
}

When using the terraform import command, Table can be imported using one of the formats above. For example:

$ terraform import google_biglake_table.default {{database}}/tables/{{name}}