Using Customer Managed Encryption Keys
Table data is always encrypted at rest, but BigQuery also lets you control the keys used to encrypt the data. See Protecting data with Cloud KMS keys in the BigQuery documentation for more details.
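Every sample below identifies the key by its full Cloud KMS resource name, which encodes the project, location, key ring, and key. A minimal sketch of building that string (all four values here are placeholders to replace with your own); note that, as described in the linked documentation, the BigQuery service account also needs permission to encrypt and decrypt with the key.
# Placeholder values -- replace with your own project, location, key ring, and key.
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
    "your-project-id", "us-central1", "your-key-ring", "your-key"
)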
Create a new table, using a customer-managed encryption key from Cloud KMS to encrypt it.
# from google.cloud import bigquery
# client = bigquery.Client()
# dataset_id = 'my_dataset'
table_ref = client.dataset(dataset_id).table("my_table")
table = bigquery.Table(table_ref)
# Set the encryption key to use for the table.
# TODO: Replace this key with a key you have created in Cloud KMS.
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
"cloud-samples-tests", "us-central1", "test", "test"
)
table.encryption_configuration = bigquery.EncryptionConfiguration(
kms_key_name=kms_key_name
)
table = client.create_table(table) # API request
assert table.encryption_configuration.kms_key_name == kms_key_name
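If your version of the client library supports it, you can also set a default customer-managed key on a dataset, so that new tables created in it are encrypted with that key unless they specify their own. A minimal sketch, reusing the dataset_id and kms_key_name placeholders from above:
# from google.cloud import bigquery
# client = bigquery.Client()
# dataset_id = 'my_dataset'
# kms_key_name = 'projects/.../cryptoKeys/...'  # your Cloud KMS key
dataset = client.get_dataset(client.dataset(dataset_id)) # API request
dataset.default_encryption_configuration = bigquery.EncryptionConfiguration(
    kms_key_name=kms_key_name
)
dataset = client.update_dataset(
    dataset, ["default_encryption_configuration"]
) # API request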
Change the key used to encrypt a table.
# from google.cloud import bigquery
# client = bigquery.Client()
# table = client.get_table(table_ref)  # an existing table, e.g. from the example above
# original_kms_key_name = table.encryption_configuration.kms_key_name
assert table.encryption_configuration.kms_key_name == original_kms_key_name
# Set a new encryption key to use for the destination.
# TODO: Replace this key with a key you have created in KMS.
updated_kms_key_name = (
    "projects/cloud-samples-tests/locations/us-central1/"
    "keyRings/test/cryptoKeys/otherkey"
)
table.encryption_configuration = bigquery.EncryptionConfiguration(
    kms_key_name=updated_kms_key_name
)
table = client.update_table(table, ["encryption_configuration"]) # API request
assert table.encryption_configuration.kms_key_name == updated_kms_key_name
assert original_kms_key_name != updated_kms_key_name
Load a file from Cloud Storage, using a customer-managed encryption key from Cloud KMS for the destination table.
# from google.cloud import bigquery
# client = bigquery.Client()
# dataset_id = 'my_dataset'
dataset_ref = client.dataset(dataset_id)
job_config = bigquery.LoadJobConfig()
job_config.autodetect = True
job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
# Set the encryption key to use for the destination.
# TODO: Replace this key with a key you have created in KMS.
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
"cloud-samples-tests", "us-central1", "test", "test"
)
encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
job_config.destination_encryption_configuration = encryption_config
uri = "gs://cloud-samples-data/bigquery/us-states/us-states.json"
load_job = client.load_table_from_uri(
    uri,
    dataset_ref.table("us_states"),
    location="US", # Location must match that of the destination dataset.
    job_config=job_config,
) # API request
assert load_job.job_type == "load"
load_job.result() # Waits for table load to complete.
assert load_job.state == "DONE"
table = client.get_table(dataset_ref.table("us_states"))
assert table.encryption_configuration.kms_key_name == kms_key_name
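The same destination_encryption_configuration applies if you load from a local file instead of Cloud Storage. A short sketch, assuming a local newline-delimited JSON file named us-states.json and the job_config built above:
# job_config and dataset_ref are configured as in the previous example.
with open("us-states.json", "rb") as source_file:
    load_job = client.load_table_from_file(
        source_file,
        dataset_ref.table("us_states_local"),
        location="US", # Must match the destination dataset location.
        job_config=job_config,
    ) # API request
load_job.result() # Waits for table load to complete.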
Copy a table, using a customer-managed encryption key from Cloud KMS for the destination table.
# from google.cloud import bigquery
# client = bigquery.Client()
source_dataset = bigquery.DatasetReference("bigquery-public-data", "samples")
source_table_ref = source_dataset.table("shakespeare")
# dataset_id = 'my_dataset'
dest_dataset_ref = client.dataset(dataset_id)
dest_table_ref = dest_dataset_ref.table("destination_table")
# Set the encryption key to use for the destination.
# TODO: Replace this key with a key you have created in KMS.
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
"cloud-samples-tests", "us-central1", "test", "test"
)
encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
job_config = bigquery.CopyJobConfig()
job_config.destination_encryption_configuration = encryption_config
job = client.copy_table(
    source_table_ref,
    dest_table_ref,
    # Location must match that of the source and destination tables.
    location="US",
    job_config=job_config,
) # API request
job.result() # Waits for job to complete.
assert job.state == "DONE"
dest_table = client.get_table(dest_table_ref)
assert dest_table.encryption_configuration.kms_key_name == kms_key_name
Write query results to a table, using a customer-managed encryption key from Cloud KMS for the destination table.
# from google.cloud import bigquery
# client = bigquery.Client()
job_config = bigquery.QueryJobConfig()
# Set the destination table. Here, dataset_id is a string, such as:
# dataset_id = 'your_dataset_id'
table_ref = client.dataset(dataset_id).table("your_table_id")
job_config.destination = table_ref
# Set the encryption key to use for the destination.
# TODO: Replace this key with a key you have created in KMS.
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
"cloud-samples-tests", "us-central1", "test", "test"
)
encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
job_config.destination_encryption_configuration = encryption_config
# Start the query, passing in the extra configuration.
query_job = client.query(
    "SELECT 17 AS my_col;",
    # Location must match that of the dataset(s) referenced in the query
    # and of the destination table.
    location="US",
    job_config=job_config,
) # API request - starts the query
query_job.result()
# The destination table is written using the encryption configuration.
table = client.get_table(table_ref)
assert table.encryption_configuration.kms_key_name == kms_key_name
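As a quick usage check, the destination table reads back like any other table; BigQuery decrypts transparently as long as it can access the key. A small sketch that prints the query result written above:
rows = client.list_rows(table) # API request
for row in rows:
    print(row.my_col)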