confluent_connector

The confluent_connector resource enables creating, editing, and deleting connectors on Confluent Cloud.
resource "confluent_connector" "source" {
environment {
id = confluent_environment.staging.id
}
kafka_cluster {
id = confluent_kafka_cluster.basic.id
}
config_sensitive = {}
config_nonsensitive = {
"connector.class" = "DatagenSource"
"name" = "DatagenSourceConnector_0"
"kafka.auth.mode" = "SERVICE_ACCOUNT"
"kafka.service.account.id" = confluent_service_account.app-connector.id
"kafka.topic" = confluent_kafka_topic.orders.topic_name
"output.data.format" = "JSON"
"quickstart" = "ORDERS"
"tasks.max" = "1"
}
depends_on = [
confluent_kafka_acl.app-connector-describe-on-cluster,
confluent_kafka_acl.app-connector-write-on-target-topic,
confluent_kafka_acl.app-connector-create-on-data-preview-topics,
confluent_kafka_acl.app-connector-write-on-data-preview-topics,
]
lifecycle {
prevent_destroy = true
}
}
resource "confluent_connector" "sink" {
environment {
id = confluent_environment.staging.id
}
kafka_cluster {
id = confluent_kafka_cluster.basic.id
}
// Block for custom *sensitive* configuration properties that are labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/cloud/current/connectors/cc-s3-sink.html#configuration-properties
config_sensitive = {
"aws.access.key.id" = "***REDACTED***"
"aws.secret.access.key" = "***REDACTED***"
}
// Block for custom *nonsensitive* configuration properties that are *not* labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/cloud/current/connectors/cc-s3-sink.html#configuration-properties
config_nonsensitive = {
"topics" = confluent_kafka_topic.orders.topic_name
"input.data.format" = "JSON"
"connector.class" = "S3_SINK"
"name" = "S3_SINKConnector_0"
"kafka.auth.mode" = "SERVICE_ACCOUNT"
"kafka.service.account.id" = confluent_service_account.app-connector.id
"s3.bucket.name" = "<s3-bucket-name>"
"output.data.format" = "JSON"
"time.interval" = "DAILY"
"flush.size" = "1000"
"tasks.max" = "1"
}
depends_on = [
confluent_kafka_acl.app-connector-describe-on-cluster,
confluent_kafka_acl.app-connector-read-on-target-topic,
confluent_kafka_acl.app-connector-create-on-dlq-lcc-topics,
confluent_kafka_acl.app-connector-write-on-dlq-lcc-topics,
confluent_kafka_acl.app-connector-create-on-success-lcc-topics,
confluent_kafka_acl.app-connector-write-on-success-lcc-topics,
confluent_kafka_acl.app-connector-create-on-error-lcc-topics,
confluent_kafka_acl.app-connector-write-on-error-lcc-topics,
confluent_kafka_acl.app-connector-read-on-connect-lcc-group,
]
lifecycle {
prevent_destroy = true
}
}
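The ***REDACTED*** values above are placeholders for real AWS credentials. One way to keep such secrets out of version control is to pass them in through sensitive input variables; a minimal sketch follows, in which the variable names aws_access_key_id and aws_secret_access_key are illustrative rather than provider-defined:

variable "aws_access_key_id" {
  type      = string
  sensitive = true   # redacts the value from plan/apply output
}

variable "aws_secret_access_key" {
  type      = string
  sensitive = true
}

The connector's config_sensitive block can then reference var.aws_access_key_id and var.aws_secret_access_key instead of hard-coded literals.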
resource "confluent_connector" "sink" {
environment {
id = confluent_environment.staging.id
}
kafka_cluster {
id = confluent_kafka_cluster.basic.id
}
// Block for custom *sensitive* configuration properties that are labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/cloud/current/connectors/cc-amazon-dynamo-db-sink.html#configuration-properties
config_sensitive = {
"aws.access.key.id" = "***REDACTED***"
"aws.secret.access.key" = "***REDACTED***"
}
// Block for custom *nonsensitive* configuration properties that are *not* labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/cloud/current/connectors/cc-amazon-dynamo-db-sink.html#configuration-properties
config_nonsensitive = {
"topics" = confluent_kafka_topic.orders.topic_name
"input.data.format" = "JSON"
"connector.class" = "DynamoDbSink"
"name" = "DynamoDbSinkConnector_0"
"kafka.auth.mode" = "SERVICE_ACCOUNT"
"kafka.service.account.id" = confluent_service_account.app-connector.id
"aws.dynamodb.pk.hash" = "value.userid"
"aws.dynamodb.pk.sort" = "value.pageid"
"tasks.max" = "1"
}
depends_on = [
confluent_kafka_acl.app-connector-describe-on-cluster,
confluent_kafka_acl.app-connector-read-on-target-topic,
confluent_kafka_acl.app-connector-create-on-dlq-lcc-topics,
confluent_kafka_acl.app-connector-write-on-dlq-lcc-topics,
confluent_kafka_acl.app-connector-create-on-success-lcc-topics,
confluent_kafka_acl.app-connector-write-on-success-lcc-topics,
confluent_kafka_acl.app-connector-create-on-error-lcc-topics,
confluent_kafka_acl.app-connector-write-on-error-lcc-topics,
confluent_kafka_acl.app-connector-read-on-connect-lcc-group,
]
}
resource "confluent_connector" "source" {
environment {
id = confluent_environment.staging.id
}
kafka_cluster {
id = confluent_kafka_cluster.basic.id
}
// Block for custom *sensitive* configuration properties that are labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/platform/current/connect/userguide.html#connect-installing-plugins
config_sensitive = {
"kafka.api.key" = "***REDACTED***"
"kafka.api.secret" = "***REDACTED***"
}
// Block for custom *nonsensitive* configuration properties that are *not* labelled with "Type: password" under "Configuration Properties" section in the docs:
// https://docs.confluent.io/platform/current/connect/userguide.html#connect-installing-plugins
config_nonsensitive = {
"confluent.connector.type" = "CUSTOM"
"connector.class" = confluent_custom_connector_plugin.source.connector_class
"name" = "DatagenConnectorExampleName"
"kafka.auth.mode" = "KAFKA_API_KEY"
"kafka.topic" = confluent_kafka_topic.orders.topic_name
"output.data.format" = "JSON"
"quickstart" = "ORDERS"
"confluent.custom.plugin.id" = confluent_custom_connector_plugin.source.id
"min.interval" = "1000"
"max.interval" = "2000"
"tasks.max" = "1"
}
depends_on = [
confluent_role_binding.app-manager-kafka-cluster-admin,
]
}
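The confluent_custom_connector_plugin.source references above point to a separately declared plugin resource. A minimal sketch of such a declaration is shown below; the display name, connector class, and filename are placeholder values, so consult the confluent_custom_connector_plugin documentation for the full argument list:

resource "confluent_custom_connector_plugin" "source" {
  display_name    = "Datagen Source Plugin"                                # placeholder name
  connector_class = "io.confluent.kafka.connect.datagen.DatagenConnector"  # placeholder class
  connector_type  = "SOURCE"
  filename        = "datagen-connector.zip"                                # path to the plugin archive
}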
The following arguments are supported:

environment - (Required Configuration Block) supports the following:
  id - (Required String) The ID of the Environment that the connector belongs to, for example, env-abc123.
kafka_cluster - (Optional Configuration Block) supports the following:
  id - (Required String) The ID of the Kafka cluster that the connector belongs to, for example, lkc-abc123.
config_nonsensitive - (Required Map) Block for custom nonsensitive configuration properties that are not labelled with "Type: password" under the "Configuration Properties" section in the docs:
  name - (Required String) The configuration setting name, for example, connector.class.
  value - (Required String) The configuration setting value, for example, S3_SINK.
config_sensitive - (Required Map) Block for custom sensitive configuration properties that are labelled with "Type: password" under the "Configuration Properties" section in the docs (set it to an empty map {} if the connector has no sensitive settings):
  name - (Required String) The configuration setting name, for example, aws.secret.access.key.
  value - (Required String, Sensitive) The configuration setting value, for example, ***REDACTED***.
status - (Optional String) The status of the connector (one of "NONE", "PROVISIONING", "RUNNING", "DEGRADED", "FAILED", "PAUSED", "DELETED"). Pausing ("RUNNING" -> "PAUSED") and resuming ("PAUSED" -> "RUNNING") a connector is supported via an update operation, as shown in the sketch below.
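For example, a running connector can be paused by setting status on the existing resource and applying the change; a minimal sketch, with the other blocks elided and the resource address taken from the examples above:

resource "confluent_connector" "sink" {
  # ... environment, kafka_cluster, config_sensitive, and config_nonsensitive
  # blocks unchanged from the examples above ...

  # Switching this between "RUNNING" and "PAUSED" and running `terraform apply`
  # pauses or resumes the connector.
  status = "PAUSED"
}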
In addition to the preceding arguments, the following attributes are exported:

id - (Required String) The ID of the connector, for example, lcc-abc123.
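The exported ID can be referenced elsewhere in a configuration, for example to expose it as an output; the output name connector_id here is illustrative:

output "connector_id" {
  description = "The ID (lcc-...) of the managed connector."
  value       = confluent_connector.source.id
}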
You can import a connector by using Environment ID, Kafka cluster ID, and the connector's name, in the format <Environment ID>/<Kafka cluster ID>/<Connector name>, for example:
$ export CONFLUENT_CLOUD_API_KEY="<cloud_api_key>"
$ export CONFLUENT_CLOUD_API_SECRET="<cloud_api_secret>"
$ terraform import confluent_connector.my_connector "env-abc123/lkc-abc123/S3_SINKConnector_0"
The following end-to-end examples might help you get started with the confluent_connector resource: