confluent_connector Resource

This resource is available in General Availability.

confluent_connector provides a connector resource that enables creating, editing, and deleting connectors on Confluent Cloud.

Example Usage

Example Managed Datagen Source Connector that uses a service account to communicate with your Kafka cluster

resource "confluent_connector" "source" {
  environment {
    id = confluent_environment.staging.id
  }
  kafka_cluster {
    id = confluent_kafka_cluster.basic.id
  }

  // The Datagen source requires no sensitive configuration properties,
  // so this block is intentionally empty.
  config_sensitive = {}

  config_nonsensitive = {
    "connector.class"          = "DatagenSource"
    "name"                     = "DatagenSourceConnector_0"
    "kafka.auth.mode"          = "SERVICE_ACCOUNT"
    "kafka.service.account.id" = confluent_service_account.app-connector.id
    "kafka.topic"              = confluent_kafka_topic.orders.topic_name
    "output.data.format"       = "JSON"
    "quickstart"               = "ORDERS"
    "tasks.max"                = "1"
  }

  depends_on = [
    confluent_kafka_acl.app-connector-describe-on-cluster,
    confluent_kafka_acl.app-connector-write-on-target-topic,
    confluent_kafka_acl.app-connector-create-on-data-preview-topics,
    confluent_kafka_acl.app-connector-write-on-data-preview-topics,
  ]

  lifecycle {
    prevent_destroy = true
  }
}

Example Managed Amazon S3 Sink Connector that uses a service account to communicate with your Kafka cluster

resource "confluent_connector" "sink" {
  environment {
    id = confluent_environment.staging.id
  }
  kafka_cluster {
    id = confluent_kafka_cluster.basic.id
  }

  // Block for custom *sensitive* configuration properties that are labelled with "Type: password" under "Configuration Properties" section in the docs:
  // https://docs.confluent.io/cloud/current/connectors/cc-s3-sink.html#configuration-properties
  config_sensitive = {
    "aws.access.key.id"     = "***REDACTED***"
    "aws.secret.access.key" = "***REDACTED***"
  }

  // Block for custom *nonsensitive* configuration properties that are *not* labelled with "Type: password" under "Configuration Properties" section in the docs:
  // https://docs.confluent.io/cloud/current/connectors/cc-s3-sink.html#configuration-properties
  config_nonsensitive = {
    "topics"                   = confluent_kafka_topic.orders.topic_name
    "input.data.format"        = "JSON"
    "connector.class"          = "S3_SINK"
    "name"                     = "S3_SINKConnector_0"
    "kafka.auth.mode"          = "SERVICE_ACCOUNT"
    "kafka.service.account.id" = confluent_service_account.app-connector.id
    "s3.bucket.name"           = "<s3-bucket-name>"
    "output.data.format"       = "JSON"
    "time.interval"            = "DAILY"
    "flush.size"               = "1000"
    "tasks.max"                = "1"
  }

  depends_on = [
    confluent_kafka_acl.app-connector-describe-on-cluster,
    confluent_kafka_acl.app-connector-read-on-target-topic,
    confluent_kafka_acl.app-connector-create-on-dlq-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-dlq-lcc-topics,
    confluent_kafka_acl.app-connector-create-on-success-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-success-lcc-topics,
    confluent_kafka_acl.app-connector-create-on-error-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-error-lcc-topics,
    confluent_kafka_acl.app-connector-read-on-connect-lcc-group,
  ]

  lifecycle {
    prevent_destroy = true
  }
}

Example Managed Amazon DynamoDB Sink Connector that uses a service account to communicate with your Kafka cluster

resource "confluent_connector" "sink" {
  environment {
    id = confluent_environment.staging.id
  }
  kafka_cluster {
    id = confluent_kafka_cluster.basic.id
  }

  // Block for custom *sensitive* configuration properties that are labelled with "Type: password" under "Configuration Properties" section in the docs:
  // https://docs.confluent.io/cloud/current/connectors/cc-amazon-dynamo-db-sink.html#configuration-properties
  config_sensitive = {
    "aws.access.key.id"     = "***REDACTED***"
    "aws.secret.access.key" = "***REDACTED***"
  }

  // Block for custom *nonsensitive* configuration properties that are *not* labelled with "Type: password" under "Configuration Properties" section in the docs:
  // https://docs.confluent.io/cloud/current/connectors/cc-amazon-dynamo-db-sink.html#configuration-properties
  config_nonsensitive = {
    "topics"                   = confluent_kafka_topic.orders.topic_name
    "input.data.format"        = "JSON"
    "connector.class"          = "DynamoDbSink"
    "name"                     = "DynamoDbSinkConnector_0"
    "kafka.auth.mode"          = "SERVICE_ACCOUNT"
    "kafka.service.account.id" = confluent_service_account.app-connector.id
    "aws.dynamodb.pk.hash"     = "value.userid"
    "aws.dynamodb.pk.sort"     = "value.pageid"
    "tasks.max"                = "1"
  }

  depends_on = [
    confluent_kafka_acl.app-connector-describe-on-cluster,
    confluent_kafka_acl.app-connector-read-on-target-topic,
    confluent_kafka_acl.app-connector-create-on-dlq-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-dlq-lcc-topics,
    confluent_kafka_acl.app-connector-create-on-success-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-success-lcc-topics,
    confluent_kafka_acl.app-connector-create-on-error-lcc-topics,
    confluent_kafka_acl.app-connector-write-on-error-lcc-topics,
    confluent_kafka_acl.app-connector-read-on-connect-lcc-group,
  ]
}

Example Custom Datagen Source Connector that uses a Kafka API Key to communicate with your Kafka cluster

resource "confluent_connector" "source" {
  environment {
    id = confluent_environment.staging.id
  }
  kafka_cluster {
    id = confluent_kafka_cluster.basic.id
  }

  // Block for custom *sensitive* configuration properties, such as the Kafka API credentials
  // the connector uses to communicate with your cluster. For custom connectors, see:
  // https://docs.confluent.io/platform/current/connect/userguide.html#connect-installing-plugins
  config_sensitive = {
    "kafka.api.key"     = "***REDACTED***"
    "kafka.api.secret"  = "***REDACTED***"
  }

  // Block for the remaining *nonsensitive* configuration properties. For custom connectors, see:
  // https://docs.confluent.io/platform/current/connect/userguide.html#connect-installing-plugins
  config_nonsensitive = {
    "confluent.connector.type"   = "CUSTOM"
    "connector.class"            = confluent_custom_connector_plugin.source.connector_class
    "name"                       = "DatagenConnectorExampleName"
    "kafka.auth.mode"            = "KAFKA_API_KEY"
    "kafka.topic"                = confluent_kafka_topic.orders.topic_name
    "output.data.format"         = "JSON"
    "quickstart"                 = "ORDERS"
    "confluent.custom.plugin.id" = confluent_custom_connector_plugin.source.id
    "min.interval"               = "1000"
    "max.interval"               = "2000"
    "tasks.max"                  = "1"
  }

  depends_on = [
    confluent_role_binding.app-manager-kafka-cluster-admin,
  ]
}
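
The example above references a confluent_custom_connector_plugin resource, which uploads the plugin archive and exports the connector_class and id values used in config_nonsensitive. A minimal sketch of such a plugin resource is shown below; the display name, documentation link, file name, and connector class are illustrative placeholders, so check the confluent_custom_connector_plugin documentation for the authoritative argument list:

resource "confluent_custom_connector_plugin" "source" {
  display_name       = "Datagen Source Connector Plugin"
  documentation_link = "https://github.com/confluentinc/kafka-connect-datagen"
  connector_class    = "io.confluent.kafka.connect.datagen.DatagenConnector"
  connector_type     = "SOURCE"
  // Properties named here must be supplied via config_sensitive on the connector.
  sensitive_config_properties = []
  // Path to the plugin archive to upload (placeholder name).
  filename = "confluentinc-kafka-connect-datagen.zip"
}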

Argument Reference

The following arguments are supported:

- environment (Required Configuration Block) supports the following:
  - id - (Required String) The ID of the Environment that the connector belongs to, for example, env-abc123.
- kafka_cluster (Required Configuration Block) supports the following:
  - id - (Required String) The ID of the Kafka cluster that the connector belongs to, for example, lkc-abc123.
- config_sensitive - (Optional Map) A map of sensitive connector configuration properties, that is, those labelled with "Type: password" under the "Configuration Properties" section of the connector's documentation (for example, cloud provider credentials or Kafka API keys).
- config_nonsensitive - (Required Map) A map of all remaining (nonsensitive) connector configuration properties, such as connector.class, name, and tasks.max.
- status - (Optional String) The desired status of the connector. Updating this value between "RUNNING" and "PAUSED" pauses or resumes the connector in place.
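
For example, an existing connector can be paused and later resumed by updating the status argument in place rather than recreating the resource. The following sketch assumes the Datagen source from the first example (all referenced resources are defined there):

resource "confluent_connector" "source" {
  environment {
    id = confluent_environment.staging.id
  }
  kafka_cluster {
    id = confluent_kafka_cluster.basic.id
  }

  // Switching this value between "RUNNING" and "PAUSED" pauses or resumes the connector in place.
  status = "PAUSED"

  config_sensitive = {}

  config_nonsensitive = {
    "connector.class"          = "DatagenSource"
    "name"                     = "DatagenSourceConnector_0"
    "kafka.auth.mode"          = "SERVICE_ACCOUNT"
    "kafka.service.account.id" = confluent_service_account.app-connector.id
    "kafka.topic"              = confluent_kafka_topic.orders.topic_name
    "output.data.format"       = "JSON"
    "quickstart"               = "ORDERS"
    "tasks.max"                = "1"
  }
}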

Attributes Reference

In addition to the preceding arguments, the following attributes are exported:

- id - (String) The ID of the connector, for example, lcc-abc123.
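
For example, the exported connector ID can be surfaced as a Terraform output (a minimal sketch, assuming the S3 sink connector defined above):

output "connector_id" {
  description = "The ID of the managed connector, for example, lcc-abc123."
  value       = confluent_connector.sink.id
}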

Import

You can import a connector by using the Environment ID, Kafka cluster ID, and connector name, in the format <Environment ID>/<Kafka cluster ID>/<Connector name>, for example:

$ export CONFLUENT_CLOUD_API_KEY="<cloud_api_key>"
$ export CONFLUENT_CLOUD_API_SECRET="<cloud_api_secret>"
$ terraform import confluent_connector.my_connector "env-abc123/lkc-abc123/S3_SINKConnector_0"
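
Note that terraform import only writes to state; a matching resource block must already exist in your configuration for the import to attach to. A minimal placeholder such as the following is sufficient to run the command, after which you can copy the connector's actual configuration into the block until terraform plan shows no diff:

resource "confluent_connector" "my_connector" {
  environment {
    id = "env-abc123"
  }
  kafka_cluster {
    id = "lkc-abc123"
  }

  config_sensitive = {}

  // Populate with the connector's real configuration after importing.
  config_nonsensitive = {}
}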

Getting Started

The following end-to-end examples might help you get started with the confluent_connector resource: