1. Packages
  2. Confluent Provider
  3. API Docs
  4. getKafkaTopic
Confluent v2.21.0 published on Wednesday, Mar 19, 2025 by Pulumi

confluentcloud.getKafkaTopic

Explore with Pulumi AI

General Availability

confluentcloud.getKafkaTopic describes a Kafka Topic data source.

Example Usage

Option #1: Manage multiple Kafka clusters in the same Pulumi Stack

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

const orders = confluentcloud.getKafkaTopic({
    kafkaCluster: {
        id: basic_cluster.id,
    },
    topicName: "orders",
    restEndpoint: basic_cluster.restEndpoint,
    credentials: {
        key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
        secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
    },
});
export const config = orders.then(orders => orders.config);
Copy
import pulumi
import pulumi_confluentcloud as confluentcloud

orders = confluentcloud.get_kafka_topic(kafka_cluster={
        "id": basic_cluster["id"],
    },
    topic_name="orders",
    rest_endpoint=basic_cluster["restEndpoint"],
    credentials={
        "key": "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
        "secret": "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
    })
pulumi.export("config", orders.config)
Copy
package main

import (
	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
			KafkaCluster: confluentcloud.GetKafkaTopicKafkaCluster{
				Id: basic_cluster.Id,
			},
			TopicName:    "orders",
			RestEndpoint: basic_cluster.RestEndpoint,
			Credentials: confluentcloud.GetKafkaTopicCredentials{
				Key:    "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
				Secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
			},
		}, nil)
		if err != nil {
			return err
		}
		ctx.Export("config", orders.Config)
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

return await Deployment.RunAsync(() => 
{
    var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
    {
        KafkaCluster = new ConfluentCloud.Inputs.GetKafkaTopicKafkaClusterInputArgs
        {
            Id = basic_cluster.Id,
        },
        TopicName = "orders",
        RestEndpoint = basic_cluster.RestEndpoint,
        Credentials = new ConfluentCloud.Inputs.GetKafkaTopicCredentialsInputArgs
        {
            Key = "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
            Secret = "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
        },
    });

    return new Dictionary<string, object?>
    {
        ["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
    };
});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.ConfluentcloudFunctions;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicKafkaClusterArgs;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicCredentialsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
            .kafkaCluster(GetKafkaTopicKafkaClusterArgs.builder()
                .id(basic_cluster.id())
                .build())
            .topicName("orders")
            .restEndpoint(basic_cluster.restEndpoint())
            .credentials(GetKafkaTopicCredentialsArgs.builder()
                .key("<Kafka API Key for confluent_kafka_cluster.basic-cluster>")
                .secret("<Kafka API Secret for confluent_kafka_cluster.basic-cluster>")
                .build())
            .build());

        ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
    }
}
Copy
variables:
  orders:
    fn::invoke:
      function: confluentcloud:getKafkaTopic
      arguments:
        kafkaCluster:
          id: ${["basic-cluster"].id}
        topicName: orders
        restEndpoint: ${["basic-cluster"].restEndpoint}
        credentials:
          key: <Kafka API Key for confluent_kafka_cluster.basic-cluster>
          secret: <Kafka API Secret for confluent_kafka_cluster.basic-cluster>
outputs:
  config: ${orders.config}
Copy

Option #2: Manage a single Kafka cluster in the same Pulumi Stack

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

const orders = confluentcloud.getKafkaTopic({
    topicName: "orders",
});
export const config = orders.then(orders => orders.config);
Copy
import pulumi
import pulumi_confluentcloud as confluentcloud

orders = confluentcloud.get_kafka_topic(topic_name="orders")
pulumi.export("config", orders.config)
Copy
package main

import (
	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
			TopicName: "orders",
		}, nil)
		if err != nil {
			return err
		}
		ctx.Export("config", orders.Config)
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

return await Deployment.RunAsync(() => 
{
    var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
    {
        TopicName = "orders",
    });

    return new Dictionary<string, object?>
    {
        ["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
    };
});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.ConfluentcloudFunctions;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
            .topicName("orders")
            .build());

        ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
    }
}
Copy
variables:
  orders:
    fn::invoke:
      function: confluentcloud:getKafkaTopic
      arguments:
        topicName: orders
outputs:
  config: ${orders.config}
Copy

Using getKafkaTopic

Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

function getKafkaTopic(args: GetKafkaTopicArgs, opts?: InvokeOptions): Promise<GetKafkaTopicResult>
function getKafkaTopicOutput(args: GetKafkaTopicOutputArgs, opts?: InvokeOptions): Output<GetKafkaTopicResult>
Copy
def get_kafka_topic(credentials: Optional[GetKafkaTopicCredentials] = None,
                    kafka_cluster: Optional[GetKafkaTopicKafkaCluster] = None,
                    rest_endpoint: Optional[str] = None,
                    topic_name: Optional[str] = None,
                    opts: Optional[InvokeOptions] = None) -> GetKafkaTopicResult
def get_kafka_topic_output(credentials: Optional[pulumi.Input[GetKafkaTopicCredentialsArgs]] = None,
                    kafka_cluster: Optional[pulumi.Input[GetKafkaTopicKafkaClusterArgs]] = None,
                    rest_endpoint: Optional[pulumi.Input[str]] = None,
                    topic_name: Optional[pulumi.Input[str]] = None,
                    opts: Optional[InvokeOptions] = None) -> Output[GetKafkaTopicResult]
Copy
func LookupKafkaTopic(ctx *Context, args *LookupKafkaTopicArgs, opts ...InvokeOption) (*LookupKafkaTopicResult, error)
func LookupKafkaTopicOutput(ctx *Context, args *LookupKafkaTopicOutputArgs, opts ...InvokeOption) LookupKafkaTopicResultOutput
Copy

> Note: This function is named LookupKafkaTopic in the Go SDK.

public static class GetKafkaTopic 
{
    public static Task<GetKafkaTopicResult> InvokeAsync(GetKafkaTopicArgs args, InvokeOptions? opts = null)
    public static Output<GetKafkaTopicResult> Invoke(GetKafkaTopicInvokeArgs args, InvokeOptions? opts = null)
}
Copy
public static CompletableFuture<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
public static Output<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
Copy
fn::invoke:
  function: confluentcloud:index/getKafkaTopic:getKafkaTopic
  arguments:
    # arguments dictionary
Copy

The following arguments are supported:

RestEndpoint This property is required. string
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
TopicName This property is required. string
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
Credentials Pulumi.ConfluentCloud.Inputs.GetKafkaTopicCredentials
KafkaCluster Pulumi.ConfluentCloud.Inputs.GetKafkaTopicKafkaCluster
RestEndpoint This property is required. string
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
TopicName This property is required. string
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
Credentials GetKafkaTopicCredentials
KafkaCluster GetKafkaTopicKafkaCluster
restEndpoint This property is required. String
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
topicName This property is required. String
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
credentials GetKafkaTopicCredentials
kafkaCluster GetKafkaTopicKafkaCluster
restEndpoint This property is required. string
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
topicName This property is required. string
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
credentials GetKafkaTopicCredentials
kafkaCluster GetKafkaTopicKafkaCluster
rest_endpoint This property is required. str
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
topic_name This property is required. str
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
credentials GetKafkaTopicCredentials
kafka_cluster GetKafkaTopicKafkaCluster
restEndpoint This property is required. String
The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
topicName This property is required. String
The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
credentials Property Map
kafkaCluster Property Map

getKafkaTopic Result

The following output properties are available:

Config Dictionary<string, string>
(Optional Map) The custom topic settings:
Id string
The provider-assigned unique ID for this managed resource.
PartitionsCount int
(Required Number) The number of partitions to create in the topic. Defaults to 6.
RestEndpoint string
TopicName string
Credentials Pulumi.ConfluentCloud.Outputs.GetKafkaTopicCredentials
KafkaCluster Pulumi.ConfluentCloud.Outputs.GetKafkaTopicKafkaCluster
Config map[string]string
(Optional Map) The custom topic settings:
Id string
The provider-assigned unique ID for this managed resource.
PartitionsCount int
(Required Number) The number of partitions to create in the topic. Defaults to 6.
RestEndpoint string
TopicName string
Credentials GetKafkaTopicCredentials
KafkaCluster GetKafkaTopicKafkaCluster
config Map<String,String>
(Optional Map) The custom topic settings:
id String
The provider-assigned unique ID for this managed resource.
partitionsCount Integer
(Required Number) The number of partitions to create in the topic. Defaults to 6.
restEndpoint String
topicName String
credentials GetKafkaTopicCredentials
kafkaCluster GetKafkaTopicKafkaCluster
config {[key: string]: string}
(Optional Map) The custom topic settings:
id string
The provider-assigned unique ID for this managed resource.
partitionsCount number
(Required Number) The number of partitions to create in the topic. Defaults to 6.
restEndpoint string
topicName string
credentials GetKafkaTopicCredentials
kafkaCluster GetKafkaTopicKafkaCluster
config Mapping[str, str]
(Optional Map) The custom topic settings:
id str
The provider-assigned unique ID for this managed resource.
partitions_count int
(Required Number) The number of partitions to create in the topic. Defaults to 6.
rest_endpoint str
topic_name str
credentials GetKafkaTopicCredentials
kafka_cluster GetKafkaTopicKafkaCluster
config Map<String>
(Optional Map) The custom topic settings:
id String
The provider-assigned unique ID for this managed resource.
partitionsCount Number
(Required Number) The number of partitions to create in the topic. Defaults to 6.
restEndpoint String
topicName String
credentials Property Map
kafkaCluster Property Map

Supporting Types

GetKafkaTopicCredentials

Key This property is required. string
The Kafka API Key.
Secret This property is required. string
The Cluster API Secret for your Confluent Cloud cluster.
Key This property is required. string
The Kafka API Key.
Secret This property is required. string
The Cluster API Secret for your Confluent Cloud cluster.
key This property is required. String
The Kafka API Key.
secret This property is required. String
The Cluster API Secret for your Confluent Cloud cluster.
key This property is required. string
The Kafka API Key.
secret This property is required. string
The Cluster API Secret for your Confluent Cloud cluster.
key This property is required. str
The Kafka API Key.
secret This property is required. str
The Cluster API Secret for your Confluent Cloud cluster.
key This property is required. String
The Kafka API Key.
secret This property is required. String
The Cluster API Secret for your Confluent Cloud cluster.

GetKafkaTopicKafkaCluster

Id This property is required. string
The ID of the Kafka cluster, for example, lkc-abc123.
Id This property is required. string
The ID of the Kafka cluster, for example, lkc-abc123.
id This property is required. String
The ID of the Kafka cluster, for example, lkc-abc123.
id This property is required. string
The ID of the Kafka cluster, for example, lkc-abc123.
id This property is required. str
The ID of the Kafka cluster, for example, lkc-abc123.
id This property is required. String
The ID of the Kafka cluster, for example, lkc-abc123.

Package Details

Repository
Confluent Cloud pulumi/pulumi-confluentcloud
License
Apache-2.0
Notes
This Pulumi package is based on the confluent Terraform Provider.