confluentcloud.Schema
Import
You can import a Schema by using the Schema Registry cluster ID, Subject name, and unique identifier (or latest when recreate_on_update = false) of the Schema, in the format <Schema Registry cluster ID>/<Subject name>/<Schema identifier>, for example:
Option A: recreate_on_update = false (the default)
$ export IMPORT_SCHEMA_REGISTRY_API_KEY="<schema_registry_api_key>"
$ export IMPORT_SCHEMA_REGISTRY_API_SECRET="<schema_registry_api_secret>"
$ export IMPORT_SCHEMA_REGISTRY_REST_ENDPOINT="<schema_registry_rest_endpoint>"
$ pulumi import confluentcloud:index/schema:Schema my_schema_1 lsrc-abc123/test-subject/latest
Option B: recreate_on_update = true
$ export IMPORT_SCHEMA_REGISTRY_API_KEY="<schema_registry_api_key>"
$ export IMPORT_SCHEMA_REGISTRY_API_SECRET="<schema_registry_api_secret>"
$ export IMPORT_SCHEMA_REGISTRY_REST_ENDPOINT="<schema_registry_rest_endpoint>"
$ pulumi import confluentcloud:index/schema:Schema my_schema_1 lsrc-abc123/test-subject/100003
!> Warning: Do not forget to delete terminal command history afterwards for security purposes.
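For reference, the import commands above assume a matching Schema resource is already declared in your program. The following is a minimal TypeScript sketch of such a declaration; the schema file path, format, and the config keys used for credentials are illustrative assumptions, while the cluster ID, subject name, and endpoint mirror the placeholders used above.

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
import * as fs from "fs";

const config = new pulumi.Config();

// Hypothetical resource declaration matching the import ID lsrc-abc123/test-subject/latest.
const mySchema1 = new confluentcloud.Schema("my_schema_1", {
    schemaRegistryCluster: { id: "lsrc-abc123" },
    restEndpoint: "https://psrc-00000.us-central1.gcp.confluent.cloud:443",
    credentials: {
        key: config.require("schemaRegistryApiKey"),        // illustrative config keys
        secret: config.requireSecret("schemaRegistryApiSecret"),
    },
    subjectName: "test-subject",
    format: "AVRO",                                          // assumed format
    schema: fs.readFileSync("./schemas/test-subject.avsc", "utf-8"),
    // hardDelete must be left unset when importing; recreateOnUpdate defaults to false (Option A).
});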
Create Schema Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Schema(name: string, args: SchemaArgs, opts?: CustomResourceOptions);
@overload
def Schema(resource_name: str,
args: SchemaArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Schema(resource_name: str,
opts: Optional[ResourceOptions] = None,
format: Optional[str] = None,
subject_name: Optional[str] = None,
credentials: Optional[SchemaCredentialsArgs] = None,
hard_delete: Optional[bool] = None,
metadata: Optional[SchemaMetadataArgs] = None,
recreate_on_update: Optional[bool] = None,
rest_endpoint: Optional[str] = None,
ruleset: Optional[SchemaRulesetArgs] = None,
schema: Optional[str] = None,
schema_references: Optional[Sequence[SchemaSchemaReferenceArgs]] = None,
schema_registry_cluster: Optional[SchemaSchemaRegistryClusterArgs] = None)
func NewSchema(ctx *Context, name string, args SchemaArgs, opts ...ResourceOption) (*Schema, error)
public Schema(string name, SchemaArgs args, CustomResourceOptions? opts = null)
public Schema(String name, SchemaArgs args)
public Schema(String name, SchemaArgs args, CustomResourceOptions options)
type: confluentcloud:Schema
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SchemaArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SchemaArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SchemaArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SchemaArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SchemaArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var schemaResource = new ConfluentCloud.Schema("schemaResource", new()
{
Format = "string",
SubjectName = "string",
Credentials = new ConfluentCloud.Inputs.SchemaCredentialsArgs
{
Key = "string",
Secret = "string",
},
HardDelete = false,
Metadata = new ConfluentCloud.Inputs.SchemaMetadataArgs
{
Properties =
{
{ "string", "string" },
},
Sensitives = new[]
{
"string",
},
Tags = new[]
{
new ConfluentCloud.Inputs.SchemaMetadataTagArgs
{
Key = "string",
Values = new[]
{
"string",
},
},
},
},
RecreateOnUpdate = false,
RestEndpoint = "string",
Ruleset = new ConfluentCloud.Inputs.SchemaRulesetArgs
{
DomainRules = new[]
{
new ConfluentCloud.Inputs.SchemaRulesetDomainRuleArgs
{
Doc = "string",
Expr = "string",
Kind = "string",
Mode = "string",
Name = "string",
OnFailure = "string",
OnSuccess = "string",
Params =
{
{ "string", "string" },
},
Tags = new[]
{
"string",
},
Type = "string",
},
},
MigrationRules = new[]
{
new ConfluentCloud.Inputs.SchemaRulesetMigrationRuleArgs
{
Doc = "string",
Expr = "string",
Kind = "string",
Mode = "string",
Name = "string",
OnFailure = "string",
OnSuccess = "string",
Params =
{
{ "string", "string" },
},
Tags = new[]
{
"string",
},
Type = "string",
},
},
},
SchemaDetails = "string",
SchemaReferences = new[]
{
new ConfluentCloud.Inputs.SchemaSchemaReferenceArgs
{
Name = "string",
SubjectName = "string",
Version = 0,
},
},
SchemaRegistryCluster = new ConfluentCloud.Inputs.SchemaSchemaRegistryClusterArgs
{
Id = "string",
},
});
example, err := confluentcloud.NewSchema(ctx, "schemaResource", &confluentcloud.SchemaArgs{
Format: pulumi.String("string"),
SubjectName: pulumi.String("string"),
Credentials: &confluentcloud.SchemaCredentialsArgs{
Key: pulumi.String("string"),
Secret: pulumi.String("string"),
},
HardDelete: pulumi.Bool(false),
Metadata: &confluentcloud.SchemaMetadataArgs{
Properties: pulumi.StringMap{
"string": pulumi.String("string"),
},
Sensitives: pulumi.StringArray{
pulumi.String("string"),
},
Tags: confluentcloud.SchemaMetadataTagArray{
&confluentcloud.SchemaMetadataTagArgs{
Key: pulumi.String("string"),
Values: pulumi.StringArray{
pulumi.String("string"),
},
},
},
},
RecreateOnUpdate: pulumi.Bool(false),
RestEndpoint: pulumi.String("string"),
Ruleset: &confluentcloud.SchemaRulesetArgs{
DomainRules: confluentcloud.SchemaRulesetDomainRuleArray{
&confluentcloud.SchemaRulesetDomainRuleArgs{
Doc: pulumi.String("string"),
Expr: pulumi.String("string"),
Kind: pulumi.String("string"),
Mode: pulumi.String("string"),
Name: pulumi.String("string"),
OnFailure: pulumi.String("string"),
OnSuccess: pulumi.String("string"),
Params: pulumi.StringMap{
"string": pulumi.String("string"),
},
Tags: pulumi.StringArray{
pulumi.String("string"),
},
Type: pulumi.String("string"),
},
},
MigrationRules: confluentcloud.SchemaRulesetMigrationRuleArray{
&confluentcloud.SchemaRulesetMigrationRuleArgs{
Doc: pulumi.String("string"),
Expr: pulumi.String("string"),
Kind: pulumi.String("string"),
Mode: pulumi.String("string"),
Name: pulumi.String("string"),
OnFailure: pulumi.String("string"),
OnSuccess: pulumi.String("string"),
Params: pulumi.StringMap{
"string": pulumi.String("string"),
},
Tags: pulumi.StringArray{
pulumi.String("string"),
},
Type: pulumi.String("string"),
},
},
},
Schema: pulumi.String("string"),
SchemaReferences: confluentcloud.SchemaSchemaReferenceArray{
&confluentcloud.SchemaSchemaReferenceArgs{
Name: pulumi.String("string"),
SubjectName: pulumi.String("string"),
Version: pulumi.Int(0),
},
},
SchemaRegistryCluster: &confluentcloud.SchemaSchemaRegistryClusterArgs{
Id: pulumi.String("string"),
},
})
var schemaResource = new Schema("schemaResource", SchemaArgs.builder()
.format("string")
.subjectName("string")
.credentials(SchemaCredentialsArgs.builder()
.key("string")
.secret("string")
.build())
.hardDelete(false)
.metadata(SchemaMetadataArgs.builder()
.properties(Map.of("string", "string"))
.sensitives("string")
.tags(SchemaMetadataTagArgs.builder()
.key("string")
.values("string")
.build())
.build())
.recreateOnUpdate(false)
.restEndpoint("string")
.ruleset(SchemaRulesetArgs.builder()
.domainRules(SchemaRulesetDomainRuleArgs.builder()
.doc("string")
.expr("string")
.kind("string")
.mode("string")
.name("string")
.onFailure("string")
.onSuccess("string")
.params(Map.of("string", "string"))
.tags("string")
.type("string")
.build())
.migrationRules(SchemaRulesetMigrationRuleArgs.builder()
.doc("string")
.expr("string")
.kind("string")
.mode("string")
.name("string")
.onFailure("string")
.onSuccess("string")
.params(Map.of("string", "string"))
.tags("string")
.type("string")
.build())
.build())
.schema("string")
.schemaReferences(SchemaSchemaReferenceArgs.builder()
.name("string")
.subjectName("string")
.version(0)
.build())
.schemaRegistryCluster(SchemaSchemaRegistryClusterArgs.builder()
.id("string")
.build())
.build());
schema_resource = confluentcloud.Schema("schemaResource",
format="string",
subject_name="string",
credentials=confluentcloud.SchemaCredentialsArgs(
key="string",
secret="string",
),
hard_delete=False,
metadata=confluentcloud.SchemaMetadataArgs(
properties={
"string": "string",
},
sensitives=["string"],
tags=[confluentcloud.SchemaMetadataTagArgs(
key="string",
values=["string"],
)],
),
recreate_on_update=False,
rest_endpoint="string",
ruleset=confluentcloud.SchemaRulesetArgs(
domain_rules=[confluentcloud.SchemaRulesetDomainRuleArgs(
doc="string",
expr="string",
kind="string",
mode="string",
name="string",
on_failure="string",
on_success="string",
params={
"string": "string",
},
tags=["string"],
type="string",
)],
migration_rules=[confluentcloud.SchemaRulesetMigrationRuleArgs(
doc="string",
expr="string",
kind="string",
mode="string",
name="string",
on_failure="string",
on_success="string",
params={
"string": "string",
},
tags=["string"],
type="string",
)],
),
schema="string",
schema_references=[confluentcloud.SchemaSchemaReferenceArgs(
name="string",
subject_name="string",
version=0,
)],
schema_registry_cluster=confluentcloud.SchemaSchemaRegistryClusterArgs(
id="string",
))
const schemaResource = new confluentcloud.Schema("schemaResource", {
format: "string",
subjectName: "string",
credentials: {
key: "string",
secret: "string",
},
hardDelete: false,
metadata: {
properties: {
string: "string",
},
sensitives: ["string"],
tags: [{
key: "string",
values: ["string"],
}],
},
recreateOnUpdate: false,
restEndpoint: "string",
ruleset: {
domainRules: [{
doc: "string",
expr: "string",
kind: "string",
mode: "string",
name: "string",
onFailure: "string",
onSuccess: "string",
params: {
string: "string",
},
tags: ["string"],
type: "string",
}],
migrationRules: [{
doc: "string",
expr: "string",
kind: "string",
mode: "string",
name: "string",
onFailure: "string",
onSuccess: "string",
params: {
string: "string",
},
tags: ["string"],
type: "string",
}],
},
schema: "string",
schemaReferences: [{
name: "string",
subjectName: "string",
version: 0,
}],
schemaRegistryCluster: {
id: "string",
},
});
type: confluentcloud:Schema
properties:
credentials:
key: string
secret: string
format: string
hardDelete: false
metadata:
properties:
string: string
sensitives:
- string
tags:
- key: string
values:
- string
recreateOnUpdate: false
restEndpoint: string
ruleset:
domainRules:
- doc: string
expr: string
kind: string
mode: string
name: string
onFailure: string
onSuccess: string
params:
string: string
tags:
- string
type: string
migrationRules:
- doc: string
expr: string
kind: string
mode: string
name: string
onFailure: string
onSuccess: string
params:
string: string
tags:
- string
type: string
schema: string
schemaReferences:
- name: string
subjectName: string
version: 0
schemaRegistryCluster:
id: string
subjectName: string
Schema Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Schema resource accepts the following input properties:
- Format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- SubjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- Credentials Pulumi.ConfluentCloud.Inputs.SchemaCredentials
- The Cluster API Credentials.
- HardDelete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- Metadata Pulumi.ConfluentCloud.Inputs.SchemaMetadata
- See here for more details. Supports the following:
- RecreateOnUpdate bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- RestEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- Ruleset Pulumi.ConfluentCloud.Inputs.SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- SchemaDetails string
- The schema string, for example, file("./schema_version_1.avsc").
- SchemaReferences List<Pulumi.ConfluentCloud.Inputs.SchemaSchemaReference>
- The list of referenced schemas (see Schema References for more details):
- SchemaRegistryCluster Pulumi.ConfluentCloud.Inputs.SchemaSchemaRegistryCluster
- Format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- SubjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- Credentials SchemaCredentialsArgs
- The Cluster API Credentials.
- HardDelete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- Metadata SchemaMetadataArgs
- See here for more details. Supports the following:
- RecreateOnUpdate bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- RestEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- Ruleset SchemaRulesetArgs
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- Schema string
- The schema string, for example, file("./schema_version_1.avsc").
- SchemaReferences []SchemaSchemaReferenceArgs
- The list of referenced schemas (see Schema References for more details):
- SchemaRegistryCluster SchemaSchemaRegistryClusterArgs
- format String
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- subjectName String
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- credentials SchemaCredentials
- The Cluster API Credentials.
- hardDelete Boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadata
- See here for more details. Supports the following:
- recreateOnUpdate Boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint String
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema String
- The schema string, for example, file("./schema_version_1.avsc").
- schemaReferences List<SchemaSchemaReference>
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster SchemaSchemaRegistryCluster
- format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- subjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- credentials SchemaCredentials
- The Cluster API Credentials.
- hardDelete boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadata
- See here for more details. Supports the following:
- recreateOnUpdate boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema string
- The schema string, for example, file("./schema_version_1.avsc").
- schemaReferences SchemaSchemaReference[]
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster SchemaSchemaRegistryCluster
- format str
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- subject_name str
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- credentials SchemaCredentialsArgs
- The Cluster API Credentials.
- hard_delete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadataArgs
- See here for more details. Supports the following:
- recreate_on_update bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- rest_endpoint str
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRulesetArgs
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema str
- The schema string, for example, file("./schema_version_1.avsc").
- schema_references Sequence[SchemaSchemaReferenceArgs]
- The list of referenced schemas (see Schema References for more details):
- schema_registry_cluster SchemaSchemaRegistryClusterArgs
- format String
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- subjectName String
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- credentials Property Map
- The Cluster API Credentials.
- hardDelete Boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata Property Map
- See here for more details. Supports the following:
- recreateOnUpdate Boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint String
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset Property Map
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema String
- The schema string, for example, file("./schema_version_1.avsc").
- schemaReferences List<Property Map>
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster Property Map
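To make the inputs above concrete, here is a hedged TypeScript sketch of a typical Avro schema registration. The cluster ID, REST endpoint, subject name, file path, and config keys are placeholders rather than values taken from this page.

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
import * as fs from "fs";

const config = new pulumi.Config();

// Registers (and evolves in place) the latest version of an Avro schema under "orders-value".
const ordersValue = new confluentcloud.Schema("orders-value", {
    schemaRegistryCluster: { id: "lsrc-abc123" },            // placeholder cluster ID
    restEndpoint: "https://psrc-00000.us-central1.gcp.confluent.cloud:443",
    credentials: {
        key: config.require("schemaRegistryApiKey"),         // illustrative config keys
        secret: config.requireSecret("schemaRegistryApiSecret"),
    },
    subjectName: "orders-value",
    format: "AVRO",
    schema: fs.readFileSync("./schemas/orders-value-v1.avsc", "utf-8"),
    // false (the default) keeps one resource pointing at the latest version;
    // true would create a new resource-managed version on every schema change.
    recreateOnUpdate: false,
    // Soft delete on destroy (the default); set to true for a hard delete.
    hardDelete: false,
});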
Outputs
All input properties are implicitly available as output properties. Additionally, the Schema resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- SchemaIdentifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- Version int
- (Required Integer) The version of the Schema, for example, 4.
- Id string
- The provider-assigned unique ID for this managed resource.
- SchemaIdentifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- Version int
- (Required Integer) The version of the Schema, for example, 4.
- id String
- The provider-assigned unique ID for this managed resource.
- schemaIdentifier Integer
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- version Integer
- (Required Integer) The version of the Schema, for example, 4.
- id string
- The provider-assigned unique ID for this managed resource.
- schemaIdentifier number
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- version number
- (Required Integer) The version of the Schema, for example, 4.
- id str
- The provider-assigned unique ID for this managed resource.
- schema_identifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- version int
- (Required Integer) The version of the Schema, for example, 4.
- id String
- The provider-assigned unique ID for this managed resource.
- schemaIdentifier Number
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- version Number
- (Required Integer) The version of the Schema, for example, 4.
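Continuing the hypothetical ordersValue sketch from the Inputs section, the two computed outputs can be exported like any other Pulumi output:

// schemaIdentifier is the globally unique schema ID; version is the version under this subject.
export const ordersSchemaId = ordersValue.schemaIdentifier;
export const ordersSchemaVersion = ordersValue.version;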
Look up Existing Schema Resource
Get an existing Schema resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SchemaState, opts?: CustomResourceOptions): Schema
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
credentials: Optional[SchemaCredentialsArgs] = None,
format: Optional[str] = None,
hard_delete: Optional[bool] = None,
metadata: Optional[SchemaMetadataArgs] = None,
recreate_on_update: Optional[bool] = None,
rest_endpoint: Optional[str] = None,
ruleset: Optional[SchemaRulesetArgs] = None,
schema: Optional[str] = None,
schema_identifier: Optional[int] = None,
schema_references: Optional[Sequence[SchemaSchemaReferenceArgs]] = None,
schema_registry_cluster: Optional[SchemaSchemaRegistryClusterArgs] = None,
subject_name: Optional[str] = None,
version: Optional[int] = None) -> Schema
func GetSchema(ctx *Context, name string, id IDInput, state *SchemaState, opts ...ResourceOption) (*Schema, error)
public static Schema Get(string name, Input<string> id, SchemaState? state, CustomResourceOptions? opts = null)
public static Schema get(String name, Output<String> id, SchemaState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Credentials Pulumi.ConfluentCloud.Inputs.SchemaCredentials
- The Cluster API Credentials.
- Format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- HardDelete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- Metadata Pulumi.ConfluentCloud.Inputs.SchemaMetadata
- See here for more details. Supports the following:
- RecreateOnUpdate bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- RestEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- Ruleset Pulumi.ConfluentCloud.Inputs.SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- SchemaDetails string
- The schema string, for example, file("./schema_version_1.avsc").
- SchemaIdentifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- SchemaReferences List<Pulumi.ConfluentCloud.Inputs.SchemaSchemaReference>
- The list of referenced schemas (see Schema References for more details):
- SchemaRegistryCluster Pulumi.ConfluentCloud.Inputs.SchemaSchemaRegistryCluster
- SubjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- Version int
- (Required Integer) The version of the Schema, for example, 4.
- Credentials SchemaCredentialsArgs
- The Cluster API Credentials.
- Format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- HardDelete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- Metadata SchemaMetadataArgs
- See here for more details. Supports the following:
- RecreateOnUpdate bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- RestEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- Ruleset SchemaRulesetArgs
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- Schema string
- The schema string, for example, file("./schema_version_1.avsc").
- SchemaIdentifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- SchemaReferences []SchemaSchemaReferenceArgs
- The list of referenced schemas (see Schema References for more details):
- SchemaRegistryCluster SchemaSchemaRegistryClusterArgs
- SubjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- Version int
- (Required Integer) The version of the Schema, for example, 4.
- credentials SchemaCredentials
- The Cluster API Credentials.
- format String
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- hardDelete Boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadata
- See here for more details. Supports the following:
- recreateOnUpdate Boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint String
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema String
- The schema string, for example, file("./schema_version_1.avsc").
- schemaIdentifier Integer
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- schemaReferences List<SchemaSchemaReference>
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster SchemaSchemaRegistryCluster
- subjectName String
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- version Integer
- (Required Integer) The version of the Schema, for example, 4.
- credentials SchemaCredentials
- The Cluster API Credentials.
- format string
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- hardDelete boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadata
- See here for more details. Supports the following:
- recreateOnUpdate boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint string
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRuleset
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema string
- The schema string, for example, file("./schema_version_1.avsc").
- schemaIdentifier number
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- schemaReferences SchemaSchemaReference[]
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster SchemaSchemaRegistryCluster
- subjectName string
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- version number
- (Required Integer) The version of the Schema, for example, 4.
- credentials SchemaCredentialsArgs
- The Cluster API Credentials.
- format str
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- hard_delete bool
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata SchemaMetadataArgs
- See here for more details. Supports the following:
- recreate_on_update bool
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- rest_endpoint str
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset SchemaRulesetArgs
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema str
- The schema string, for example, file("./schema_version_1.avsc").
- schema_identifier int
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- schema_references Sequence[SchemaSchemaReferenceArgs]
- The list of referenced schemas (see Schema References for more details):
- schema_registry_cluster SchemaSchemaRegistryClusterArgs
- subject_name str
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- version int
- (Required Integer) The version of the Schema, for example, 4.
- credentials Property Map
- The Cluster API Credentials.
- format String
- The format of the schema. Accepted values are: AVRO, PROTOBUF, and JSON.
- hardDelete Boolean
- An optional flag to control whether a schema should be soft or hard deleted. Set it to true if you want to hard delete a schema on destroy (see Schema Deletion Guidelines for more details). Must be unset when importing. Defaults to false (soft delete).
- metadata Property Map
- See here for more details. Supports the following:
- recreateOnUpdate Boolean
- An optional flag to control whether a schema should be recreated on an update. Set it to true if you want to manage different schema versions using different resource instances. Must be set to the target value when importing. Defaults to false, which manages the latest schema version only. The resource instance always points to the latest schema version by supporting in-place updates.
- restEndpoint String
- The REST endpoint of the Schema Registry cluster, for example, https://psrc-00000.us-central1.gcp.confluent.cloud:443.
- ruleset Property Map
- The list of schema rules. See Data Contracts for Schema Registry for more details. For example, these rules can enforce that a field that contains sensitive information must be encrypted, or that a message containing an invalid age must be sent to a dead letter queue.
- schema String
- The schema string, for example, file("./schema_version_1.avsc").
- schemaIdentifier Number
- (Required Integer) The globally unique ID of the Schema, for example, 100003. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects.
- schemaReferences List<Property Map>
- The list of referenced schemas (see Schema References for more details):
- schemaRegistryCluster Property Map
- subjectName String
- The name of the subject (in other words, the namespace), representing the subject under which the schema will be registered, for example, test-subject. Schemas evolve safely, following a defined compatibility mode, under a subject name.
- version Number
- (Required Integer) The version of the Schema, for example, 4.
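As a hedged TypeScript sketch, an existing schema can be rehydrated with the static get function. The ID string below is assumed to follow the same <Schema Registry cluster ID>/<Subject name>/<Schema identifier> format used for import; verify the exact ID of your resource before relying on it.

import * as confluentcloud from "@pulumi/confluentcloud";

// Look up an existing, already-registered schema without managing its lifecycle.
const existing = confluentcloud.Schema.get("existing-schema", "lsrc-abc123/test-subject/100003");

export const existingSchemaVersion = existing.version;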
Supporting Types
SchemaCredentials, SchemaCredentialsArgs
SchemaMetadata, SchemaMetadataArgs
- Properties Dictionary<string, string>
- The custom properties to set:
- Sensitives List<string>
- A list of metadata properties to be encrypted.
- Tags List<Pulumi.ConfluentCloud.Inputs.SchemaMetadataTag>
- Properties map[string]string
- The custom properties to set:
- Sensitives []string
- A list of metadata properties to be encrypted.
- Tags []SchemaMetadataTag
- properties Map<String,String>
- The custom properties to set:
- sensitives List<String>
- A list of metadata properties to be encrypted.
- tags List<SchemaMetadataTag>
- properties {[key: string]: string}
- The custom properties to set:
- sensitives string[]
- A list of metadata properties to be encrypted.
- tags SchemaMetadataTag[]
- properties Mapping[str, str]
- The custom properties to set:
- sensitives Sequence[str]
- A list of metadata properties to be encrypted.
- tags Sequence[SchemaMetadataTag]
- properties Map<String>
- The custom properties to set:
- sensitives List<String>
- A list of metadata properties to be encrypted.
- tags List<Property Map>
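The sketch below shows how the metadata block might be wired up in TypeScript. It assumes Schema Registry credentials are configured on the provider (so the per-resource credentials, restEndpoint, and schemaRegistryCluster inputs are omitted), and the property names, tag key, and values are purely illustrative.

import * as confluentcloud from "@pulumi/confluentcloud";
import * as fs from "fs";

const taggedSchema = new confluentcloud.Schema("tagged-schema", {
    subjectName: "customers-value",                       // illustrative subject
    format: "AVRO",
    schema: fs.readFileSync("./schemas/customers-value.avsc", "utf-8"),
    metadata: {
        properties: {
            owner: "data-platform-team",                  // illustrative custom properties
            contact: "data-platform@example.com",
        },
        sensitives: ["contact"],                          // encrypt this metadata property
        tags: [{
            key: "owner_team",
            values: ["data-platform"],
        }],
    },
});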
SchemaMetadataTag, SchemaMetadataTagArgs
SchemaRuleset, SchemaRulesetArgs
SchemaRulesetDomainRule, SchemaRulesetDomainRuleArgs
- Doc string
- An optional description of the rule.
- Expr string
- The body of the rule, which is optional.
- Kind string
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - Mode string
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - Name string
- A user-defined name that can be used to reference the rule.
- On
Failure string - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - On
Success string - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - Params Dictionary<string, string>
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- List<string>
- The tags to which the rule applies, if any.
- Type string
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
- Doc string
- An optional description of the rule.
- Expr string
- The body of the rule, which is optional.
- Kind string
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - Mode string
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - Name string
- A user-defined name that can be used to reference the rule.
- On
Failure string - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - On
Success string - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - Params map[string]string
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- []string
- The tags to which the rule applies, if any.
- Type string
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
- doc String
- An optional description of the rule.
- expr String
- The body of the rule, which is optional.
- kind String
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - mode String
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - name String
- A user-defined name that can be used to reference the rule.
- on
Failure String - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - on
Success String - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - params Map<String,String>
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- List<String>
- The tags to which the rule applies, if any.
- type String
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
- doc string
- An optional description of the rule.
- expr string
- The body of the rule, which is optional.
- kind string
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - mode string
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - name string
- A user-defined name that can be used to reference the rule.
- on
Failure string - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - on
Success string - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - params {[key: string]: string}
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- string[]
- The tags to which the rule applies, if any.
- type string
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
- doc str
- An optional description of the rule.
- expr str
- The body of the rule, which is optional.
- kind str
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - mode str
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - name str
- A user-defined name that can be used to reference the rule.
- on_
failure str - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - on_
success str - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - params Mapping[str, str]
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- Sequence[str]
- The tags to which the rule applies, if any.
- type str
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
- doc String
- An optional description of the rule.
- expr String
- The body of the rule, which is optional.
- kind String
- The kind of the rule. Accepted values are
CONDITION
andTRANSFORM
. - mode String
- The mode of the rule. Accepted values are
UPGRADE
,DOWNGRADE
,UPDOWN
,WRITE
,READ
, andWRITEREAD
. - name String
- A user-defined name that can be used to reference the rule.
- on
Failure String - An optional action to execute if the rule fails, otherwise the built-in action type ERROR is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, as mentioned above. - on
Success String - An optional action to execute if the rule succeeds, otherwise the built-in action type NONE is used. For
UPDOWN
andWRITEREAD
rules, one can specify two actions separated by commas, such as "NONE,ERROR" for aWRITEREAD
rule. In this caseNONE
applies toWRITE
andERROR
applies toREAD
. - params Map<String>
A set of static parameters for the rule, which is optional. These are key-value pairs that are passed to the rule.
Note: Schema rules (
ruleset
) are only available with the Stream Governance Advanced package.Note:
ruleset
andmetadata
attributes are available in Preview for early adopters. Preview features are introduced to gather customer feedback. This feature should be used only for evaluation and non-production testing purposes or to provide feedback to Confluent, particularly as it becomes more widely available in follow-on editions. Preview features are intended for evaluation use in development and testing environments only, and not for production use. The warranty, SLA, and Support Services provisions of your agreement with Confluent do not apply to Preview features. Preview features are considered to be a Proof of Concept as defined in the Confluent Cloud Terms of Service. Confluent may discontinue providing preview releases of the Preview features at any time in Confluent’s sole discretion.- List<String>
- The tags to which the rule applies, if any.
- type String
- The type of rule, which invokes a specific rule executor, such as Google Common Expression Language (CEL) or JSONata.
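As an illustration of a domain rule, the hedged TypeScript sketch below registers a schema with a single CEL condition rule. The rule name, expression, failure action, and parameter are assumptions rather than values from this page; it also assumes provider-level Schema Registry configuration as in the previous sketch, and (per the notes above) rulesets require the Stream Governance Advanced package.

import * as confluentcloud from "@pulumi/confluentcloud";
import * as fs from "fs";

const validatedSchema = new confluentcloud.Schema("validated-schema", {
    subjectName: "payments-value",                 // illustrative subject
    format: "AVRO",
    schema: fs.readFileSync("./schemas/payments-value.avsc", "utf-8"),
    ruleset: {
        domainRules: [{
            name: "checkAmountPositive",           // user-defined rule name
            kind: "CONDITION",
            mode: "WRITE",
            type: "CEL",                           // Google Common Expression Language executor
            expr: "message.amount > 0",            // illustrative rule body
            onFailure: "DLQ",                      // illustrative failure action
            params: {
                "dlq.topic": "payments.dlq",       // illustrative static parameter
            },
        }],
    },
});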
SchemaRulesetMigrationRule, SchemaRulesetMigrationRuleArgs
- doc str
- expr str
- kind str
- mode str
- name str
- on_
failure str - on_
success str - params Mapping[str, str]
- Sequence[str]
- type str
SchemaSchemaReference, SchemaSchemaReferenceArgs
- Name string
- The name of the subject, representing the subject under which the referenced schema is registered.
- SubjectName string
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- Version int
- The version, representing the exact version of the schema under the registered subject.
- Name string
- The name of the subject, representing the subject under which the referenced schema is registered.
- SubjectName string
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- Version int
- The version, representing the exact version of the schema under the registered subject.
- name String
- The name of the subject, representing the subject under which the referenced schema is registered.
- subjectName String
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- version Integer
- The version, representing the exact version of the schema under the registered subject.
- name string
- The name of the subject, representing the subject under which the referenced schema is registered.
- subjectName string
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- version number
- The version, representing the exact version of the schema under the registered subject.
- name str
- The name of the subject, representing the subject under which the referenced schema is registered.
- subject_name str
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- version int
- The version, representing the exact version of the schema under the registered subject.
- name String
- The name of the subject, representing the subject under which the referenced schema is registered.
- subjectName String
- The name for the reference. (For Avro Schema, the reference name is the fully qualified schema name, for JSON Schema it is a URL, and for Protobuf Schema, it is the name of another Protobuf file.)
- version Number
- The version, representing the exact version of the schema under the registered subject.
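A hedged TypeScript sketch of a schema that depends on another registered subject is shown below. The file names, subject names, and version are illustrative, and the mapping of name to the imported .proto file and subjectName to the registered subject follows common Protobuf schema-reference usage rather than anything stated on this page, so verify it against the field descriptions above.

import * as confluentcloud from "@pulumi/confluentcloud";
import * as fs from "fs";

const purchaseValue = new confluentcloud.Schema("purchase-value", {
    subjectName: "purchase-value",
    format: "PROTOBUF",
    schema: fs.readFileSync("./schemas/proto/purchase.proto", "utf-8"),
    schemaReferences: [{
        name: "other.proto",        // illustrative reference name
        subjectName: "other",       // illustrative referenced subject
        version: 1,
    }],
});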
SchemaSchemaRegistryCluster, SchemaSchemaRegistryClusterArgs
- Id string
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
- Id string
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
- id String
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
- id string
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
- id str
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
- id String
- The ID of the Schema Registry cluster, for example,
lsrc-abc123
.
Package Details
- Repository
- Confluent Cloud pulumi/pulumi-confluentcloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the confluent Terraform Provider.