gcp.datastream.Stream
A resource representing streaming data from a source to a destination.
To get more information about Stream, see:
- API documentation
- How-to Guides
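As a quick orientation before the per-language examples under Example Usage, the sketch below condenses the PostgreSQL-to-BigQuery example further down into the typical three-resource shape: a source ConnectionProfile, a destination ConnectionProfile, and the Stream that wires them together. Values such as the port, the backfillNone choice, and the omitted object selection are placeholders to adapt, not a canonical minimal configuration.

import * as gcp from "@pulumi/gcp";
// A source and a destination connection profile, referenced by the stream.
const source = new gcp.datastream.ConnectionProfile("source", {
    displayName: "Postgresql Source",
    location: "us-central1",
    connectionProfileId: "source-profile",
    postgresqlProfile: {
        hostname: "hostname",
        port: 5432, // standard PostgreSQL port; adjust for your instance
        username: "user",
        password: "pass",
        database: "postgres",
    },
});
const destination = new gcp.datastream.ConnectionProfile("destination", {
    displayName: "BigQuery Destination",
    location: "us-central1",
    connectionProfileId: "destination-profile",
    bigqueryProfile: {},
});
// The stream: what to read (sourceConfig), where to write (destinationConfig),
// and how to handle existing data (backfillNone here; backfillAll in the examples below).
const stream = new gcp.datastream.Stream("stream", {
    streamId: "my-stream",
    location: "us-central1",
    displayName: "Postgres to BigQuery",
    desiredState: "RUNNING",
    sourceConfig: {
        sourceConnectionProfile: source.id,
        postgresqlSourceConfig: {
            publication: "publication", // must already exist on the source database
            replicationSlot: "replication_slot", // must already exist on the source database
        },
    },
    destinationConfig: {
        destinationConnectionProfile: destination.id,
        bigqueryDestinationConfig: {
            sourceHierarchyDatasets: {
                datasetTemplate: {
                    location: "us-central1",
                },
            },
        },
    },
    backfillNone: {},
});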
Example Usage
Datastream Stream Full
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
const project = gcp.organizations.getProject({});
const instance = new gcp.sql.DatabaseInstance("instance", {
name: "my-instance",
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: true,
});
const db = new gcp.sql.Database("db", {
instance: instance.name,
name: "db",
});
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
const user = new gcp.sql.User("user", {
name: "user",
instance: instance.name,
host: "%",
password: pwd.result,
});
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("source_connection_profile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
const bucket = new gcp.storage.Bucket("bucket", {
name: "my-bucket",
location: "US",
uniformBucketLevelAccess: true,
});
const viewer = new gcp.storage.BucketIAMMember("viewer", {
bucket: bucket.name,
role: "roles/storage.objectViewer",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const creator = new gcp.storage.BucketIAMMember("creator", {
bucket: bucket.name,
role: "roles/storage.objectCreator",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const reader = new gcp.storage.BucketIAMMember("reader", {
bucket: bucket.name,
role: "roles/storage.legacyBucketReader",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const keyUser = new gcp.kms.CryptoKeyIAMMember("key_user", {
cryptoKeyId: "kms-name",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const destinationConnectionProfile = new gcp.datastream.ConnectionProfile("destination_connection_profile", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "destination-profile",
gcsProfile: {
bucket: bucket.name,
rootPath: "/path",
},
});
const _default = new gcp.datastream.Stream("default", {
streamId: "my-stream",
desiredState: "NOT_STARTED",
location: "us-central1",
displayName: "my stream",
labels: {
key: "value",
},
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {
includeObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [
{
table: "includedTable",
mysqlColumns: [{
column: "includedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
},
{
table: "includedTable_2",
},
],
}],
},
excludeObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [{
table: "excludedTable",
mysqlColumns: [{
column: "excludedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
}],
}],
},
maxConcurrentCdcTasks: 5,
},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile.id,
gcsDestinationConfig: {
path: "mydata",
fileRotationMb: 200,
fileRotationInterval: "60s",
jsonFileFormat: {
schemaFileFormat: "NO_SCHEMA_FILE",
compression: "GZIP",
},
},
},
backfillAll: {
mysqlExcludedObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [{
table: "excludedTable",
mysqlColumns: [{
column: "excludedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
}],
}],
},
},
customerManagedEncryptionKey: "kms-name",
}, {
dependsOn: [keyUser],
});
Python
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
project = gcp.organizations.get_project()
instance = gcp.sql.DatabaseInstance("instance",
name="my-instance",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=True)
db = gcp.sql.Database("db",
instance=instance.name,
name="db")
pwd = random.RandomPassword("pwd",
length=16,
special=False)
user = gcp.sql.User("user",
name="user",
instance=instance.name,
host="%",
password=pwd.result)
source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
bucket = gcp.storage.Bucket("bucket",
name="my-bucket",
location="US",
uniform_bucket_level_access=True)
viewer = gcp.storage.BucketIAMMember("viewer",
bucket=bucket.name,
role="roles/storage.objectViewer",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
creator = gcp.storage.BucketIAMMember("creator",
bucket=bucket.name,
role="roles/storage.objectCreator",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
reader = gcp.storage.BucketIAMMember("reader",
bucket=bucket.name,
role="roles/storage.legacyBucketReader",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
key_user = gcp.kms.CryptoKeyIAMMember("key_user",
crypto_key_id="kms-name",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
display_name="Connection profile",
location="us-central1",
connection_profile_id="destination-profile",
gcs_profile=gcp.datastream.ConnectionProfileGcsProfileArgs(
bucket=bucket.name,
root_path="/path",
))
default = gcp.datastream.Stream("default",
stream_id="my-stream",
desired_state="NOT_STARTED",
location="us-central1",
display_name="my stream",
labels={
"key": "value",
},
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(
include_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[
gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs(
table="includedTable",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="includedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
),
gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs(
table="includedTable_2",
),
],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs(
table="excludedTable",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="excludedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
)],
)],
),
max_concurrent_cdc_tasks=5,
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile.id,
gcs_destination_config=gcp.datastream.StreamDestinationConfigGcsDestinationConfigArgs(
path="mydata",
file_rotation_mb=200,
file_rotation_interval="60s",
json_file_format=gcp.datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs(
schema_file_format="NO_SCHEMA_FILE",
compression="GZIP",
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs(
mysql_excluded_objects=gcp.datastream.StreamBackfillAllMysqlExcludedObjectsArgs(
mysql_databases=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs(
table="excludedTable",
mysql_columns=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="excludedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
)],
)],
),
),
customer_managed_encryption_key="kms-name",
opts = pulumi.ResourceOptions(depends_on=[key_user]))
Go
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/sql"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project, err := organizations.LookupProject(ctx, nil, nil)
if err != nil {
return err
}
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
Name: pulumi.String("my-instance"),
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
Name: pulumi.String("db"),
})
if err != nil {
return err
}
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Name: pulumi.String("user"),
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "source_connection_profile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.String("my-bucket"),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "viewer", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.objectViewer"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "creator", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.objectCreator"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "reader", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.legacyBucketReader"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
keyUser, err := kms.NewCryptoKeyIAMMember(ctx, "key_user", &kms.CryptoKeyIAMMemberArgs{
CryptoKeyId: pulumi.String("kms-name"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
destinationConnectionProfile, err := datastream.NewConnectionProfile(ctx, "destination_connection_profile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
GcsProfile: &datastream.ConnectionProfileGcsProfileArgs{
Bucket: bucket.Name,
RootPath: pulumi.String("/path"),
},
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("NOT_STARTED"),
Location: pulumi.String("us-central1"),
DisplayName: pulumi.String("my stream"),
Labels: pulumi.StringMap{
"key": pulumi.String("value"),
},
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: &datastream.StreamSourceConfigMysqlSourceConfigArgs{
IncludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("includedTable"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("includedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("includedTable_2"),
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("excludedTable"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("excludedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
},
},
},
},
MaxConcurrentCdcTasks: pulumi.Int(5),
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile.ID(),
GcsDestinationConfig: &datastream.StreamDestinationConfigGcsDestinationConfigArgs{
Path: pulumi.String("mydata"),
FileRotationMb: pulumi.Int(200),
FileRotationInterval: pulumi.String("60s"),
JsonFileFormat: &datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs{
SchemaFileFormat: pulumi.String("NO_SCHEMA_FILE"),
Compression: pulumi.String("GZIP"),
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
MysqlExcludedObjects: &datastream.StreamBackfillAllMysqlExcludedObjectsArgs{
MysqlDatabases: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("excludedTable"),
MysqlColumns: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("excludedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
},
},
},
},
},
CustomerManagedEncryptionKey: pulumi.String("kms-name"),
}, pulumi.DependsOn([]pulumi.Resource{
keyUser,
}))
if err != nil {
return err
}
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;
return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
Name = "my-instance",
DatabaseVersion = "MYSQL_8_0",
Region = "us-central1",
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-f1-micro",
BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
{
Enabled = true,
BinaryLogEnabled = true,
},
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
DeletionProtection = true,
});
var db = new Gcp.Sql.Database("db", new()
{
Instance = instance.Name,
Name = "db",
});
var pwd = new Random.RandomPassword("pwd", new()
{
Length = 16,
Special = false,
});
var user = new Gcp.Sql.User("user", new()
{
Name = "user",
Instance = instance.Name,
Host = "%",
Password = pwd.Result,
});
var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("source_connection_profile", new()
{
DisplayName = "Source connection profile",
Location = "us-central1",
ConnectionProfileId = "source-profile",
MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
{
Hostname = instance.PublicIpAddress,
Username = user.Name,
Password = user.Password,
},
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = "my-bucket",
Location = "US",
UniformBucketLevelAccess = true,
});
var viewer = new Gcp.Storage.BucketIAMMember("viewer", new()
{
Bucket = bucket.Name,
Role = "roles/storage.objectViewer",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com",
});
var creator = new Gcp.Storage.BucketIAMMember("creator", new()
{
Bucket = bucket.Name,
Role = "roles/storage.objectCreator",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com",
});
var reader = new Gcp.Storage.BucketIAMMember("reader", new()
{
Bucket = bucket.Name,
Role = "roles/storage.legacyBucketReader",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com",
});
var keyUser = new Gcp.Kms.CryptoKeyIAMMember("key_user", new()
{
CryptoKeyId = "kms-name",
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com",
});
var destinationConnectionProfile = new Gcp.Datastream.ConnectionProfile("destination_connection_profile", new()
{
DisplayName = "Connection profile",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
GcsProfile = new Gcp.Datastream.Inputs.ConnectionProfileGcsProfileArgs
{
Bucket = bucket.Name,
RootPath = "/path",
},
});
var @default = new Gcp.Datastream.Stream("default", new()
{
StreamId = "my-stream",
DesiredState = "NOT_STARTED",
Location = "us-central1",
DisplayName = "my stream",
Labels =
{
{ "key", "value" },
},
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = sourceConnectionProfile.Id,
MysqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigArgs
{
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs
{
Database = "my-database",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs
{
Table = "includedTable",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Column = "includedColumn",
DataType = "VARCHAR",
Collation = "utf8mb4",
PrimaryKey = false,
Nullable = false,
OrdinalPosition = 0,
},
},
},
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs
{
Table = "includedTable_2",
},
},
},
},
},
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs
{
Database = "my-database",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs
{
Table = "excludedTable",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Column = "excludedColumn",
DataType = "VARCHAR",
Collation = "utf8mb4",
PrimaryKey = false,
Nullable = false,
OrdinalPosition = 0,
},
},
},
},
},
},
},
MaxConcurrentCdcTasks = 5,
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destinationConnectionProfile.Id,
GcsDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigArgs
{
Path = "mydata",
FileRotationMb = 200,
FileRotationInterval = "60s",
JsonFileFormat = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs
{
SchemaFileFormat = "NO_SCHEMA_FILE",
Compression = "GZIP",
},
},
},
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
MysqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs
{
Database = "my-database",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs
{
Table = "excludedTable",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Column = "excludedColumn",
DataType = "VARCHAR",
Collation = "utf8mb4",
PrimaryKey = false,
Nullable = false,
OrdinalPosition = 0,
},
},
},
},
},
},
},
},
CustomerManagedEncryptionKey = "kms-name",
}, new CustomResourceOptions
{
DependsOn =
{
keyUser,
},
});
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketIAMMember;
import com.pulumi.gcp.storage.BucketIAMMemberArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileGcsProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigGcsDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllMysqlExcludedObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject();
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.name("my-instance")
.databaseVersion("MYSQL_8_0")
.region("us-central1")
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-f1-micro")
.backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
.enabled(true)
.binaryLogEnabled(true)
.build())
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.deletionProtection(true)
.build());
var db = new Database("db", DatabaseArgs.builder()
.instance(instance.name())
.name("db")
.build());
var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
var user = new User("user", UserArgs.builder()
.name("user")
.instance(instance.name())
.host("%")
.password(pwd.result())
.build());
var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Source connection profile")
.location("us-central1")
.connectionProfileId("source-profile")
.mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
.hostname(instance.publicIpAddress())
.username(user.name())
.password(user.password())
.build())
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name("my-bucket")
.location("US")
.uniformBucketLevelAccess(true)
.build());
var viewer = new BucketIAMMember("viewer", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.objectViewer")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var creator = new BucketIAMMember("creator", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.objectCreator")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var reader = new BucketIAMMember("reader", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.legacyBucketReader")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var keyUser = new CryptoKeyIAMMember("keyUser", CryptoKeyIAMMemberArgs.builder()
.cryptoKeyId("kms-name")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var destinationConnectionProfile = new ConnectionProfile("destinationConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Connection profile")
.location("us-central1")
.connectionProfileId("destination-profile")
.gcsProfile(ConnectionProfileGcsProfileArgs.builder()
.bucket(bucket.name())
.rootPath("/path")
.build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.streamId("my-stream")
.desiredState("NOT_STARTED")
.location("us-central1")
.displayName("my stream")
.labels(Map.of("key", "value"))
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(sourceConnectionProfile.id())
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder()
.includeObjects(StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("includedTable")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("includedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build(),
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("includedTable_2")
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("excludedTable")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("excludedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build())
.build())
.build())
.maxConcurrentCdcTasks(5)
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destinationConnectionProfile.id())
.gcsDestinationConfig(StreamDestinationConfigGcsDestinationConfigArgs.builder()
.path("mydata")
.fileRotationMb(200)
.fileRotationInterval("60s")
.jsonFileFormat(StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs.builder()
.schemaFileFormat("NO_SCHEMA_FILE")
.compression("GZIP")
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.mysqlExcludedObjects(StreamBackfillAllMysqlExcludedObjectsArgs.builder()
.mysqlDatabases(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("excludedTable")
.mysqlColumns(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("excludedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build())
.build())
.build())
.build())
.customerManagedEncryptionKey("kms-name")
.build(), CustomResourceOptions.builder()
.dependsOn(keyUser)
.build());
}
}
YAML
resources:
instance:
type: gcp:sql:DatabaseInstance
properties:
name: my-instance
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: true
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
name: db
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
user:
type: gcp:sql:User
properties:
name: user
instance: ${instance.name}
host: '%'
password: ${pwd.result}
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
name: source_connection_profile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
bucket:
type: gcp:storage:Bucket
properties:
name: my-bucket
location: US
uniformBucketLevelAccess: true
viewer:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.objectViewer
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
creator:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.objectCreator
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
reader:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.legacyBucketReader
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
keyUser:
type: gcp:kms:CryptoKeyIAMMember
name: key_user
properties:
cryptoKeyId: kms-name
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
destinationConnectionProfile:
type: gcp:datastream:ConnectionProfile
name: destination_connection_profile
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: destination-profile
gcsProfile:
bucket: ${bucket.name}
rootPath: /path
default:
type: gcp:datastream:Stream
properties:
streamId: my-stream
desiredState: NOT_STARTED
location: us-central1
displayName: my stream
labels:
key: value
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig:
includeObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: includedTable
mysqlColumns:
- column: includedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
- table: includedTable_2
excludeObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: excludedTable
mysqlColumns:
- column: excludedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
maxConcurrentCdcTasks: 5
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile.id}
gcsDestinationConfig:
path: mydata
fileRotationMb: 200
fileRotationInterval: 60s
jsonFileFormat:
schemaFileFormat: NO_SCHEMA_FILE
compression: GZIP
backfillAll:
mysqlExcludedObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: excludedTable
mysqlColumns:
- column: excludedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
customerManagedEncryptionKey: kms-name
options:
dependsOn:
- ${keyUser}
variables:
project:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
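The stream above is created with desiredState set to NOT_STARTED, so Datastream provisions it without beginning replication; changing that field to RUNNING and re-running pulumi up is what starts it. The small sketch below is meant to be appended to the TypeScript variant above (it assumes the _default Stream variable declared there) and exports the server-reported outputs so that transition is visible as stack outputs:

// Appended to the TypeScript example above; `_default` is the Stream declared there.
// `name` and `state` are outputs reported back by the Datastream API.
export const streamName = _default.name;
export const streamState = _default.state; // the actual state, as opposed to the requested desiredState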
Datastream Stream Postgresql
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const source = new gcp.datastream.ConnectionProfile("source", {
displayName: "Postgresql Source",
location: "us-central1",
connectionProfileId: "source-profile",
postgresqlProfile: {
hostname: "hostname",
port: 3306,
username: "user",
password: "pass",
database: "postgres",
},
});
const destination = new gcp.datastream.ConnectionProfile("destination", {
displayName: "BigQuery Destination",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
const _default = new gcp.datastream.Stream("default", {
displayName: "Postgres to BigQuery",
location: "us-central1",
streamId: "my-stream",
desiredState: "RUNNING",
sourceConfig: {
sourceConnectionProfile: source.id,
postgresqlSourceConfig: {
maxConcurrentBackfillTasks: 12,
publication: "publication",
replicationSlot: "replication_slot",
includeObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
excludeObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
},
},
destinationConfig: {
destinationConnectionProfile: destination.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
},
},
},
},
backfillAll: {
postgresqlExcludedObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
},
});
Python
import pulumi
import pulumi_gcp as gcp
source = gcp.datastream.ConnectionProfile("source",
display_name="Postgresql Source",
location="us-central1",
connection_profile_id="source-profile",
postgresql_profile=gcp.datastream.ConnectionProfilePostgresqlProfileArgs(
hostname="hostname",
port=3306,
username="user",
password="pass",
database="postgres",
))
destination = gcp.datastream.ConnectionProfile("destination",
display_name="BigQuery Destination",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
default = gcp.datastream.Stream("default",
display_name="Postgres to BigQuery",
location="us-central1",
stream_id="my-stream",
desired_state="RUNNING",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source.id,
postgresql_source_config=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigArgs(
max_concurrent_backfill_tasks=12,
publication="publication",
replication_slot="replication_slot",
include_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
),
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs(
postgresql_excluded_objects=gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
))
Go
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
source, err := datastream.NewConnectionProfile(ctx, "source", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Postgresql Source"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
PostgresqlProfile: &datastream.ConnectionProfilePostgresqlProfileArgs{
Hostname: pulumi.String("hostname"),
Port: pulumi.Int(3306),
Username: pulumi.String("user"),
Password: pulumi.String("pass"),
Database: pulumi.String("postgres"),
},
})
if err != nil {
return err
}
destination, err := datastream.NewConnectionProfile(ctx, "destination", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("BigQuery Destination"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
DisplayName: pulumi.String("Postgres to BigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("RUNNING"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: source.ID(),
PostgresqlSourceConfig: &datastream.StreamSourceConfigPostgresqlSourceConfigArgs{
MaxConcurrentBackfillTasks: pulumi.Int(12),
Publication: pulumi.String("publication"),
ReplicationSlot: pulumi.String("replication_slot"),
IncludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destination.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
PostgresqlExcludedObjects: &datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs{
PostgresqlSchemas: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var source = new Gcp.Datastream.ConnectionProfile("source", new()
{
DisplayName = "Postgresql Source",
Location = "us-central1",
ConnectionProfileId = "source-profile",
PostgresqlProfile = new Gcp.Datastream.Inputs.ConnectionProfilePostgresqlProfileArgs
{
Hostname = "hostname",
Port = 3306,
Username = "user",
Password = "pass",
Database = "postgres",
},
});
var destination = new Gcp.Datastream.ConnectionProfile("destination", new()
{
DisplayName = "BigQuery Destination",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var @default = new Gcp.Datastream.Stream("default", new()
{
DisplayName = "Postgres to BigQuery",
Location = "us-central1",
StreamId = "my-stream",
DesiredState = "RUNNING",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = source.Id,
PostgresqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigArgs
{
MaxConcurrentBackfillTasks = 12,
Publication = "publication",
ReplicationSlot = "replication_slot",
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destination.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
},
},
},
},
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
PostgresqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
});
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfilePostgresqlProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllPostgresqlExcludedObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var source = new ConnectionProfile("source", ConnectionProfileArgs.builder()
.displayName("Postgresql Source")
.location("us-central1")
.connectionProfileId("source-profile")
.postgresqlProfile(ConnectionProfilePostgresqlProfileArgs.builder()
.hostname("hostname")
.port(3306)
.username("user")
.password("pass")
.database("postgres")
.build())
.build());
var destination = new ConnectionProfile("destination", ConnectionProfileArgs.builder()
.displayName("BigQuery Destination")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.displayName("Postgres to BigQuery")
.location("us-central1")
.streamId("my-stream")
.desiredState("RUNNING")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(source.id())
.postgresqlSourceConfig(StreamSourceConfigPostgresqlSourceConfigArgs.builder()
.maxConcurrentBackfillTasks(12)
.publication("publication")
.replicationSlot("replication_slot")
.includeObjects(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destination.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.postgresqlExcludedObjects(StreamBackfillAllPostgresqlExcludedObjectsArgs.builder()
.postgresqlSchemas(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build());
}
}
YAML
resources:
source:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Postgresql Source
location: us-central1
connectionProfileId: source-profile
postgresqlProfile:
hostname: hostname
port: 3306
username: user
password: pass
database: postgres
destination:
type: gcp:datastream:ConnectionProfile
properties:
displayName: BigQuery Destination
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
default:
type: gcp:datastream:Stream
properties:
displayName: Postgres to BigQuery
location: us-central1
streamId: my-stream
desiredState: RUNNING
sourceConfig:
sourceConnectionProfile: ${source.id}
postgresqlSourceConfig:
maxConcurrentBackfillTasks: 12
publication: publication
replicationSlot: replication_slot
includeObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
excludeObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
destinationConfig:
destinationConnectionProfile: ${destination.id}
bigqueryDestinationConfig:
dataFreshness: 900s
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
backfillAll:
postgresqlExcludedObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
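For the stream above, desiredState is the requested lifecycle state (the examples use NOT_STARTED and RUNNING; PAUSED is also documented for pausing a running stream), while the read-only state output reports what the service is actually doing. The following TypeScript snippet is not part of the generated example; it is a minimal, hedged sketch of driving desiredState from stack configuration so the stream can be paused and resumed with pulumi config set streamState PAUSED followed by pulumi up. The connection profile names and the streamState config key are hypothetical placeholders.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Read the requested state from config; default to RUNNING when unset.
const config = new pulumi.Config();
const requestedState = config.get("streamState") ?? "RUNNING";
const managed = new gcp.datastream.Stream("managed", {
    streamId: "my-stream",
    location: "us-central1",
    displayName: "Postgres to BigQuery",
    desiredState: requestedState,
    sourceConfig: {
        // Hypothetical, fully-qualified connection profile names.
        sourceConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/source-profile",
        postgresqlSourceConfig: {
            publication: "publication",
            replicationSlot: "replication_slot",
        },
    },
    destinationConfig: {
        destinationConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/destination-profile",
        bigqueryDestinationConfig: {
            sourceHierarchyDatasets: {
                datasetTemplate: {
                    location: "us-central1",
                },
            },
        },
    },
    backfillNone: {},
});
// The server-reported status, as opposed to the requested desiredState.
export const reportedState = managed.state;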
Datastream Stream Oracle
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const source = new gcp.datastream.ConnectionProfile("source", {
displayName: "Oracle Source",
location: "us-central1",
connectionProfileId: "source-profile",
oracleProfile: {
hostname: "hostname",
port: 1521,
username: "user",
password: "pass",
databaseService: "ORCL",
},
});
const destination = new gcp.datastream.ConnectionProfile("destination", {
displayName: "BigQuery Destination",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
const stream5 = new gcp.datastream.Stream("stream5", {
displayName: "Oracle to BigQuery",
location: "us-central1",
streamId: "my-stream",
desiredState: "RUNNING",
sourceConfig: {
sourceConnectionProfile: source.id,
oracleSourceConfig: {
maxConcurrentCdcTasks: 8,
maxConcurrentBackfillTasks: 12,
includeObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
excludeObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
dropLargeObjects: {},
},
},
destinationConfig: {
destinationConnectionProfile: destination.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
},
},
},
},
backfillAll: {
oracleExcludedObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
},
});
import pulumi
import pulumi_gcp as gcp
source = gcp.datastream.ConnectionProfile("source",
display_name="Oracle Source",
location="us-central1",
connection_profile_id="source-profile",
oracle_profile=gcp.datastream.ConnectionProfileOracleProfileArgs(
hostname="hostname",
port=1521,
username="user",
password="pass",
database_service="ORCL",
))
destination = gcp.datastream.ConnectionProfile("destination",
display_name="BigQuery Destination",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
stream5 = gcp.datastream.Stream("stream5",
display_name="Oracle to BigQuery",
location="us-central1",
stream_id="my-stream",
desired_state="RUNNING",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source.id,
oracle_source_config=gcp.datastream.StreamSourceConfigOracleSourceConfigArgs(
max_concurrent_cdc_tasks=8,
max_concurrent_backfill_tasks=12,
include_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
drop_large_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs(),
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
),
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs(
oracle_excluded_objects=gcp.datastream.StreamBackfillAllOracleExcludedObjectsArgs(
oracle_schemas=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
))
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
source, err := datastream.NewConnectionProfile(ctx, "source", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Oracle Source"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
OracleProfile: &datastream.ConnectionProfileOracleProfileArgs{
Hostname: pulumi.String("hostname"),
Port: pulumi.Int(1521),
Username: pulumi.String("user"),
Password: pulumi.String("pass"),
DatabaseService: pulumi.String("ORCL"),
},
})
if err != nil {
return err
}
destination, err := datastream.NewConnectionProfile(ctx, "destination", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("BigQuery Destination"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "stream5", &datastream.StreamArgs{
DisplayName: pulumi.String("Oracle to BigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("RUNNING"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: source.ID(),
OracleSourceConfig: &datastream.StreamSourceConfigOracleSourceConfigArgs{
MaxConcurrentCdcTasks: pulumi.Int(8),
MaxConcurrentBackfillTasks: pulumi.Int(12),
IncludeObjects: &datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
DropLargeObjects: nil,
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destination.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
OracleExcludedObjects: &datastream.StreamBackfillAllOracleExcludedObjectsArgs{
OracleSchemas: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var source = new Gcp.Datastream.ConnectionProfile("source", new()
{
DisplayName = "Oracle Source",
Location = "us-central1",
ConnectionProfileId = "source-profile",
OracleProfile = new Gcp.Datastream.Inputs.ConnectionProfileOracleProfileArgs
{
Hostname = "hostname",
Port = 1521,
Username = "user",
Password = "pass",
DatabaseService = "ORCL",
},
});
var destination = new Gcp.Datastream.ConnectionProfile("destination", new()
{
DisplayName = "BigQuery Destination",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var stream5 = new Gcp.Datastream.Stream("stream5", new()
{
DisplayName = "Oracle to BigQuery",
Location = "us-central1",
StreamId = "my-stream",
DesiredState = "RUNNING",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = source.Id,
OracleSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigArgs
{
MaxConcurrentCdcTasks = 8,
MaxConcurrentBackfillTasks = 12,
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
DropLargeObjects = null,
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destination.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
},
},
},
},
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
OracleExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileOracleProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllOracleExcludedObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var source = new ConnectionProfile("source", ConnectionProfileArgs.builder()
.displayName("Oracle Source")
.location("us-central1")
.connectionProfileId("source-profile")
.oracleProfile(ConnectionProfileOracleProfileArgs.builder()
.hostname("hostname")
.port(1521)
.username("user")
.password("pass")
.databaseService("ORCL")
.build())
.build());
var destination = new ConnectionProfile("destination", ConnectionProfileArgs.builder()
.displayName("BigQuery Destination")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
var stream5 = new Stream("stream5", StreamArgs.builder()
.displayName("Oracle to BigQuery")
.location("us-central1")
.streamId("my-stream")
.desiredState("RUNNING")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(source.id())
.oracleSourceConfig(StreamSourceConfigOracleSourceConfigArgs.builder()
.maxConcurrentCdcTasks(8)
.maxConcurrentBackfillTasks(12)
.includeObjects(StreamSourceConfigOracleSourceConfigIncludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigOracleSourceConfigExcludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.dropLargeObjects(StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs.builder().build())
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destination.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.oracleExcludedObjects(StreamBackfillAllOracleExcludedObjectsArgs.builder()
.oracleSchemas(StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build());
}
}
resources:
source:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Oracle Source
location: us-central1
connectionProfileId: source-profile
oracleProfile:
hostname: hostname
port: 1521
username: user
password: pass
databaseService: ORCL
destination:
type: gcp:datastream:ConnectionProfile
properties:
displayName: BigQuery Destination
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
stream5:
type: gcp:datastream:Stream
properties:
displayName: Oracle to BigQuery
location: us-central1
streamId: my-stream
desiredState: RUNNING
sourceConfig:
sourceConnectionProfile: ${source.id}
oracleSourceConfig:
maxConcurrentCdcTasks: 8
maxConcurrentBackfillTasks: 12
includeObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
excludeObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
dropLargeObjects: {}
destinationConfig:
destinationConnectionProfile: ${destination.id}
bigqueryDestinationConfig:
dataFreshness: 900s
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
backfillAll:
oracleExcludedObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
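An existing stream can also be read back into a program without re-declaring its full configuration, using the resource's static get function and the stream's ID. This is a hedged sketch: the project ID below is a placeholder, and the ID follows the resource's import format projects/{{project}}/locations/{{location}}/streams/{{stream_id}}.
import * as gcp from "@pulumi/gcp";
// Look up a stream that already exists (for example, one created elsewhere).
const existing = gcp.datastream.Stream.get(
    "existing",
    "projects/my-project/locations/us-central1/streams/my-stream");
// Server-populated outputs become available on the returned resource.
export const streamName = existing.name;
export const streamState = existing.state;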
Datastream Stream Sql Server
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const instance = new gcp.sql.DatabaseInstance("instance", {
name: "sql-server",
databaseVersion: "SQLSERVER_2019_STANDARD",
region: "us-central1",
rootPassword: "root-password",
deletionProtection: true,
settings: {
tier: "db-custom-2-4096",
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
});
const user = new gcp.sql.User("user", {
name: "user",
instance: instance.name,
password: "password",
});
const db = new gcp.sql.Database("db", {
name: "db",
instance: instance.name,
}, {
dependsOn: [user],
});
const source = new gcp.datastream.ConnectionProfile("source", {
displayName: "SQL Server Source",
location: "us-central1",
connectionProfileId: "source-profile",
sqlServerProfile: {
hostname: instance.publicIpAddress,
port: 1433,
username: user.name,
password: user.password,
database: db.name,
},
});
const destination = new gcp.datastream.ConnectionProfile("destination", {
displayName: "BigQuery Destination",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
const _default = new gcp.datastream.Stream("default", {
displayName: "SQL Server to BigQuery",
location: "us-central1",
streamId: "stream",
sourceConfig: {
sourceConnectionProfile: source.id,
sqlServerSourceConfig: {
includeObjects: {
schemas: [{
schema: "schema",
tables: [{
table: "table",
}],
}],
},
},
},
destinationConfig: {
destinationConnectionProfile: destination.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
},
},
},
},
backfillNone: {},
});
import pulumi
import pulumi_gcp as gcp
instance = gcp.sql.DatabaseInstance("instance",
name="sql-server",
database_version="SQLSERVER_2019_STANDARD",
region="us-central1",
root_password="root-password",
deletion_protection=True,
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-custom-2-4096",
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
))
user = gcp.sql.User("user",
name="user",
instance=instance.name,
password="password")
db = gcp.sql.Database("db",
name="db",
instance=instance.name,
opts = pulumi.ResourceOptions(depends_on=[user]))
source = gcp.datastream.ConnectionProfile("source",
display_name="SQL Server Source",
location="us-central1",
connection_profile_id="source-profile",
sql_server_profile=gcp.datastream.ConnectionProfileSqlServerProfileArgs(
hostname=instance.public_ip_address,
port=1433,
username=user.name,
password=user.password,
database=db.name,
))
destination = gcp.datastream.ConnectionProfile("destination",
display_name="BigQuery Destination",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
default = gcp.datastream.Stream("default",
display_name="SQL Server to BigQuery",
location="us-central1",
stream_id="stream",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source.id,
sql_server_source_config=gcp.datastream.StreamSourceConfigSqlServerSourceConfigArgs(
include_objects=gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs(
schemas=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs(
schema="schema",
tables=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs(
table="table",
)],
)],
),
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
),
),
),
),
backfill_none=gcp.datastream.StreamBackfillNoneArgs())
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/sql"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
Name: pulumi.String("sql-server"),
DatabaseVersion: pulumi.String("SQLSERVER_2019_STANDARD"),
Region: pulumi.String("us-central1"),
RootPassword: pulumi.String("root-password"),
DeletionProtection: pulumi.Bool(true),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-custom-2-4096"),
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
})
if err != nil {
return err
}
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Name: pulumi.String("user"),
Instance: instance.Name,
Password: pulumi.String("password"),
})
if err != nil {
return err
}
db, err := sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Name: pulumi.String("db"),
Instance: instance.Name,
}, pulumi.DependsOn([]pulumi.Resource{
user,
}))
if err != nil {
return err
}
source, err := datastream.NewConnectionProfile(ctx, "source", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("SQL Server Source"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
SqlServerProfile: &datastream.ConnectionProfileSqlServerProfileArgs{
Hostname: instance.PublicIpAddress,
Port: pulumi.Int(1433),
Username: user.Name,
Password: user.Password,
Database: db.Name,
},
})
if err != nil {
return err
}
destination, err := datastream.NewConnectionProfile(ctx, "destination", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("BigQuery Destination"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
DisplayName: pulumi.String("SQL Server to BigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("stream"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: source.ID(),
SqlServerSourceConfig: &datastream.StreamSourceConfigSqlServerSourceConfigArgs{
IncludeObjects: &datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs{
Schemas: datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArray{
&datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs{
Schema: pulumi.String("schema"),
Tables: datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArray{
&datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs{
Table: pulumi.String("table"),
},
},
},
},
},
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destination.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
BackfillNone: nil,
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
Name = "sql-server",
DatabaseVersion = "SQLSERVER_2019_STANDARD",
Region = "us-central1",
RootPassword = "root-password",
DeletionProtection = true,
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-custom-2-4096",
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
});
var user = new Gcp.Sql.User("user", new()
{
Name = "user",
Instance = instance.Name,
Password = "password",
});
var db = new Gcp.Sql.Database("db", new()
{
Name = "db",
Instance = instance.Name,
}, new CustomResourceOptions
{
DependsOn =
{
user,
},
});
var source = new Gcp.Datastream.ConnectionProfile("source", new()
{
DisplayName = "SQL Server Source",
Location = "us-central1",
ConnectionProfileId = "source-profile",
SqlServerProfile = new Gcp.Datastream.Inputs.ConnectionProfileSqlServerProfileArgs
{
Hostname = instance.PublicIpAddress,
Port = 1433,
Username = user.Name,
Password = user.Password,
Database = db.Name,
},
});
var destination = new Gcp.Datastream.ConnectionProfile("destination", new()
{
DisplayName = "BigQuery Destination",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var @default = new Gcp.Datastream.Stream("default", new()
{
DisplayName = "SQL Server to BigQuery",
Location = "us-central1",
StreamId = "stream",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = source.Id,
SqlServerSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigArgs
{
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs
{
Schemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs
{
Schema = "schema",
Tables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs
{
Table = "table",
},
},
},
},
},
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destination.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
},
},
},
},
BackfillNone = null,
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileSqlServerProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigSqlServerSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillNoneArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.name("sql-server")
.databaseVersion("SQLSERVER_2019_STANDARD")
.region("us-central1")
.rootPassword("root-password")
.deletionProtection(true)
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-custom-2-4096")
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.build());
var user = new User("user", UserArgs.builder()
.name("user")
.instance(instance.name())
.password("password")
.build());
var db = new Database("db", DatabaseArgs.builder()
.name("db")
.instance(instance.name())
.build(), CustomResourceOptions.builder()
.dependsOn(user)
.build());
var source = new ConnectionProfile("source", ConnectionProfileArgs.builder()
.displayName("SQL Server Source")
.location("us-central1")
.connectionProfileId("source-profile")
.sqlServerProfile(ConnectionProfileSqlServerProfileArgs.builder()
.hostname(instance.publicIpAddress())
.port(1433)
.username(user.name())
.password(user.password())
.database(db.name())
.build())
.build());
var destination = new ConnectionProfile("destination", ConnectionProfileArgs.builder()
.displayName("BigQuery Destination")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.displayName("SQL Server to BigQuery")
.location("us-central1")
.streamId("stream")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(source.id())
.sqlServerSourceConfig(StreamSourceConfigSqlServerSourceConfigArgs.builder()
.includeObjects(StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs.builder()
.schemas(StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs.builder()
.schema("schema")
.tables(StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs.builder()
.table("table")
.build())
.build())
.build())
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destination.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build())
.backfillNone(StreamBackfillNoneArgs.builder().build())
.build());
}
}
resources:
instance:
type: gcp:sql:DatabaseInstance
properties:
name: sql-server
databaseVersion: SQLSERVER_2019_STANDARD
region: us-central1
rootPassword: root-password
deletionProtection: true
settings:
tier: db-custom-2-4096
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
db:
type: gcp:sql:Database
properties:
name: db
instance: ${instance.name}
options:
dependsOn:
- ${user}
user:
type: gcp:sql:User
properties:
name: user
instance: ${instance.name}
password: password
source:
type: gcp:datastream:ConnectionProfile
properties:
displayName: SQL Server Source
location: us-central1
connectionProfileId: source-profile
sqlServerProfile:
hostname: ${instance.publicIpAddress}
port: 1433
username: ${user.name}
password: ${user.password}
database: ${db.name}
destination:
type: gcp:datastream:ConnectionProfile
properties:
displayName: BigQuery Destination
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
default:
type: gcp:datastream:Stream
properties:
displayName: SQL Server to BigQuery
location: us-central1
streamId: stream
sourceConfig:
sourceConnectionProfile: ${source.id}
sqlServerSourceConfig:
includeObjects:
schemas:
- schema: schema
tables:
- table: table
destinationConfig:
destinationConnectionProfile: ${destination.id}
bigqueryDestinationConfig:
dataFreshness: 900s
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
backfillNone: {}
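The SQL Server example above hard-codes passwords for brevity. Below is a hedged TypeScript sketch of one way to avoid that, reading the database user's password from Pulumi config as a secret; the sqlServerPassword config key and the hostname value are assumptions for illustration.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Set once with: pulumi config set --secret sqlServerPassword <value>
const config = new pulumi.Config();
const dbPassword = config.requireSecret("sqlServerPassword");
// Same user as in the example, but with the secret value; "sql-server" is the
// Cloud SQL instance name defined above.
const user = new gcp.sql.User("user", {
    name: "user",
    instance: "sql-server",
    password: dbPassword,
});
// The same secret output feeds the Datastream connection profile, so the
// password is stored encrypted in the stack's state.
const source = new gcp.datastream.ConnectionProfile("source", {
    displayName: "SQL Server Source",
    location: "us-central1",
    connectionProfileId: "source-profile",
    sqlServerProfile: {
        hostname: "203.0.113.10", // hypothetical address; the example uses instance.publicIpAddress
        port: 1433,
        username: user.name,
        password: dbPassword,
        database: "db",
    },
});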
Datastream Stream Postgresql Bigquery Dataset Id
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
const postgres = new gcp.bigquery.Dataset("postgres", {
datasetId: "postgres",
friendlyName: "postgres",
description: "Database of postgres",
location: "us-central1",
});
const destinationConnectionProfile2 = new gcp.datastream.ConnectionProfile("destination_connection_profile2", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "dest-profile",
bigqueryProfile: {},
});
const instance = new gcp.sql.DatabaseInstance("instance", {
name: "instance-name",
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: false,
});
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
const user = new gcp.sql.User("user", {
name: "my-user",
instance: instance.name,
host: "%",
password: pwd.result,
});
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("source_connection_profile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
const _default = new gcp.datastream.Stream("default", {
displayName: "postgres to bigQuery",
location: "us-central1",
streamId: "postgres-bigquery",
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile2.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
singleTargetDataset: {
datasetId: postgres.id,
},
},
},
backfillAll: {},
});
const db = new gcp.sql.Database("db", {
instance: instance.name,
name: "db",
});
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
postgres = gcp.bigquery.Dataset("postgres",
dataset_id="postgres",
friendly_name="postgres",
description="Database of postgres",
location="us-central1")
destination_connection_profile2 = gcp.datastream.ConnectionProfile("destination_connection_profile2",
display_name="Connection profile",
location="us-central1",
connection_profile_id="dest-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
instance = gcp.sql.DatabaseInstance("instance",
name="instance-name",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=False)
pwd = random.RandomPassword("pwd",
length=16,
special=False)
user = gcp.sql.User("user",
name="my-user",
instance=instance.name,
host="%",
password=pwd.result)
source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
default = gcp.datastream.Stream("default",
display_name="postgres to bigQuery",
location="us-central1",
stream_id="postgres-bigquery",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile2.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
single_target_dataset=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs(
dataset_id=postgres.id,
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs())
db = gcp.sql.Database("db",
instance=instance.name,
name="db")
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/sql"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
postgres, err := bigquery.NewDataset(ctx, "postgres", &bigquery.DatasetArgs{
DatasetId: pulumi.String("postgres"),
FriendlyName: pulumi.String("postgres"),
Description: pulumi.String("Database of postgres"),
Location: pulumi.String("us-central1"),
})
if err != nil {
return err
}
destinationConnectionProfile2, err := datastream.NewConnectionProfile(ctx, "destination_connection_profile2", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("dest-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
Name: pulumi.String("instance-name"),
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(false),
})
if err != nil {
return err
}
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Name: pulumi.String("my-user"),
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "source_connection_profile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
DisplayName: pulumi.String("postgres to bigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("postgres-bigquery"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: nil,
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile2.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SingleTargetDataset: &datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs{
DatasetId: postgres.ID(),
},
},
},
BackfillAll: nil,
})
if err != nil {
return err
}
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
Name: pulumi.String("db"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;
return await Deployment.RunAsync(() =>
{
var postgres = new Gcp.BigQuery.Dataset("postgres", new()
{
DatasetId = "postgres",
FriendlyName = "postgres",
Description = "Database of postgres",
Location = "us-central1",
});
var destinationConnectionProfile2 = new Gcp.Datastream.ConnectionProfile("destination_connection_profile2", new()
{
DisplayName = "Connection profile",
Location = "us-central1",
ConnectionProfileId = "dest-profile",
BigqueryProfile = null,
});
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
Name = "instance-name",
DatabaseVersion = "MYSQL_8_0",
Region = "us-central1",
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-f1-micro",
BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
{
Enabled = true,
BinaryLogEnabled = true,
},
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
DeletionProtection = false,
});
var pwd = new Random.RandomPassword("pwd", new()
{
Length = 16,
Special = false,
});
var user = new Gcp.Sql.User("user", new()
{
Name = "my-user",
Instance = instance.Name,
Host = "%",
Password = pwd.Result,
});
var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("source_connection_profile", new()
{
DisplayName = "Source connection profile",
Location = "us-central1",
ConnectionProfileId = "source-profile",
MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
{
Hostname = instance.PublicIpAddress,
Username = user.Name,
Password = user.Password,
},
});
var @default = new Gcp.Datastream.Stream("default", new()
{
DisplayName = "postgres to bigQuery",
Location = "us-central1",
StreamId = "postgres-bigquery",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = sourceConnectionProfile.Id,
MysqlSourceConfig = null,
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destinationConnectionProfile2.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SingleTargetDataset = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs
{
DatasetId = postgres.Id,
},
},
},
BackfillAll = null,
});
var db = new Gcp.Sql.Database("db", new()
{
Instance = instance.Name,
Name = "db",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var postgres = new Dataset("postgres", DatasetArgs.builder()
.datasetId("postgres")
.friendlyName("postgres")
.description("Database of postgres")
.location("us-central1")
.build());
var destinationConnectionProfile2 = new ConnectionProfile("destinationConnectionProfile2", ConnectionProfileArgs.builder()
.displayName("Connection profile")
.location("us-central1")
.connectionProfileId("dest-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.name("instance-name")
.databaseVersion("MYSQL_8_0")
.region("us-central1")
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-f1-micro")
.backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
.enabled(true)
.binaryLogEnabled(true)
.build())
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.deletionProtection(false)
.build());
var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
var user = new User("user", UserArgs.builder()
.name("my-user")
.instance(instance.name())
.host("%")
.password(pwd.result())
.build());
var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Source connection profile")
.location("us-central1")
.connectionProfileId("source-profile")
.mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
.hostname(instance.publicIpAddress())
.username(user.name())
.password(user.password())
.build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.displayName("postgres to bigQuery")
.location("us-central1")
.streamId("postgres-bigquery")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(sourceConnectionProfile.id())
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder().build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destinationConnectionProfile2.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.singleTargetDataset(StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs.builder()
.datasetId(postgres.id())
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder().build())
.build());
var db = new Database("db", DatabaseArgs.builder()
.instance(instance.name())
.name("db")
.build());
}
}
resources:
postgres:
type: gcp:bigquery:Dataset
properties:
datasetId: postgres
friendlyName: postgres
description: Database of postgres
location: us-central1
default:
type: gcp:datastream:Stream
properties:
displayName: postgres to bigQuery
location: us-central1
streamId: postgres-bigquery
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig: {}
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile2.id}
bigqueryDestinationConfig:
dataFreshness: 900s
singleTargetDataset:
datasetId: ${postgres.id}
backfillAll: {}
destinationConnectionProfile2:
type: gcp:datastream:ConnectionProfile
name: destination_connection_profile2
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: dest-profile
bigqueryProfile: {}
instance:
type: gcp:sql:DatabaseInstance
properties:
name: instance-name
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: false
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
name: db
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
user:
type: gcp:sql:User
properties:
name: my-user
instance: ${instance.name}
host: '%'
password: ${pwd.result}
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
name: source_connection_profile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
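Because singleTargetDataset routes every replicated table into the one BigQuery dataset above, that dataset is worth guarding against accidental deletion. A hedged sketch using Pulumi's protect resource option, which is a Pulumi-level safeguard separate from the Cloud SQL deletionProtection field in the example: a protected resource cannot be deleted by pulumi destroy until the option is removed.
import * as gcp from "@pulumi/gcp";
// The same dataset as in the example, created with the protect option set.
const postgres = new gcp.bigquery.Dataset("postgres", {
    datasetId: "postgres",
    friendlyName: "postgres",
    description: "Database of postgres",
    location: "us-central1",
}, { protect: true });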
Datastream Stream Bigquery
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
const project = gcp.organizations.getProject({});
const instance = new gcp.sql.DatabaseInstance("instance", {
name: "my-instance",
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: true,
});
const db = new gcp.sql.Database("db", {
instance: instance.name,
name: "db",
});
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
const user = new gcp.sql.User("user", {
name: "user",
instance: instance.name,
host: "%",
password: pwd.result,
});
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("source_connection_profile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
const bqSa = gcp.bigquery.getDefaultServiceAccount({});
const bigqueryKeyUser = new gcp.kms.CryptoKeyIAMMember("bigquery_key_user", {
cryptoKeyId: "bigquery-kms-name",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
member: bqSa.then(bqSa => `serviceAccount:${bqSa.email}`),
});
const destinationConnectionProfile = new gcp.datastream.ConnectionProfile("destination_connection_profile", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
const _default = new gcp.datastream.Stream("default", {
streamId: "my-stream",
location: "us-central1",
displayName: "my stream",
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile.id,
bigqueryDestinationConfig: {
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
kmsKeyName: "bigquery-kms-name",
},
},
},
},
backfillNone: {},
}, {
dependsOn: [bigqueryKeyUser],
});
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
project = gcp.organizations.get_project()
instance = gcp.sql.DatabaseInstance("instance",
name="my-instance",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=True)
db = gcp.sql.Database("db",
instance=instance.name,
name="db")
pwd = random.RandomPassword("pwd",
length=16,
special=False)
user = gcp.sql.User("user",
name="user",
instance=instance.name,
host="%",
password=pwd.result)
source_connection_profile = gcp.datastream.ConnectionProfile("source_connection_profile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
bq_sa = gcp.bigquery.get_default_service_account()
bigquery_key_user = gcp.kms.CryptoKeyIAMMember("bigquery_key_user",
crypto_key_id="bigquery-kms-name",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
member=f"serviceAccount:{bq_sa.email}")
destination_connection_profile = gcp.datastream.ConnectionProfile("destination_connection_profile",
display_name="Connection profile",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
default = gcp.datastream.Stream("default",
stream_id="my-stream",
location="us-central1",
display_name="my stream",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
kms_key_name="bigquery-kms-name",
),
),
),
),
backfill_none=gcp.datastream.StreamBackfillNoneArgs(),
opts=pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/sql"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := organizations.LookupProject(ctx, nil, nil)
if err != nil {
return err
}
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
Name: pulumi.String("my-instance"),
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
Name: pulumi.String("db"),
})
if err != nil {
return err
}
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Name: pulumi.String("user"),
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "source_connection_profile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
bqSa, err := bigquery.GetDefaultServiceAccount(ctx, nil, nil)
if err != nil {
return err
}
bigqueryKeyUser, err := kms.NewCryptoKeyIAMMember(ctx, "bigquery_key_user", &kms.CryptoKeyIAMMemberArgs{
CryptoKeyId: pulumi.String("bigquery-kms-name"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", bqSa.Email)),
})
if err != nil {
return err
}
destinationConnectionProfile, err := datastream.NewConnectionProfile(ctx, "destination_connection_profile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
StreamId: pulumi.String("my-stream"),
Location: pulumi.String("us-central1"),
DisplayName: pulumi.String("my stream"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: nil,
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
KmsKeyName: pulumi.String("bigquery-kms-name"),
},
},
},
},
BackfillNone: nil,
}, pulumi.DependsOn([]pulumi.Resource{
bigqueryKeyUser,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;
return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
Name = "my-instance",
DatabaseVersion = "MYSQL_8_0",
Region = "us-central1",
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-f1-micro",
BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
{
Enabled = true,
BinaryLogEnabled = true,
},
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
DeletionProtection = true,
});
var db = new Gcp.Sql.Database("db", new()
{
Instance = instance.Name,
Name = "db",
});
var pwd = new Random.RandomPassword("pwd", new()
{
Length = 16,
Special = false,
});
var user = new Gcp.Sql.User("user", new()
{
Name = "user",
Instance = instance.Name,
Host = "%",
Password = pwd.Result,
});
var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("source_connection_profile", new()
{
DisplayName = "Source connection profile",
Location = "us-central1",
ConnectionProfileId = "source-profile",
MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
{
Hostname = instance.PublicIpAddress,
Username = user.Name,
Password = user.Password,
},
});
var bqSa = Gcp.BigQuery.GetDefaultServiceAccount.Invoke();
var bigqueryKeyUser = new Gcp.Kms.CryptoKeyIAMMember("bigquery_key_user", new()
{
CryptoKeyId = "bigquery-kms-name",
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Member = $"serviceAccount:{bqSa.Apply(getDefaultServiceAccountResult => getDefaultServiceAccountResult.Email)}",
});
var destinationConnectionProfile = new Gcp.Datastream.ConnectionProfile("destination_connection_profile", new()
{
DisplayName = "Connection profile",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var @default = new Gcp.Datastream.Stream("default", new()
{
StreamId = "my-stream",
Location = "us-central1",
DisplayName = "my stream",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = sourceConnectionProfile.Id,
MysqlSourceConfig = null,
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destinationConnectionProfile.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
KmsKeyName = "bigquery-kms-name",
},
},
},
},
BackfillNone = null,
}, new CustomResourceOptions
{
DependsOn =
{
bigqueryKeyUser,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.bigquery.BigqueryFunctions;
import com.pulumi.gcp.bigquery.inputs.GetDefaultServiceAccountArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillNoneArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject();
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.name("my-instance")
.databaseVersion("MYSQL_8_0")
.region("us-central1")
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-f1-micro")
.backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
.enabled(true)
.binaryLogEnabled(true)
.build())
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.deletionProtection(true)
.build());
var db = new Database("db", DatabaseArgs.builder()
.instance(instance.name())
.name("db")
.build());
var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
var user = new User("user", UserArgs.builder()
.name("user")
.instance(instance.name())
.host("%")
.password(pwd.result())
.build());
var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Source connection profile")
.location("us-central1")
.connectionProfileId("source-profile")
.mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
.hostname(instance.publicIpAddress())
.username(user.name())
.password(user.password())
.build())
.build());
final var bqSa = BigqueryFunctions.getDefaultServiceAccount();
var bigqueryKeyUser = new CryptoKeyIAMMember("bigqueryKeyUser", CryptoKeyIAMMemberArgs.builder()
.cryptoKeyId("bigquery-kms-name")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.member(bqSa.applyValue(getDefaultServiceAccountResult -> String.format("serviceAccount:%s", getDefaultServiceAccountResult.email())))
.build());
var destinationConnectionProfile = new ConnectionProfile("destinationConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Connection profile")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.streamId("my-stream")
.location("us-central1")
.displayName("my stream")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(sourceConnectionProfile.id())
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder().build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destinationConnectionProfile.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.kmsKeyName("bigquery-kms-name")
.build())
.build())
.build())
.build())
.backfillNone(StreamBackfillNoneArgs.builder().build())
.build(), CustomResourceOptions.builder()
.dependsOn(bigqueryKeyUser)
.build());
}
}
resources:
instance:
type: gcp:sql:DatabaseInstance
properties:
name: my-instance
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: true
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
name: db
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
user:
type: gcp:sql:User
properties:
name: user
instance: ${instance.name}
host: '%'
password: ${pwd.result}
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
name: source_connection_profile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
bigqueryKeyUser:
type: gcp:kms:CryptoKeyIAMMember
name: bigquery_key_user
properties:
cryptoKeyId: bigquery-kms-name
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
member: serviceAccount:${bqSa.email}
destinationConnectionProfile:
type: gcp:datastream:ConnectionProfile
name: destination_connection_profile
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
default:
type: gcp:datastream:Stream
properties:
streamId: my-stream
location: us-central1
displayName: my stream
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig: {}
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile.id}
bigqueryDestinationConfig:
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
kmsKeyName: bigquery-kms-name
backfillNone: {}
options:
dependsOn:
- ${bigqueryKeyUser}
variables:
project:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
bqSa:
fn::invoke:
Function: gcp:bigquery:getDefaultServiceAccount
Arguments: {}
Create Stream Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Stream(name: string, args: StreamArgs, opts?: CustomResourceOptions);
@overload
def Stream(resource_name: str,
args: StreamArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Stream(resource_name: str,
opts: Optional[ResourceOptions] = None,
destination_config: Optional[StreamDestinationConfigArgs] = None,
display_name: Optional[str] = None,
location: Optional[str] = None,
source_config: Optional[StreamSourceConfigArgs] = None,
stream_id: Optional[str] = None,
backfill_all: Optional[StreamBackfillAllArgs] = None,
backfill_none: Optional[StreamBackfillNoneArgs] = None,
create_without_validation: Optional[bool] = None,
customer_managed_encryption_key: Optional[str] = None,
desired_state: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
project: Optional[str] = None)
func NewStream(ctx *Context, name string, args StreamArgs, opts ...ResourceOption) (*Stream, error)
public Stream(string name, StreamArgs args, CustomResourceOptions? opts = null)
public Stream(String name, StreamArgs args)
public Stream(String name, StreamArgs args, CustomResourceOptions options)
type: gcp:datastream:Stream
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
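To make the constructor pieces above concrete, the sketch below shows how the name, args, and opts parameters fit together in TypeScript. It is a minimal, illustrative sketch only: the stream ID, GCS path, and connection profile paths are placeholder values, and it assumes source and destination connection profiles already exist in us-central1.
import * as gcp from "@pulumi/gcp";

// Minimal sketch: stream MySQL changes into Cloud Storage as Avro files.
// The connection profile paths are placeholders for profiles assumed to already exist.
const exampleStream = new gcp.datastream.Stream("exampleStream", {
    streamId: "example-stream",
    location: "us-central1",
    displayName: "example stream",
    sourceConfig: {
        sourceConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/source-profile",
        mysqlSourceConfig: {},
    },
    destinationConfig: {
        destinationConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/destination-profile",
        gcsDestinationConfig: {
            path: "/events",
            avroFileFormat: {},
        },
    },
    backfillNone: {},
}, {
    // opts: resource options such as protect or dependsOn go here.
    protect: false,
});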
Constructor example
The following reference example uses placeholder values for all input properties.
var streamResource = new Gcp.Datastream.Stream("streamResource", new()
{
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = "string",
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "string",
SingleTargetDataset = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs
{
DatasetId = "string",
},
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "string",
DatasetIdPrefix = "string",
KmsKeyName = "string",
},
},
},
GcsDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigArgs
{
AvroFileFormat = null,
FileRotationInterval = "string",
FileRotationMb = 0,
JsonFileFormat = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs
{
Compression = "string",
SchemaFileFormat = "string",
},
Path = "string",
},
},
DisplayName = "string",
Location = "string",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = "string",
MysqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigArgs
{
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs
{
Database = "string",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs
{
Table = "string",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Collation = "string",
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
PrimaryKey = false,
},
},
},
},
},
},
},
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs
{
Database = "string",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs
{
Table = "string",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Collation = "string",
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
PrimaryKey = false,
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks = 0,
MaxConcurrentCdcTasks = 0,
},
OracleSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigArgs
{
DropLargeObjects = null,
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs
{
Schema = "string",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs
{
Table = "string",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "string",
DataType = "string",
Encoding = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs
{
Schema = "string",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs
{
Table = "string",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "string",
DataType = "string",
Encoding = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks = 0,
MaxConcurrentCdcTasks = 0,
StreamLargeObjects = null,
},
PostgresqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigArgs
{
Publication = "string",
ReplicationSlot = "string",
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs
{
Schema = "string",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "string",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs
{
Schema = "string",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "string",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks = 0,
},
SqlServerSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigArgs
{
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigExcludeObjectsArgs
{
Schemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArgs
{
Schema = "string",
Tables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArgs
{
Table = "string",
Columns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs
{
Schemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs
{
Schema = "string",
Tables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs
{
Table = "string",
Columns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks = 0,
MaxConcurrentCdcTasks = 0,
},
},
StreamId = "string",
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
MysqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsArgs
{
MysqlDatabases = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs
{
Database = "string",
MysqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs
{
Table = "string",
MysqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
{
Collation = "string",
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
PrimaryKey = false,
},
},
},
},
},
},
},
OracleExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs
{
Schema = "string",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs
{
Table = "string",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "string",
DataType = "string",
Encoding = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
PostgresqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs
{
Schema = "string",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "string",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
SqlServerExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllSqlServerExcludedObjectsArgs
{
Schemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllSqlServerExcludedObjectsSchemaArgs
{
Schema = "string",
Tables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllSqlServerExcludedObjectsSchemaTableArgs
{
Table = "string",
Columns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArgs
{
Column = "string",
DataType = "string",
Length = 0,
Nullable = false,
OrdinalPosition = 0,
Precision = 0,
PrimaryKey = false,
Scale = 0,
},
},
},
},
},
},
},
},
BackfillNone = null,
CreateWithoutValidation = false,
CustomerManagedEncryptionKey = "string",
DesiredState = "string",
Labels =
{
{ "string", "string" },
},
Project = "string",
});
example, err := datastream.NewStream(ctx, "streamResource", &datastream.StreamArgs{
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: pulumi.String("string"),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("string"),
SingleTargetDataset: &datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs{
DatasetId: pulumi.String("string"),
},
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("string"),
DatasetIdPrefix: pulumi.String("string"),
KmsKeyName: pulumi.String("string"),
},
},
},
GcsDestinationConfig: &datastream.StreamDestinationConfigGcsDestinationConfigArgs{
AvroFileFormat: nil,
FileRotationInterval: pulumi.String("string"),
FileRotationMb: pulumi.Int(0),
JsonFileFormat: &datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs{
Compression: pulumi.String("string"),
SchemaFileFormat: pulumi.String("string"),
},
Path: pulumi.String("string"),
},
},
DisplayName: pulumi.String("string"),
Location: pulumi.String("string"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: pulumi.String("string"),
MysqlSourceConfig: &datastream.StreamSourceConfigMysqlSourceConfigArgs{
ExcludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("string"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("string"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Collation: pulumi.String("string"),
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
},
},
},
},
},
},
},
IncludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("string"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("string"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Collation: pulumi.String("string"),
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks: pulumi.Int(0),
MaxConcurrentCdcTasks: pulumi.Int(0),
},
OracleSourceConfig: &datastream.StreamSourceConfigOracleSourceConfigArgs{
DropLargeObjects: nil,
ExcludeObjects: &datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs{
Schema: pulumi.String("string"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("string"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Encoding: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
IncludeObjects: &datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs{
Schema: pulumi.String("string"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("string"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Encoding: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks: pulumi.Int(0),
MaxConcurrentCdcTasks: pulumi.Int(0),
StreamLargeObjects: nil,
},
PostgresqlSourceConfig: &datastream.StreamSourceConfigPostgresqlSourceConfigArgs{
Publication: pulumi.String("string"),
ReplicationSlot: pulumi.String("string"),
ExcludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("string"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("string"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
IncludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("string"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("string"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks: pulumi.Int(0),
},
SqlServerSourceConfig: &datastream.StreamSourceConfigSqlServerSourceConfigArgs{
ExcludeObjects: &datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsArgs{
Schemas: datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArray{
&datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArgs{
Schema: pulumi.String("string"),
Tables: datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArray{
&datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArgs{
Table: pulumi.String("string"),
Columns: datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArray{
&datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
IncludeObjects: &datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs{
Schemas: datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArray{
&datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs{
Schema: pulumi.String("string"),
Tables: datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArray{
&datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs{
Table: pulumi.String("string"),
Columns: datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArray{
&datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
MaxConcurrentBackfillTasks: pulumi.Int(0),
MaxConcurrentCdcTasks: pulumi.Int(0),
},
},
StreamId: pulumi.String("string"),
BackfillAll: &datastream.StreamBackfillAllArgs{
MysqlExcludedObjects: &datastream.StreamBackfillAllMysqlExcludedObjectsArgs{
MysqlDatabases: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs{
Database: pulumi.String("string"),
MysqlTables: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("string"),
MysqlColumns: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Collation: pulumi.String("string"),
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
},
},
},
},
},
},
},
OracleExcludedObjects: &datastream.StreamBackfillAllOracleExcludedObjectsArgs{
OracleSchemas: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs{
Schema: pulumi.String("string"),
OracleTables: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("string"),
OracleColumns: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Encoding: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
PostgresqlExcludedObjects: &datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs{
PostgresqlSchemas: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("string"),
PostgresqlTables: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("string"),
PostgresqlColumns: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
SqlServerExcludedObjects: &datastream.StreamBackfillAllSqlServerExcludedObjectsArgs{
Schemas: datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaArray{
&datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaArgs{
Schema: pulumi.String("string"),
Tables: datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableArray{
&datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableArgs{
Table: pulumi.String("string"),
Columns: datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArray{
&datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArgs{
Column: pulumi.String("string"),
DataType: pulumi.String("string"),
Length: pulumi.Int(0),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
Precision: pulumi.Int(0),
PrimaryKey: pulumi.Bool(false),
Scale: pulumi.Int(0),
},
},
},
},
},
},
},
},
BackfillNone: nil,
CreateWithoutValidation: pulumi.Bool(false),
CustomerManagedEncryptionKey: pulumi.String("string"),
DesiredState: pulumi.String("string"),
Labels: pulumi.StringMap{
"string": pulumi.String("string"),
},
Project: pulumi.String("string"),
})
var streamResource = new Stream("streamResource", StreamArgs.builder()
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile("string")
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("string")
.singleTargetDataset(StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs.builder()
.datasetId("string")
.build())
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("string")
.datasetIdPrefix("string")
.kmsKeyName("string")
.build())
.build())
.build())
.gcsDestinationConfig(StreamDestinationConfigGcsDestinationConfigArgs.builder()
.avroFileFormat()
.fileRotationInterval("string")
.fileRotationMb(0)
.jsonFileFormat(StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs.builder()
.compression("string")
.schemaFileFormat("string")
.build())
.path("string")
.build())
.build())
.displayName("string")
.location("string")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile("string")
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder()
.excludeObjects(StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs.builder()
.database("string")
.mysqlTables(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("string")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.collation("string")
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.primaryKey(false)
.build())
.build())
.build())
.build())
.includeObjects(StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs.builder()
.database("string")
.mysqlTables(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("string")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.collation("string")
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.primaryKey(false)
.build())
.build())
.build())
.build())
.maxConcurrentBackfillTasks(0)
.maxConcurrentCdcTasks(0)
.build())
.oracleSourceConfig(StreamSourceConfigOracleSourceConfigArgs.builder()
.dropLargeObjects()
.excludeObjects(StreamSourceConfigOracleSourceConfigExcludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs.builder()
.schema("string")
.oracleTables(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs.builder()
.table("string")
.oracleColumns(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("string")
.dataType("string")
.encoding("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.includeObjects(StreamSourceConfigOracleSourceConfigIncludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs.builder()
.schema("string")
.oracleTables(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs.builder()
.table("string")
.oracleColumns(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("string")
.dataType("string")
.encoding("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.maxConcurrentBackfillTasks(0)
.maxConcurrentCdcTasks(0)
.streamLargeObjects()
.build())
.postgresqlSourceConfig(StreamSourceConfigPostgresqlSourceConfigArgs.builder()
.publication("string")
.replicationSlot("string")
.excludeObjects(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs.builder()
.schema("string")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("string")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.includeObjects(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs.builder()
.schema("string")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("string")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.maxConcurrentBackfillTasks(0)
.build())
.sqlServerSourceConfig(StreamSourceConfigSqlServerSourceConfigArgs.builder()
.excludeObjects(StreamSourceConfigSqlServerSourceConfigExcludeObjectsArgs.builder()
.schemas(StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArgs.builder()
.schema("string")
.tables(StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArgs.builder()
.table("string")
.columns(StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.includeObjects(StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs.builder()
.schemas(StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs.builder()
.schema("string")
.tables(StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs.builder()
.table("string")
.columns(StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.maxConcurrentBackfillTasks(0)
.maxConcurrentCdcTasks(0)
.build())
.build())
.streamId("string")
.backfillAll(StreamBackfillAllArgs.builder()
.mysqlExcludedObjects(StreamBackfillAllMysqlExcludedObjectsArgs.builder()
.mysqlDatabases(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs.builder()
.database("string")
.mysqlTables(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("string")
.mysqlColumns(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.collation("string")
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.primaryKey(false)
.build())
.build())
.build())
.build())
.oracleExcludedObjects(StreamBackfillAllOracleExcludedObjectsArgs.builder()
.oracleSchemas(StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs.builder()
.schema("string")
.oracleTables(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs.builder()
.table("string")
.oracleColumns(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("string")
.dataType("string")
.encoding("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.postgresqlExcludedObjects(StreamBackfillAllPostgresqlExcludedObjectsArgs.builder()
.postgresqlSchemas(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs.builder()
.schema("string")
.postgresqlTables(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("string")
.postgresqlColumns(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.sqlServerExcludedObjects(StreamBackfillAllSqlServerExcludedObjectsArgs.builder()
.schemas(StreamBackfillAllSqlServerExcludedObjectsSchemaArgs.builder()
.schema("string")
.tables(StreamBackfillAllSqlServerExcludedObjectsSchemaTableArgs.builder()
.table("string")
.columns(StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArgs.builder()
.column("string")
.dataType("string")
.length(0)
.nullable(false)
.ordinalPosition(0)
.precision(0)
.primaryKey(false)
.scale(0)
.build())
.build())
.build())
.build())
.build())
.backfillNone()
.createWithoutValidation(false)
.customerManagedEncryptionKey("string")
.desiredState("string")
.labels(Map.of("string", "string"))
.project("string")
.build());
stream_resource = gcp.datastream.Stream("streamResource",
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile="string",
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="string",
single_target_dataset=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs(
dataset_id="string",
),
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="string",
dataset_id_prefix="string",
kms_key_name="string",
),
),
),
gcs_destination_config=gcp.datastream.StreamDestinationConfigGcsDestinationConfigArgs(
avro_file_format=gcp.datastream.StreamDestinationConfigGcsDestinationConfigAvroFileFormatArgs(),
file_rotation_interval="string",
file_rotation_mb=0,
json_file_format=gcp.datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs(
compression="string",
schema_file_format="string",
),
path="string",
),
),
display_name="string",
location="string",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile="string",
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(
exclude_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs(
database="string",
mysql_tables=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs(
table="string",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
collation="string",
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
primary_key=False,
)],
)],
)],
),
include_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs(
database="string",
mysql_tables=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs(
table="string",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
collation="string",
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
primary_key=False,
)],
)],
)],
),
max_concurrent_backfill_tasks=0,
max_concurrent_cdc_tasks=0,
),
oracle_source_config=gcp.datastream.StreamSourceConfigOracleSourceConfigArgs(
drop_large_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs(),
exclude_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs(
schema="string",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs(
table="string",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="string",
data_type="string",
encoding="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
include_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs(
schema="string",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs(
table="string",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="string",
data_type="string",
encoding="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
max_concurrent_backfill_tasks=0,
max_concurrent_cdc_tasks=0,
stream_large_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigStreamLargeObjectsArgs(),
),
postgresql_source_config=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigArgs(
publication="string",
replication_slot="string",
exclude_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs(
schema="string",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="string",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
include_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs(
schema="string",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="string",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
max_concurrent_backfill_tasks=0,
),
sql_server_source_config=gcp.datastream.StreamSourceConfigSqlServerSourceConfigArgs(
exclude_objects=gcp.datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsArgs(
schemas=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArgs(
schema="string",
tables=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArgs(
table="string",
columns=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
include_objects=gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs(
schemas=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs(
schema="string",
tables=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs(
table="string",
columns=[gcp.datastream.StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
max_concurrent_backfill_tasks=0,
max_concurrent_cdc_tasks=0,
),
),
stream_id="string",
backfill_all=gcp.datastream.StreamBackfillAllArgs(
mysql_excluded_objects=gcp.datastream.StreamBackfillAllMysqlExcludedObjectsArgs(
mysql_databases=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs(
database="string",
mysql_tables=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs(
table="string",
mysql_columns=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
collation="string",
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
primary_key=False,
)],
)],
)],
),
oracle_excluded_objects=gcp.datastream.StreamBackfillAllOracleExcludedObjectsArgs(
oracle_schemas=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs(
schema="string",
oracle_tables=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs(
table="string",
oracle_columns=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs(
column="string",
data_type="string",
encoding="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
postgresql_excluded_objects=gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs(
schema="string",
postgresql_tables=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs(
table="string",
postgresql_columns=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
sql_server_excluded_objects=gcp.datastream.StreamBackfillAllSqlServerExcludedObjectsArgs(
schemas=[gcp.datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaArgs(
schema="string",
tables=[gcp.datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableArgs(
table="string",
columns=[gcp.datastream.StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArgs(
column="string",
data_type="string",
length=0,
nullable=False,
ordinal_position=0,
precision=0,
primary_key=False,
scale=0,
)],
)],
)],
),
),
backfill_none=gcp.datastream.StreamBackfillNoneArgs(),
create_without_validation=False,
customer_managed_encryption_key="string",
desired_state="string",
labels={
"string": "string",
},
project="string")
const streamResource = new gcp.datastream.Stream("streamResource", {
destinationConfig: {
destinationConnectionProfile: "string",
bigqueryDestinationConfig: {
dataFreshness: "string",
singleTargetDataset: {
datasetId: "string",
},
sourceHierarchyDatasets: {
datasetTemplate: {
location: "string",
datasetIdPrefix: "string",
kmsKeyName: "string",
},
},
},
gcsDestinationConfig: {
avroFileFormat: {},
fileRotationInterval: "string",
fileRotationMb: 0,
jsonFileFormat: {
compression: "string",
schemaFileFormat: "string",
},
path: "string",
},
},
displayName: "string",
location: "string",
sourceConfig: {
sourceConnectionProfile: "string",
mysqlSourceConfig: {
excludeObjects: {
mysqlDatabases: [{
database: "string",
mysqlTables: [{
table: "string",
mysqlColumns: [{
collation: "string",
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
primaryKey: false,
}],
}],
}],
},
includeObjects: {
mysqlDatabases: [{
database: "string",
mysqlTables: [{
table: "string",
mysqlColumns: [{
collation: "string",
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
primaryKey: false,
}],
}],
}],
},
maxConcurrentBackfillTasks: 0,
maxConcurrentCdcTasks: 0,
},
oracleSourceConfig: {
dropLargeObjects: {},
excludeObjects: {
oracleSchemas: [{
schema: "string",
oracleTables: [{
table: "string",
oracleColumns: [{
column: "string",
dataType: "string",
encoding: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
includeObjects: {
oracleSchemas: [{
schema: "string",
oracleTables: [{
table: "string",
oracleColumns: [{
column: "string",
dataType: "string",
encoding: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
maxConcurrentBackfillTasks: 0,
maxConcurrentCdcTasks: 0,
streamLargeObjects: {},
},
postgresqlSourceConfig: {
publication: "string",
replicationSlot: "string",
excludeObjects: {
postgresqlSchemas: [{
schema: "string",
postgresqlTables: [{
table: "string",
postgresqlColumns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
includeObjects: {
postgresqlSchemas: [{
schema: "string",
postgresqlTables: [{
table: "string",
postgresqlColumns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
maxConcurrentBackfillTasks: 0,
},
sqlServerSourceConfig: {
excludeObjects: {
schemas: [{
schema: "string",
tables: [{
table: "string",
columns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
includeObjects: {
schemas: [{
schema: "string",
tables: [{
table: "string",
columns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
maxConcurrentBackfillTasks: 0,
maxConcurrentCdcTasks: 0,
},
},
streamId: "string",
backfillAll: {
mysqlExcludedObjects: {
mysqlDatabases: [{
database: "string",
mysqlTables: [{
table: "string",
mysqlColumns: [{
collation: "string",
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
primaryKey: false,
}],
}],
}],
},
oracleExcludedObjects: {
oracleSchemas: [{
schema: "string",
oracleTables: [{
table: "string",
oracleColumns: [{
column: "string",
dataType: "string",
encoding: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
postgresqlExcludedObjects: {
postgresqlSchemas: [{
schema: "string",
postgresqlTables: [{
table: "string",
postgresqlColumns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
sqlServerExcludedObjects: {
schemas: [{
schema: "string",
tables: [{
table: "string",
columns: [{
column: "string",
dataType: "string",
length: 0,
nullable: false,
ordinalPosition: 0,
precision: 0,
primaryKey: false,
scale: 0,
}],
}],
}],
},
},
backfillNone: {},
createWithoutValidation: false,
customerManagedEncryptionKey: "string",
desiredState: "string",
labels: {
string: "string",
},
project: "string",
});
type: gcp:datastream:Stream
properties:
backfillAll:
mysqlExcludedObjects:
mysqlDatabases:
- database: string
mysqlTables:
- mysqlColumns:
- collation: string
column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
primaryKey: false
table: string
oracleExcludedObjects:
oracleSchemas:
- oracleTables:
- oracleColumns:
- column: string
dataType: string
encoding: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
postgresqlExcludedObjects:
postgresqlSchemas:
- postgresqlTables:
- postgresqlColumns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
sqlServerExcludedObjects:
schemas:
- schema: string
tables:
- columns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
backfillNone: {}
createWithoutValidation: false
customerManagedEncryptionKey: string
desiredState: string
destinationConfig:
bigqueryDestinationConfig:
dataFreshness: string
singleTargetDataset:
datasetId: string
sourceHierarchyDatasets:
datasetTemplate:
datasetIdPrefix: string
kmsKeyName: string
location: string
destinationConnectionProfile: string
gcsDestinationConfig:
avroFileFormat: {}
fileRotationInterval: string
fileRotationMb: 0
jsonFileFormat:
compression: string
schemaFileFormat: string
path: string
displayName: string
labels:
string: string
location: string
project: string
sourceConfig:
mysqlSourceConfig:
excludeObjects:
mysqlDatabases:
- database: string
mysqlTables:
- mysqlColumns:
- collation: string
column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
primaryKey: false
table: string
includeObjects:
mysqlDatabases:
- database: string
mysqlTables:
- mysqlColumns:
- collation: string
column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
primaryKey: false
table: string
maxConcurrentBackfillTasks: 0
maxConcurrentCdcTasks: 0
oracleSourceConfig:
dropLargeObjects: {}
excludeObjects:
oracleSchemas:
- oracleTables:
- oracleColumns:
- column: string
dataType: string
encoding: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
includeObjects:
oracleSchemas:
- oracleTables:
- oracleColumns:
- column: string
dataType: string
encoding: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
maxConcurrentBackfillTasks: 0
maxConcurrentCdcTasks: 0
streamLargeObjects: {}
postgresqlSourceConfig:
excludeObjects:
postgresqlSchemas:
- postgresqlTables:
- postgresqlColumns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
includeObjects:
postgresqlSchemas:
- postgresqlTables:
- postgresqlColumns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
schema: string
maxConcurrentBackfillTasks: 0
publication: string
replicationSlot: string
sourceConnectionProfile: string
sqlServerSourceConfig:
excludeObjects:
schemas:
- schema: string
tables:
- columns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
includeObjects:
schemas:
- schema: string
tables:
- columns:
- column: string
dataType: string
length: 0
nullable: false
ordinalPosition: 0
precision: 0
primaryKey: false
scale: 0
table: string
maxConcurrentBackfillTasks: 0
maxConcurrentCdcTasks: 0
streamId: string
Stream Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Stream resource accepts the following input properties:
- DestinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- DisplayName string - Display name.
- Location string - The name of the location this stream is located in.
- SourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- StreamId string - The stream identifier.
- BackfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- BackfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- CreateWithoutValidation bool - Create the stream without validating it.
- CustomerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- DesiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- Labels Dictionary<string, string> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- Project string
- DestinationConfig StreamDestinationConfigArgs - Destination connection profile configuration. Structure is documented below.
- DisplayName string - Display name.
- Location string - The name of the location this stream is located in.
- SourceConfig StreamSourceConfigArgs - Source connection profile configuration. Structure is documented below.
- StreamId string - The stream identifier.
- BackfillAll StreamBackfillAllArgs - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- BackfillNone StreamBackfillNoneArgs - Backfill strategy to disable automatic backfill for the Stream's objects.
- CreateWithoutValidation bool - Create the stream without validating it.
- CustomerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- DesiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- Labels map[string]string - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- Project string
- destinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- displayName String - Display name.
- location String - The name of the location this stream is located in.
- sourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- streamId String - The stream identifier.
- backfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation Boolean - Create the stream without validating it.
- customerManagedEncryptionKey String - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState String - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- labels Map<String,String> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- project String
- destinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- displayName string - Display name.
- location string - The name of the location this stream is located in.
- sourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- streamId string - The stream identifier.
- backfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation boolean - Create the stream without validating it.
- customerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- labels {[key: string]: string} - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- project string
- destination_config StreamDestinationConfigArgs - Destination connection profile configuration. Structure is documented below.
- display_name str - Display name.
- location str - The name of the location this stream is located in.
- source_config StreamSourceConfigArgs - Source connection profile configuration. Structure is documented below.
- stream_id str - The stream identifier.
- backfill_all StreamBackfillAllArgs - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfill_none StreamBackfillNoneArgs - Backfill strategy to disable automatic backfill for the Stream's objects.
- create_without_validation bool - Create the stream without validating it.
- customer_managed_encryption_key str - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired_state str - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- labels Mapping[str, str] - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- project str
- destinationConfig Property Map - Destination connection profile configuration. Structure is documented below.
- displayName String - Display name.
- location String - The name of the location this stream is located in.
- sourceConfig Property Map - Source connection profile configuration. Structure is documented below.
- streamId String - The stream identifier.
- backfillAll Property Map - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone Property Map - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation Boolean - Create the stream without validating it.
- customerManagedEncryptionKey String - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState String - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- labels Map<String> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- project String
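As a quick orientation, here is a minimal TypeScript sketch that supplies only the required inputs plus desiredState and backfillNone. The project, connection profile IDs, and GCS path are placeholders, and the referenced connection profiles are assumed to already exist.
import * as gcp from "@pulumi/gcp";
// Minimal sketch: required inputs only, plus desiredState and backfillNone.
// The connection profile IDs below are placeholders for profiles created elsewhere.
const minimal = new gcp.datastream.Stream("minimal", {
    streamId: "orders-stream",
    location: "us-central1",
    displayName: "orders stream",
    desiredState: "RUNNING", // start the stream once it has been created
    backfillNone: {},        // disable automatic backfill entirely
    sourceConfig: {
        sourceConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/source-profile",
        mysqlSourceConfig: {}, // stream every database, table and column
    },
    destinationConfig: {
        destinationConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/destination-profile",
        gcsDestinationConfig: {
            path: "/orders",
            avroFileFormat: {},
        },
    },
});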
Outputs
All input properties are implicitly available as output properties. Additionally, the Stream resource produces the following output properties:
- EffectiveLabels Dictionary<string, string> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string - The provider-assigned unique ID for this managed resource.
- Name string - The stream's name.
- PulumiLabels Dictionary<string, string> - The combination of labels configured directly on the resource and default labels configured on the provider.
- State string - The state of the stream.
- EffectiveLabels map[string]string - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string - The provider-assigned unique ID for this managed resource.
- Name string - The stream's name.
- PulumiLabels map[string]string - The combination of labels configured directly on the resource and default labels configured on the provider.
- State string - The state of the stream.
- effectiveLabels Map<String,String> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String - The provider-assigned unique ID for this managed resource.
- name String - The stream's name.
- pulumiLabels Map<String,String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- state String - The state of the stream.
- effectiveLabels {[key: string]: string} - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id string - The provider-assigned unique ID for this managed resource.
- name string - The stream's name.
- pulumiLabels {[key: string]: string} - The combination of labels configured directly on the resource and default labels configured on the provider.
- state string - The state of the stream.
- effective_labels Mapping[str, str] - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id str - The provider-assigned unique ID for this managed resource.
- name str - The stream's name.
- pulumi_labels Mapping[str, str] - The combination of labels configured directly on the resource and default labels configured on the provider.
- state str - The state of the stream.
- effectiveLabels Map<String> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String - The provider-assigned unique ID for this managed resource.
- name String - The stream's name.
- pulumiLabels Map<String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- state String - The state of the stream.
Look up Existing Stream Resource
Get an existing Stream resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StreamState, opts?: CustomResourceOptions): Stream
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
backfill_all: Optional[StreamBackfillAllArgs] = None,
backfill_none: Optional[StreamBackfillNoneArgs] = None,
create_without_validation: Optional[bool] = None,
customer_managed_encryption_key: Optional[str] = None,
desired_state: Optional[str] = None,
destination_config: Optional[StreamDestinationConfigArgs] = None,
display_name: Optional[str] = None,
effective_labels: Optional[Mapping[str, str]] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
pulumi_labels: Optional[Mapping[str, str]] = None,
source_config: Optional[StreamSourceConfigArgs] = None,
state: Optional[str] = None,
stream_id: Optional[str] = None) -> Stream
func GetStream(ctx *Context, name string, id IDInput, state *StreamState, opts ...ResourceOption) (*Stream, error)
public static Stream Get(string name, Input<string> id, StreamState? state, CustomResourceOptions? opts = null)
public static Stream get(String name, Output<String> id, StreamState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
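For example, a hedged TypeScript sketch of looking up an existing stream; the fully qualified ID below is a placeholder in the resource's projects/{project}/locations/{location}/streams/{stream_id} format.
import * as gcp from "@pulumi/gcp";
// Look up a stream that was created outside of this program.
const existing = gcp.datastream.Stream.get(
    "existing-stream",
    "projects/my-project/locations/us-central1/streams/my-stream",
);
export const existingStreamState = existing.state;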
- BackfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- BackfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- CreateWithoutValidation bool - Create the stream without validating it.
- CustomerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- DesiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- DestinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- DisplayName string - Display name.
- EffectiveLabels Dictionary<string, string> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Labels Dictionary<string, string> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- Location string - The name of the location this stream is located in.
- Name string - The stream's name.
- Project string
- PulumiLabels Dictionary<string, string> - The combination of labels configured directly on the resource and default labels configured on the provider.
- SourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- State string - The state of the stream.
- StreamId string - The stream identifier.
- BackfillAll StreamBackfillAllArgs - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- BackfillNone StreamBackfillNoneArgs - Backfill strategy to disable automatic backfill for the Stream's objects.
- CreateWithoutValidation bool - Create the stream without validating it.
- CustomerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- DesiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- DestinationConfig StreamDestinationConfigArgs - Destination connection profile configuration. Structure is documented below.
- DisplayName string - Display name.
- EffectiveLabels map[string]string - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Labels map[string]string - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- Location string - The name of the location this stream is located in.
- Name string - The stream's name.
- Project string
- PulumiLabels map[string]string - The combination of labels configured directly on the resource and default labels configured on the provider.
- SourceConfig StreamSourceConfigArgs - Source connection profile configuration. Structure is documented below.
- State string - The state of the stream.
- StreamId string - The stream identifier.
- backfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation Boolean - Create the stream without validating it.
- customerManagedEncryptionKey String - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState String - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- destinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- displayName String - Display name.
- effectiveLabels Map<String,String> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Map<String,String> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- location String - The name of the location this stream is located in.
- name String - The stream's name.
- project String
- pulumiLabels Map<String,String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- sourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- state String - The state of the stream.
- streamId String - The stream identifier.
- backfillAll StreamBackfillAll - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone StreamBackfillNone - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation boolean - Create the stream without validating it.
- customerManagedEncryptionKey string - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState string - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- destinationConfig StreamDestinationConfig - Destination connection profile configuration. Structure is documented below.
- displayName string - Display name.
- effectiveLabels {[key: string]: string} - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels {[key: string]: string} - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- location string - The name of the location this stream is located in.
- name string - The stream's name.
- project string
- pulumiLabels {[key: string]: string} - The combination of labels configured directly on the resource and default labels configured on the provider.
- sourceConfig StreamSourceConfig - Source connection profile configuration. Structure is documented below.
- state string - The state of the stream.
- streamId string - The stream identifier.
- backfill_all StreamBackfillAllArgs - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfill_none StreamBackfillNoneArgs - Backfill strategy to disable automatic backfill for the Stream's objects.
- create_without_validation bool - Create the stream without validating it.
- customer_managed_encryption_key str - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired_state str - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- destination_config StreamDestinationConfigArgs - Destination connection profile configuration. Structure is documented below.
- display_name str - Display name.
- effective_labels Mapping[str, str] - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Mapping[str, str] - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- location str - The name of the location this stream is located in.
- name str - The stream's name.
- project str
- pulumi_labels Mapping[str, str] - The combination of labels configured directly on the resource and default labels configured on the provider.
- source_config StreamSourceConfigArgs - Source connection profile configuration. Structure is documented below.
- state str - The state of the stream.
- stream_id str - The stream identifier.
- backfillAll Property Map - Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded.
- backfillNone Property Map - Backfill strategy to disable automatic backfill for the Stream's objects.
- createWithoutValidation Boolean - Create the stream without validating it.
- customerManagedEncryptionKey String - A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desiredState String - Desired state of the Stream. Set this field to 'RUNNING' to start the stream, and 'PAUSED' to pause the stream.
- destinationConfig Property Map - Destination connection profile configuration. Structure is documented below.
- displayName String - Display name.
- effectiveLabels Map<String> - All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Map<String> - Labels. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- location String - The name of the location this stream is located in.
- name String - The stream's name.
- project String
- pulumiLabels Map<String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- sourceConfig Property Map - Source connection profile configuration. Structure is documented below.
- state String - The state of the stream.
- streamId String - The stream identifier.
Supporting Types
StreamBackfillAll, StreamBackfillAllArgs
- MysqlExcludedObjects StreamBackfillAllMysqlExcludedObjects - MySQL data source objects to avoid backfilling. Structure is documented below.
- OracleExcludedObjects StreamBackfillAllOracleExcludedObjects - Oracle data source objects to avoid backfilling. Structure is documented below.
- PostgresqlExcludedObjects StreamBackfillAllPostgresqlExcludedObjects - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- SqlServerExcludedObjects StreamBackfillAllSqlServerExcludedObjects - SQL Server data source objects to avoid backfilling. Structure is documented below.
- MysqlExcludedObjects StreamBackfillAllMysqlExcludedObjects - MySQL data source objects to avoid backfilling. Structure is documented below.
- OracleExcludedObjects StreamBackfillAllOracleExcludedObjects - Oracle data source objects to avoid backfilling. Structure is documented below.
- PostgresqlExcludedObjects StreamBackfillAllPostgresqlExcludedObjects - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- SqlServerExcludedObjects StreamBackfillAllSqlServerExcludedObjects - SQL Server data source objects to avoid backfilling. Structure is documented below.
- mysqlExcludedObjects StreamBackfillAllMysqlExcludedObjects - MySQL data source objects to avoid backfilling. Structure is documented below.
- oracleExcludedObjects StreamBackfillAllOracleExcludedObjects - Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresqlExcludedObjects StreamBackfillAllPostgresqlExcludedObjects - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- sqlServerExcludedObjects StreamBackfillAllSqlServerExcludedObjects - SQL Server data source objects to avoid backfilling. Structure is documented below.
- mysqlExcludedObjects StreamBackfillAllMysqlExcludedObjects - MySQL data source objects to avoid backfilling. Structure is documented below.
- oracleExcludedObjects StreamBackfillAllOracleExcludedObjects - Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresqlExcludedObjects StreamBackfillAllPostgresqlExcludedObjects - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- sqlServerExcludedObjects StreamBackfillAllSqlServerExcludedObjects - SQL Server data source objects to avoid backfilling. Structure is documented below.
- mysql_excluded_objects StreamBackfillAllMysqlExcludedObjects - MySQL data source objects to avoid backfilling. Structure is documented below.
- oracle_excluded_objects StreamBackfillAllOracleExcludedObjects - Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresql_excluded_objects StreamBackfillAllPostgresqlExcludedObjects - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- sql_server_excluded_objects StreamBackfillAllSqlServerExcludedObjects - SQL Server data source objects to avoid backfilling. Structure is documented below.
- mysqlExcludedObjects Property Map - MySQL data source objects to avoid backfilling. Structure is documented below.
- oracleExcludedObjects Property Map - Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresqlExcludedObjects Property Map - PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- sqlServerExcludedObjects Property Map - SQL Server data source objects to avoid backfilling. Structure is documented below.
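As an illustrative sketch (project, profile, and object names are placeholders), backfillAll backfills every object while the nested excluded-objects blocks carve out exceptions, here a single MySQL table:
import * as gcp from "@pulumi/gcp";
// Backfill everything except one MySQL table.
const withBackfill = new gcp.datastream.Stream("with-backfill", {
    streamId: "backfill-stream",
    location: "us-central1",
    displayName: "backfill stream",
    sourceConfig: {
        sourceConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/source-profile",
        mysqlSourceConfig: {},
    },
    destinationConfig: {
        destinationConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/destination-profile",
        gcsDestinationConfig: {
            path: "/backfill",
            avroFileFormat: {},
        },
    },
    backfillAll: {
        mysqlExcludedObjects: {
            mysqlDatabases: [{
                database: "my-database",
                // Leaving mysqlColumns unspecified excludes the whole table from backfill.
                mysqlTables: [{ table: "audit_log" }],
            }],
        },
    },
});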
StreamBackfillAllMysqlExcludedObjects, StreamBackfillAllMysqlExcludedObjectsArgs
- MysqlDatabases List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- MysqlDatabases []StreamBackfillAllMysqlExcludedObjectsMysqlDatabase - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- mysqlDatabases StreamBackfillAllMysqlExcludedObjectsMysqlDatabase[] - MySQL databases on the server. Structure is documented below.
- mysql_databases Sequence[StreamBackfillAllMysqlExcludedObjectsMysqlDatabase] - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<Property Map> - MySQL databases on the server. Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabase, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs
- Database string - Database name.
- MysqlTables List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- Database string - Database name.
- MysqlTables []StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable - Tables in the database. Structure is documented below.
- database String - Database name.
- mysqlTables List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- database string - Database name.
- mysqlTables StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable[] - Tables in the database. Structure is documented below.
- database str - Database name.
- mysql_tables Sequence[StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable] - Tables in the database. Structure is documented below.
- database String - Database name.
- mysqlTables List<Property Map> - Tables in the database. Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs
- Table string - Table name.
- MysqlColumns List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- MysqlColumns []StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- mysqlColumns List<StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- mysqlColumns StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn[] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- mysql_columns Sequence[StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- mysqlColumns List<Property Map> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
- Collation string - Column collation.
- Column string - Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Collation string - Column collation.
- Column string - Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- collation String - Column collation.
- column String - Column name.
- dataType String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Integer - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Integer - The ordinal position of the column in the table.
- primaryKey Boolean - Whether or not the column represents a primary key.
- collation string - Column collation.
- column string - Column name.
- dataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length number - (Output) Column length.
- nullable boolean - Whether or not the column can accept a null value.
- ordinalPosition number - The ordinal position of the column in the table.
- primaryKey boolean - Whether or not the column represents a primary key.
- collation str - Column collation.
- column str - Column name.
- data_type str - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length int - (Output) Column length.
- nullable bool - Whether or not the column can accept a null value.
- ordinal_position int - The ordinal position of the column in the table.
- primary_key bool - Whether or not the column represents a primary key.
- collation String - Column collation.
- column String - Column name.
- dataType String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Number - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Number - The ordinal position of the column in the table.
- primaryKey Boolean - Whether or not the column represents a primary key.
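The same MySQL column shape is used under sourceConfig's include/exclude objects. A sketch with placeholder names that restricts the stream to two columns of one table; per the list above, length is output-only, so only the remaining fields are meaningful as inputs:
import * as gcp from "@pulumi/gcp";
// Stream only the id and total columns of the orders table.
const columnFiltered = new gcp.datastream.Stream("column-filtered", {
    streamId: "column-filtered-stream",
    location: "us-central1",
    displayName: "column filtered stream",
    backfillNone: {},
    sourceConfig: {
        sourceConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/source-profile",
        mysqlSourceConfig: {
            includeObjects: {
                mysqlDatabases: [{
                    database: "my-database",
                    mysqlTables: [{
                        table: "orders",
                        mysqlColumns: [
                            { column: "id", dataType: "INT" },
                            { column: "total", dataType: "DECIMAL" },
                        ],
                    }],
                }],
            },
        },
    },
    destinationConfig: {
        destinationConnectionProfile: "projects/my-project/locations/us-central1/connectionProfiles/destination-profile",
        gcsDestinationConfig: {
            path: "/orders",
            avroFileFormat: {},
        },
    },
});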
StreamBackfillAllOracleExcludedObjects, StreamBackfillAllOracleExcludedObjectsArgs
- OracleSchemas List<StreamBackfillAllOracleExcludedObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- OracleSchemas []StreamBackfillAllOracleExcludedObjectsOracleSchema - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<StreamBackfillAllOracleExcludedObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas StreamBackfillAllOracleExcludedObjectsOracleSchema[] - Oracle schemas/databases in the database server. Structure is documented below.
- oracle_schemas Sequence[StreamBackfillAllOracleExcludedObjectsOracleSchema] - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<Property Map> - Oracle schemas/databases in the database server. Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchema, StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs
- Schema string - Schema name.
- OracleTables List<StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- Schema string - Schema name.
- OracleTables []StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- schema string - Schema name.
- oracleTables StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable[] - Tables in the database. Structure is documented below.
- schema str - Schema name.
- oracle_tables Sequence[StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable] - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<Property Map> - Tables in the database. Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable, StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs
- Table string - Table name.
- OracleColumns List<StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- OracleColumns []StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- oracleColumns StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn[] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- oracle_columns Sequence[StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<Property Map> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn, StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs
- Column string
- Column name.
- Data
Type string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string
- (Output) Column encoding.
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- Ordinal
Position int - (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- Primary
Key bool - (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- Column string
- Column name.
- Data
Type string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string
- (Output) Column encoding.
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- Ordinal
Position int - (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- Primary
Key bool - (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- column String
- Column name.
- data
Type String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String
- (Output) Column encoding.
- length Integer
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinal
Position Integer - (Output) The ordinal position of the column in the table.
- precision Integer
- (Output) Column precision.
- primary
Key Boolean - (Output) Whether or not the column represents a primary key.
- scale Integer
- (Output) Column scale.
- column string
- Column name.
- data
Type string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding string
- (Output) Column encoding.
- length number
- (Output) Column length.
- nullable boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition number - (Output) The ordinal position of the column in the table.
- precision number
- (Output) Column precision.
- primaryKey boolean - (Output) Whether or not the column represents a primary key.
- scale number
- (Output) Column scale.
- column str
- Column name.
- data_type str - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding str
- (Output) Column encoding.
- length int
- (Output) Column length.
- nullable bool
- (Output) Whether or not the column can accept a null value.
- ordinal_position int - (Output) The ordinal position of the column in the table.
- precision int
- (Output) Column precision.
- primary_key bool - (Output) Whether or not the column represents a primary key.
- scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String
- (Output) Column encoding.
- length Number
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition Number - (Output) The ordinal position of the column in the table.
- precision Number
- (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Number
- (Output) Column scale.
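The flattened listings above are easier to read as actual configuration. A minimal, hypothetical TypeScript sketch of an Oracle backfill exclusion follows; the schema, table, and column names are invented, the gcp.types.input.datastream type path is an assumption about the Node.js SDK, and the oracleSchemas/schema keys are inferred from the type names rather than shown in this listing.
import * as gcp from "@pulumi/gcp";
// Sketch only: skip backfilling one BLOB-heavy column (all names are placeholders).
const oracleBackfillExclusions: gcp.types.input.datastream.StreamBackfillAllOracleExcludedObjects = {
    oracleSchemas: [{
        schema: "HR",
        oracleTables: [{
            table: "EMPLOYEE_PHOTOS",
            oracleColumns: [{
                column: "PHOTO_BLOB",
            }],
        }],
    }],
};
// Intended use (not shown in full here): backfillAll: { oracleExcludedObjects: oracleBackfillExclusions }.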
StreamBackfillAllPostgresqlExcludedObjects, StreamBackfillAllPostgresqlExcludedObjectsArgs
- PostgresqlSchemas List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- PostgresqlSchemas []StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema[] - PostgreSQL schemas on the server. Structure is documented below.
- postgresql_schemas Sequence[StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema] - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<Property Map> - PostgreSQL schemas on the server. Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs
- Schema string
- Schema name.
- PostgresqlTables List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- Schema string
- Schema name.
- PostgresqlTables []StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable - Tables in the schema. Structure is documented below.
- schema String
- Schema name.
- postgresqlTables List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- schema string
- Schema name.
- postgresqlTables StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable[] - Tables in the schema. Structure is documented below.
- schema str
- Schema name.
- postgresql_tables Sequence[StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable] - Tables in the schema. Structure is documented below.
- schema String
- Schema name.
- postgresqlTables List<Property Map> - Tables in the schema. Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs
- Table string
- Table name.
- PostgresqlColumns List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
- Table name.
- PostgresqlColumns []StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- postgresqlColumns List<StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
- Table name.
- postgresqlColumns StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn[] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
- Table name.
- postgresql_columns Sequence[StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- postgresqlColumns List<Property Map> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
- Column string
- Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- Column string
- Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Integer
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinalPosition Integer - The ordinal position of the column in the table.
- precision Integer
- (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Integer
- (Output) Column scale.
- column string
- Column name.
- dataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length number
- (Output) Column length.
- nullable boolean
- Whether or not the column can accept a null value.
- ordinalPosition number - The ordinal position of the column in the table.
- precision number
- (Output) Column precision.
- primaryKey boolean - Whether or not the column represents a primary key.
- scale number
- (Output) Column scale.
- column str
- Column name.
- data_type str - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length int
- (Output) Column length.
- nullable bool
- Whether or not the column can accept a null value.
- ordinal_position int - The ordinal position of the column in the table.
- precision int
- (Output) Column precision.
- primary_key bool - Whether or not the column represents a primary key.
- scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Number
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinalPosition Number - The ordinal position of the column in the table.
- precision Number
- (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Number
- (Output) Column scale.
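The PostgreSQL variant uses postgresqlSchemas, postgresqlTables, and postgresqlColumns keys instead of the Oracle-prefixed ones. A short hypothetical sketch, with invented names and the same assumed gcp.types.input.datastream type path:
import * as gcp from "@pulumi/gcp";
// Sketch only: exclude one audit table from backfill (names are placeholders).
const postgresBackfillExclusions: gcp.types.input.datastream.StreamBackfillAllPostgresqlExcludedObjects = {
    postgresqlSchemas: [{
        schema: "public",
        postgresqlTables: [{
            table: "audit_events",
        }],
    }],
};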
StreamBackfillAllSqlServerExcludedObjects, StreamBackfillAllSqlServerExcludedObjectsArgs
- Schemas List<StreamBackfillAllSqlServerExcludedObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- Schemas []StreamBackfillAllSqlServerExcludedObjectsSchema - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<StreamBackfillAllSqlServerExcludedObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas StreamBackfillAllSqlServerExcludedObjectsSchema[] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas Sequence[StreamBackfillAllSqlServerExcludedObjectsSchema] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<Property Map>
- SQL Server schemas/databases in the database server. Structure is documented below.
StreamBackfillAllSqlServerExcludedObjectsSchema, StreamBackfillAllSqlServerExcludedObjectsSchemaArgs
- Schema string
- Schema name.
- Tables List<StreamBackfillAllSqlServerExcludedObjectsSchemaTable> - Tables in the database. Structure is documented below.
- Schema string
- Schema name.
- Tables []StreamBackfillAllSqlServerExcludedObjectsSchemaTable - Tables in the database. Structure is documented below.
- schema String
- Schema name.
- tables List<StreamBackfillAllSqlServerExcludedObjectsSchemaTable> - Tables in the database. Structure is documented below.
- schema string
- Schema name.
- tables StreamBackfillAllSqlServerExcludedObjectsSchemaTable[] - Tables in the database. Structure is documented below.
- schema str
- Schema name.
- tables Sequence[StreamBackfillAllSqlServerExcludedObjectsSchemaTable] - Tables in the database. Structure is documented below.
- schema String
- Schema name.
- tables List<Property Map>
- Tables in the database. Structure is documented below.
StreamBackfillAllSqlServerExcludedObjectsSchemaTable, StreamBackfillAllSqlServerExcludedObjectsSchemaTableArgs
- Table string
- Table name.
- Columns List<StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
- Table name.
- Columns []StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- columns List<StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
- Table name.
- columns StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn[] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
- Table name.
- columns Sequence[StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- columns List<Property Map>
- SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumn, StreamBackfillAllSqlServerExcludedObjectsSchemaTableColumnArgs
- Column string
- Column name.
- DataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- Column string
- Column name.
- DataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Integer
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition Integer - (Output) The ordinal position of the column in the table.
- precision Integer
- (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Integer
- (Output) Column scale.
- column string
- Column name.
- dataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length number
- (Output) Column length.
- nullable boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition number - (Output) The ordinal position of the column in the table.
- precision number
- (Output) Column precision.
- primaryKey boolean - (Output) Whether or not the column represents a primary key.
- scale number
- (Output) Column scale.
- column str
- Column name.
- data_type str - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length int
- (Output) Column length.
- nullable bool
- (Output) Whether or not the column can accept a null value.
- ordinal_position int - (Output) The ordinal position of the column in the table.
- precision int
- (Output) Column precision.
- primary_key bool - (Output) Whether or not the column represents a primary key.
- scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Number
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition Number - (Output) The ordinal position of the column in the table.
- precision Number
- (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Number
- (Output) Column scale.
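Unlike the MySQL, Oracle, and PostgreSQL variants, the SQL Server exclusion objects use unprefixed schemas, tables, and columns keys. A hypothetical sketch under the same SDK type-path assumption, with placeholder names:
import * as gcp from "@pulumi/gcp";
// Sketch only: exclude one wide column from backfill (names are placeholders).
const sqlServerBackfillExclusions: gcp.types.input.datastream.StreamBackfillAllSqlServerExcludedObjects = {
    schemas: [{
        schema: "dbo",
        tables: [{
            table: "ChangeLog",
            columns: [{
                column: "Payload",
            }],
        }],
    }],
};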
StreamDestinationConfig, StreamDestinationConfigArgs
- DestinationConnectionProfile string - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- BigqueryDestinationConfig StreamDestinationConfigBigqueryDestinationConfig - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- GcsDestinationConfig StreamDestinationConfigGcsDestinationConfig - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- DestinationConnectionProfile string - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- BigqueryDestinationConfig StreamDestinationConfigBigqueryDestinationConfig - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- GcsDestinationConfig StreamDestinationConfigGcsDestinationConfig - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destinationConnectionProfile String - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigqueryDestinationConfig StreamDestinationConfigBigqueryDestinationConfig - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcsDestinationConfig StreamDestinationConfigGcsDestinationConfig - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destinationConnectionProfile string - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigqueryDestinationConfig StreamDestinationConfigBigqueryDestinationConfig - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcsDestinationConfig StreamDestinationConfigGcsDestinationConfig - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destination_connection_profile str - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigquery_destination_config StreamDestinationConfigBigqueryDestinationConfig - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcs_destination_config StreamDestinationConfigGcsDestinationConfig - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destinationConnectionProfile String - Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigqueryDestinationConfig Property Map - A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcsDestinationConfig Property Map - A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfig, StreamDestinationConfigBigqueryDestinationConfigArgs
- DataFreshness string - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- SingleTargetDataset StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset - A single target dataset to which all data will be streamed. Structure is documented below.
- SourceHierarchyDatasets StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- DataFreshness string - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- SingleTargetDataset StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset - A single target dataset to which all data will be streamed. Structure is documented below.
- SourceHierarchyDatasets StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- dataFreshness String - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- singleTargetDataset StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset - A single target dataset to which all data will be streamed. Structure is documented below.
- sourceHierarchyDatasets StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- dataFreshness string - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- singleTargetDataset StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset - A single target dataset to which all data will be streamed. Structure is documented below.
- sourceHierarchyDatasets StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- data_freshness str - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- single_target_dataset StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset - A single target dataset to which all data will be streamed. Structure is documented below.
- source_hierarchy_datasets StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- dataFreshness String - The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- singleTargetDataset Property Map - A single target dataset to which all data will be streamed. Structure is documented below.
- sourceHierarchyDatasets Property Map - Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset, StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs
- DatasetId string - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- DatasetId string - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- datasetId String - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- datasetId string - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- dataset_id str - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- datasetId String - Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
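As a rough illustration of the single-dataset mode, the following hypothetical TypeScript sketch routes all streamed data into one existing BigQuery dataset; the project and dataset IDs are placeholders and the gcp.types.input.datastream type path is an assumption about the Node.js SDK.
import * as gcp from "@pulumi/gcp";
// Sketch only: one target dataset for the whole stream, with the default 900s freshness made explicit.
const bigqueryDestination: gcp.types.input.datastream.StreamDestinationConfigBigqueryDestinationConfig = {
    dataFreshness: "900s",
    singleTargetDataset: {
        // Either projects/{project}/datasets/{dataset_id} or {project}:{dataset_id} is accepted.
        datasetId: "my-project:datastream_target",
    },
};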
StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets, StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
- DatasetTemplate StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate - Dataset template used for dynamic dataset creation. Structure is documented below.
- DatasetTemplate StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate - Dataset template used for dynamic dataset creation. Structure is documented below.
- datasetTemplate StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate - Dataset template used for dynamic dataset creation. Structure is documented below.
- datasetTemplate StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate - Dataset template used for dynamic dataset creation. Structure is documented below.
- dataset_template StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate - Dataset template used for dynamic dataset creation. Structure is documented below.
- datasetTemplate Property Map - Dataset template used for dynamic dataset creation. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate, StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
- Location string
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- DatasetIdPrefix string - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- KmsKeyName string - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- Location string
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- DatasetIdPrefix string - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- KmsKeyName string - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location String
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- datasetIdPrefix String - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kmsKeyName String - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location string
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- datasetIdPrefix string - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kmsKeyName string - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location str
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- dataset_id_prefix str - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kms_key_name str - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location String
- The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- datasetIdPrefix String - If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kmsKeyName String - Describes the Cloud KMS encryption key that will be used to protect destination BigQuery
table. The BigQuery Service Account associated with your project requires access to this
encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
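In the alternative source-hierarchy mode, Datastream creates one dataset per source schema from a template. A hypothetical sketch (project, key ring, and prefix are placeholders; created dataset names get the prefix joined with an underscore, per the description above), again assuming the gcp.types.input.datastream type path:
import * as gcp from "@pulumi/gcp";
// Sketch only: dynamic dataset creation with a prefix and a customer-managed encryption key.
const hierarchicalBigqueryDestination: gcp.types.input.datastream.StreamDestinationConfigBigqueryDestinationConfig = {
    sourceHierarchyDatasets: {
        datasetTemplate: {
            location: "us-central1",
            datasetIdPrefix: "datastream",
            kmsKeyName: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
        },
    },
};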
StreamDestinationConfigGcsDestinationConfig, StreamDestinationConfigGcsDestinationConfigArgs
- AvroFileFormat StreamDestinationConfigGcsDestinationConfigAvroFileFormat - AVRO file format configuration.
- FileRotationInterval string - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- FileRotationMb int - The maximum file size to be saved in the bucket.
- JsonFileFormat StreamDestinationConfigGcsDestinationConfigJsonFileFormat - JSON file format configuration. Structure is documented below.
- Path string
- Path inside the Cloud Storage bucket to write data to.
- AvroFileFormat StreamDestinationConfigGcsDestinationConfigAvroFileFormat - AVRO file format configuration.
- FileRotationInterval string - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- FileRotationMb int - The maximum file size to be saved in the bucket.
- JsonFileFormat StreamDestinationConfigGcsDestinationConfigJsonFileFormat - JSON file format configuration. Structure is documented below.
- Path string
- Path inside the Cloud Storage bucket to write data to.
- avroFileFormat StreamDestinationConfigGcsDestinationConfigAvroFileFormat - AVRO file format configuration.
- fileRotationInterval String - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- fileRotationMb Integer - The maximum file size to be saved in the bucket.
- jsonFileFormat StreamDestinationConfigGcsDestinationConfigJsonFileFormat - JSON file format configuration. Structure is documented below.
- path String
- Path inside the Cloud Storage bucket to write data to.
- avroFileFormat StreamDestinationConfigGcsDestinationConfigAvroFileFormat - AVRO file format configuration.
- fileRotationInterval string - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- fileRotationMb number - The maximum file size to be saved in the bucket.
- jsonFileFormat StreamDestinationConfigGcsDestinationConfigJsonFileFormat - JSON file format configuration. Structure is documented below.
- path string
- Path inside the Cloud Storage bucket to write data to.
- avro_file_format StreamDestinationConfigGcsDestinationConfigAvroFileFormat - AVRO file format configuration.
- file_rotation_interval str - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- file_rotation_mb int - The maximum file size to be saved in the bucket.
- json_file_format StreamDestinationConfigGcsDestinationConfigJsonFileFormat - JSON file format configuration. Structure is documented below.
- path str
- Path inside the Cloud Storage bucket to write data to.
- avroFileFormat Property Map - AVRO file format configuration.
- fileRotationInterval String - The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- fileRotationMb Number - The maximum file size to be saved in the bucket.
- jsonFileFormat Property Map - JSON file format configuration. Structure is documented below.
- path String
- Path inside the Cloud Storage bucket to write data to.
StreamDestinationConfigGcsDestinationConfigJsonFileFormat, StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs
- Compression string
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- SchemaFileFormat string
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
- Compression string
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- SchemaFileFormat string
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
- compression String
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- schemaFileFormat String
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
- compression string
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- schemaFileFormat string
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
- compression str
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- schema_file_format str
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
- compression String
- Compression of the loaded JSON file. Possible values are: NO_COMPRESSION, GZIP.
- schemaFileFormat String
- The schema file format along with JSON data files. Possible values are: NO_SCHEMA_FILE, AVRO_SCHEMA_FILE.
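Putting the Cloud Storage options together, a hypothetical sketch of a JSON output configuration might look like the following; the path and rotation values are illustrative, the enum strings come from the possible values listed above, and the gcp.types.input.datastream type path is an assumption about the Node.js SDK.
import * as gcp from "@pulumi/gcp";
// Sketch only: gzipped JSON files with Avro schema files, rotated every 60 seconds or 50 MB.
const gcsDestination: gcp.types.input.datastream.StreamDestinationConfigGcsDestinationConfig = {
    path: "/events",
    fileRotationInterval: "60s",
    fileRotationMb: 50,
    jsonFileFormat: {
        compression: "GZIP",
        schemaFileFormat: "AVRO_SCHEMA_FILE",
    },
};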
StreamSourceConfig, StreamSourceConfigArgs
- SourceConnectionProfile string - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- MysqlSourceConfig StreamSourceConfigMysqlSourceConfig - MySQL data source configuration. Structure is documented below.
- OracleSourceConfig StreamSourceConfigOracleSourceConfig - Oracle data source configuration. Structure is documented below.
- PostgresqlSourceConfig StreamSourceConfigPostgresqlSourceConfig - PostgreSQL data source configuration. Structure is documented below.
- SqlServerSourceConfig StreamSourceConfigSqlServerSourceConfig - SQL Server data source configuration. Structure is documented below.
- SourceConnectionProfile string - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- MysqlSourceConfig StreamSourceConfigMysqlSourceConfig - MySQL data source configuration. Structure is documented below.
- OracleSourceConfig StreamSourceConfigOracleSourceConfig - Oracle data source configuration. Structure is documented below.
- PostgresqlSourceConfig StreamSourceConfigPostgresqlSourceConfig - PostgreSQL data source configuration. Structure is documented below.
- SqlServerSourceConfig StreamSourceConfigSqlServerSourceConfig - SQL Server data source configuration. Structure is documented below.
- sourceConnectionProfile String - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysqlSourceConfig StreamSourceConfigMysqlSourceConfig - MySQL data source configuration. Structure is documented below.
- oracleSourceConfig StreamSourceConfigOracleSourceConfig - Oracle data source configuration. Structure is documented below.
- postgresqlSourceConfig StreamSourceConfigPostgresqlSourceConfig - PostgreSQL data source configuration. Structure is documented below.
- sqlServerSourceConfig StreamSourceConfigSqlServerSourceConfig - SQL Server data source configuration. Structure is documented below.
- sourceConnectionProfile string - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysqlSourceConfig StreamSourceConfigMysqlSourceConfig - MySQL data source configuration. Structure is documented below.
- oracleSourceConfig StreamSourceConfigOracleSourceConfig - Oracle data source configuration. Structure is documented below.
- postgresqlSourceConfig StreamSourceConfigPostgresqlSourceConfig - PostgreSQL data source configuration. Structure is documented below.
- sqlServerSourceConfig StreamSourceConfigSqlServerSourceConfig - SQL Server data source configuration. Structure is documented below.
- source_connection_profile str - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysql_source_config StreamSourceConfigMysqlSourceConfig - MySQL data source configuration. Structure is documented below.
- oracle_source_config StreamSourceConfigOracleSourceConfig - Oracle data source configuration. Structure is documented below.
- postgresql_source_config StreamSourceConfigPostgresqlSourceConfig - PostgreSQL data source configuration. Structure is documented below.
- sql_server_source_config StreamSourceConfigSqlServerSourceConfig - SQL Server data source configuration. Structure is documented below.
- sourceConnectionProfile String - Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysqlSourceConfig Property Map - MySQL data source configuration. Structure is documented below.
- oracleSourceConfig Property Map - Oracle data source configuration. Structure is documented below.
- postgresqlSourceConfig Property Map - PostgreSQL data source configuration. Structure is documented below.
- sqlServerSourceConfig Property Map - SQL Server data source configuration. Structure is documented below.
StreamSourceConfigMysqlSourceConfig, StreamSourceConfigMysqlSourceConfigArgs
- ExcludeObjects StreamSourceConfigMysqlSourceConfigExcludeObjects - MySQL objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigMysqlSourceConfigIncludeObjects - MySQL objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- MaxConcurrentCdcTasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- ExcludeObjects StreamSourceConfigMysqlSourceConfigExcludeObjects - MySQL objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigMysqlSourceConfigIncludeObjects - MySQL objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- MaxConcurrentCdcTasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- excludeObjects StreamSourceConfigMysqlSourceConfigExcludeObjects - MySQL objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigMysqlSourceConfigIncludeObjects - MySQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Integer - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks Integer - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- excludeObjects StreamSourceConfigMysqlSourceConfigExcludeObjects - MySQL objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigMysqlSourceConfigIncludeObjects - MySQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks number - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- exclude_objects StreamSourceConfigMysqlSourceConfigExcludeObjects - MySQL objects to exclude from the stream. Structure is documented below.
- include_objects StreamSourceConfigMysqlSourceConfigIncludeObjects - MySQL objects to retrieve from the source. Structure is documented below.
- max_concurrent_backfill_tasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- max_concurrent_cdc_tasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- excludeObjects Property Map - MySQL objects to exclude from the stream. Structure is documented below.
- includeObjects Property Map - MySQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks Number - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
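The include/exclude object shape mirrors the full example near the top of this page, so the sketch below only shows the two concurrency knobs; the numbers are arbitrary placeholders and the type path is the same SDK assumption as above.
import * as gcp from "@pulumi/gcp";
// Sketch only: cap source-side parallelism; 0 or an omitted field falls back to the service default.
const mysqlSource: gcp.types.input.datastream.StreamSourceConfigMysqlSourceConfig = {
    maxConcurrentBackfillTasks: 12,
    maxConcurrentCdcTasks: 5,
};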
StreamSourceConfigMysqlSourceConfigExcludeObjects, StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs
- MysqlDatabases List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- MysqlDatabases []StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- mysqlDatabases StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase[] - MySQL databases on the server. Structure is documented below.
- mysql_databases Sequence[StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase] - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<Property Map> - MySQL databases on the server. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs
- Database string
- Database name.
- MysqlTables List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- Database string
- Database name.
- MysqlTables []StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable - Tables in the database. Structure is documented below.
- database String
- Database name.
- mysqlTables List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- database string
- Database name.
- mysqlTables StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable[] - Tables in the database. Structure is documented below.
- database str
- Database name.
- mysql_tables Sequence[StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable] - Tables in the database. Structure is documented below.
- database String
- Database name.
- mysqlTables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs
- Table string
- Table name.
- MysqlColumns List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
- Table name.
- MysqlColumns []StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- mysqlColumns List<StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
- Table name.
- mysqlColumns StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn[] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
- Table name.
- mysql_columns Sequence[StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- mysqlColumns List<Property Map> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
- Collation string
- Column collation.
- Column string
- Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Collation string
- Column collation.
- Column string
- Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- collation String
- Column collation.
- column String
- Column name.
- dataType String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Integer
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinalPosition Integer - The ordinal position of the column in the table.
- primaryKey Boolean - Whether or not the column represents a primary key.
- collation string
- Column collation.
- column string
- Column name.
- dataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length number
- (Output) Column length.
- nullable boolean
- Whether or not the column can accept a null value.
- ordinalPosition number - The ordinal position of the column in the table.
- primaryKey boolean - Whether or not the column represents a primary key.
- collation str
- Column collation.
- column str
- Column name.
- data_type str - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length int
- (Output) Column length.
- nullable bool
- Whether or not the column can accept a null value.
- ordinal_position int - The ordinal position of the column in the table.
- primary_key bool - Whether or not the column represents a primary key.
- collation String
- Column collation.
- column String
- Column name.
- dataType String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Number
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinalPosition Number - The ordinal position of the column in the table.
- primaryKey Boolean - Whether or not the column represents a primary key.
StreamSourceConfigMysqlSourceConfigIncludeObjects, StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs
- MysqlDatabases List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- MysqlDatabases []StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase> - MySQL databases on the server. Structure is documented below.
- mysqlDatabases StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase[] - MySQL databases on the server. Structure is documented below.
- mysql_databases Sequence[StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase] - MySQL databases on the server. Structure is documented below.
- mysqlDatabases List<Property Map> - MySQL databases on the server. Structure is documented below.
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase, StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs
- Database string
- Database name.
- MysqlTables List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- Database string
- Database name.
- MysqlTables []StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable - Tables in the database. Structure is documented below.
- database String
- Database name.
- mysqlTables List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable> - Tables in the database. Structure is documented below.
- database string
- Database name.
- mysqlTables StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable[] - Tables in the database. Structure is documented below.
- database str
- Database name.
- mysql_tables Sequence[StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable] - Tables in the database. Structure is documented below.
- database String
- Database name.
- mysqlTables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable, StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs
- Table string
- Table name.
- MysqlColumns List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
- Table name.
- MysqlColumns []StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- mysqlColumns List<StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
- Table name.
- mysqlColumns StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn[] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
- Table name.
- mysql_columns Sequence[StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn] - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
- Table name.
- mysqlColumns List<Property Map> - MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumn, StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
- Collation string
- Column collation.
- Column string
- Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Collation string
- Column collation.
- Column string
- Column name.
- DataType string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
- (Output) Column length.
- Nullable bool
- Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- PrimaryKey bool - Whether or not the column represents a primary key.
- collation String
- Column collation.
- column String
- Column name.
- data
Type String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Integer
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinal
Position Integer - The ordinal position of the column in the table.
- primary
Key Boolean - Whether or not the column represents a primary key.
- collation string
- Column collation.
- column string
- Column name.
- data
Type string - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length number
- (Output) Column length.
- nullable boolean
- Whether or not the column can accept a null value.
- ordinal
Position number - The ordinal position of the column in the table.
- primary
Key boolean - Whether or not the column represents a primary key.
- collation str
- Column collation.
- column str
- Column name.
- data_
type str - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length int
- (Output) Column length.
- nullable bool
- Whether or not the column can accept a null value.
- ordinal_
position int - The ordinal position of the column in the table.
- primary_
key bool - Whether or not the column represents a primary key.
- collation String
- Column collation.
- column String
- Column name.
- data
Type String - The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Number
- (Output) Column length.
- nullable Boolean
- Whether or not the column can accept a null value.
- ordinal
Position Number - The ordinal position of the column in the table.
- primary
Key Boolean - Whether or not the column represents a primary key.
StreamSourceConfigOracleSourceConfig, StreamSourceConfigOracleSourceConfigArgs
- DropLargeObjects StreamSourceConfigOracleSourceConfigDropLargeObjects - Configuration to drop large object values.
- ExcludeObjects StreamSourceConfigOracleSourceConfigExcludeObjects - Oracle objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigOracleSourceConfigIncludeObjects - Oracle objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- MaxConcurrentCdcTasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- StreamLargeObjects StreamSourceConfigOracleSourceConfigStreamLargeObjects - Configuration to stream large object values.
- DropLargeObjects StreamSourceConfigOracleSourceConfigDropLargeObjects - Configuration to drop large object values.
- ExcludeObjects StreamSourceConfigOracleSourceConfigExcludeObjects - Oracle objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigOracleSourceConfigIncludeObjects - Oracle objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- MaxConcurrentCdcTasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- StreamLargeObjects StreamSourceConfigOracleSourceConfigStreamLargeObjects - Configuration to stream large object values.
- dropLargeObjects StreamSourceConfigOracleSourceConfigDropLargeObjects - Configuration to drop large object values.
- excludeObjects StreamSourceConfigOracleSourceConfigExcludeObjects - Oracle objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigOracleSourceConfigIncludeObjects - Oracle objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Integer - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks Integer - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- streamLargeObjects StreamSourceConfigOracleSourceConfigStreamLargeObjects - Configuration to stream large object values.
- dropLargeObjects StreamSourceConfigOracleSourceConfigDropLargeObjects - Configuration to drop large object values.
- excludeObjects StreamSourceConfigOracleSourceConfigExcludeObjects - Oracle objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigOracleSourceConfigIncludeObjects - Oracle objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks number - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- streamLargeObjects StreamSourceConfigOracleSourceConfigStreamLargeObjects - Configuration to stream large object values.
- drop_large_objects StreamSourceConfigOracleSourceConfigDropLargeObjects - Configuration to drop large object values.
- exclude_objects StreamSourceConfigOracleSourceConfigExcludeObjects - Oracle objects to exclude from the stream. Structure is documented below.
- include_objects StreamSourceConfigOracleSourceConfigIncludeObjects - Oracle objects to retrieve from the source. Structure is documented below.
- max_concurrent_backfill_tasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- max_concurrent_cdc_tasks int - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- stream_large_objects StreamSourceConfigOracleSourceConfigStreamLargeObjects - Configuration to stream large object values.
- dropLargeObjects Property Map - Configuration to drop large object values.
- excludeObjects Property Map - Oracle objects to exclude from the stream. Structure is documented below.
- includeObjects Property Map - Oracle objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- maxConcurrentCdcTasks Number - Maximum number of concurrent CDC tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- streamLargeObjects Property Map - Configuration to stream large object values.
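As a rough illustration of how the Oracle source fields above fit together, here is a minimal TypeScript sketch of an Oracle-backed stream. The project, connection profile names, and schema/table names are placeholders, and the GCS destination simply mirrors the pattern used in the full example on this page; treat it as a sketch rather than a drop-in configuration.

```typescript
import * as gcp from "@pulumi/gcp";

// Hypothetical, pre-existing connection profiles (full resource names).
const oracleSourceProfile =
    "projects/my-project/locations/us-central1/connectionProfiles/oracle-source";
const gcsDestinationProfile =
    "projects/my-project/locations/us-central1/connectionProfiles/gcs-destination";

const oracleStream = new gcp.datastream.Stream("oracle_stream", {
    streamId: "oracle-stream",
    location: "us-central1",
    displayName: "Oracle to GCS",
    desiredState: "NOT_STARTED",
    sourceConfig: {
        sourceConnectionProfile: oracleSourceProfile,
        oracleSourceConfig: {
            // Replicate only HR.EMPLOYEES; everything else is left out.
            includeObjects: {
                oracleSchemas: [{
                    schema: "HR",
                    oracleTables: [{ table: "EMPLOYEES" }],
                }],
            },
            // Skip large object (LOB) values instead of streaming them.
            dropLargeObjects: {},
            maxConcurrentCdcTasks: 5,
            maxConcurrentBackfillTasks: 12,
        },
    },
    destinationConfig: {
        destinationConnectionProfile: gcsDestinationProfile,
        gcsDestinationConfig: {
            path: "/oracle",
            avroFileFormat: {},
        },
    },
    backfillAll: {},
});
```

Note that `dropLargeObjects` and `streamLargeObjects` are mutually exclusive ways of handling LOB columns; the sketch opts for dropping them.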
StreamSourceConfigOracleSourceConfigExcludeObjects, StreamSourceConfigOracleSourceConfigExcludeObjectsArgs
- OracleSchemas List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- OracleSchemas []StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema[] - Oracle schemas/databases in the database server. Structure is documented below.
- oracle_schemas Sequence[StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema] - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<Property Map> - Oracle schemas/databases in the database server. Structure is documented below.
StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchema, StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs
- Schema string - Schema name.
- OracleTables List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- Schema string - Schema name.
- OracleTables []StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- schema string - Schema name.
- oracleTables StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable[] - Tables in the database. Structure is documented below.
- schema str - Schema name.
- oracle_tables Sequence[StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable] - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTable, StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs
- Table string - Table name.
- OracleColumns List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- OracleColumns []StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- oracleColumns StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn[] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- oracle_columns Sequence[StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<Property Map> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumn, StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs
- Column string - Column name.
- DataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string - (Output) Column encoding.
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- Column string - Column name.
- DataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string - (Output) Column encoding.
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String - (Output) Column encoding.
- length Integer - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Integer - (Output) The ordinal position of the column in the table.
- precision Integer - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Integer - (Output) Column scale.
- column string - Column name.
- dataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding string - (Output) Column encoding.
- length number - (Output) Column length.
- nullable boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition number - (Output) The ordinal position of the column in the table.
- precision number - (Output) Column precision.
- primaryKey boolean - (Output) Whether or not the column represents a primary key.
- scale number - (Output) Column scale.
- column str - Column name.
- data_type str - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding str - (Output) Column encoding.
- length int - (Output) Column length.
- nullable bool - (Output) Whether or not the column can accept a null value.
- ordinal_position int - (Output) The ordinal position of the column in the table.
- precision int - (Output) Column precision.
- primary_key bool - (Output) Whether or not the column represents a primary key.
- scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String - (Output) Column encoding.
- length Number - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Number - (Output) The ordinal position of the column in the table.
- precision Number - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Number - (Output) Column scale.
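To make the exclude-object shape concrete, the following hedged TypeScript sketch builds an `excludeObjects` block that drops a single column while the rest of the table keeps streaming. The schema, table, and column names are placeholders, and the type annotation assumes the provider's generated input types under `gcp.types.input`.

```typescript
import * as gcp from "@pulumi/gcp";

// Exclude one sensitive column; all other columns of HR.EMPLOYEES still stream.
// Schema, table, and column names are placeholders.
const oracleExcludes: gcp.types.input.datastream.StreamSourceConfigOracleSourceConfigExcludeObjects = {
    oracleSchemas: [{
        schema: "HR",
        oracleTables: [{
            table: "EMPLOYEES",
            oracleColumns: [{ column: "SSN" }],
        }],
    }],
};
```

The object is then passed as `excludeObjects` inside `oracleSourceConfig`. Column fields marked (Output) above, such as `length` or `precision`, are reported back by Datastream and do not need to be set here.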
StreamSourceConfigOracleSourceConfigIncludeObjects, StreamSourceConfigOracleSourceConfigIncludeObjectsArgs
- OracleSchemas List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- OracleSchemas []StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema> - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema[] - Oracle schemas/databases in the database server. Structure is documented below.
- oracle_schemas Sequence[StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema] - Oracle schemas/databases in the database server. Structure is documented below.
- oracleSchemas List<Property Map> - Oracle schemas/databases in the database server. Structure is documented below.
StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchema, StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs
- Schema string - Schema name.
- OracleTables List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- Schema string - Schema name.
- OracleTables []StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable> - Tables in the database. Structure is documented below.
- schema string - Schema name.
- oracleTables StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable[] - Tables in the database. Structure is documented below.
- schema str - Schema name.
- oracle_tables Sequence[StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable] - Tables in the database. Structure is documented below.
- schema String - Schema name.
- oracleTables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTable, StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs
- Table string - Table name.
- OracleColumns List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- OracleColumns []StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- oracleColumns StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn[] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- oracle_columns Sequence[StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn] - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- oracleColumns List<Property Map> - Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumn, StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs
- Column string - Column name.
- DataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string - (Output) Column encoding.
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- Column string - Column name.
- DataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string - (Output) Column encoding.
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String - (Output) Column encoding.
- length Integer - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Integer - (Output) The ordinal position of the column in the table.
- precision Integer - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Integer - (Output) Column scale.
- column string - Column name.
- dataType string - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding string - (Output) Column encoding.
- length number - (Output) Column length.
- nullable boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition number - (Output) The ordinal position of the column in the table.
- precision number - (Output) Column precision.
- primaryKey boolean - (Output) Whether or not the column represents a primary key.
- scale number - (Output) Column scale.
- column str - Column name.
- data_type str - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding str - (Output) Column encoding.
- length int - (Output) Column length.
- nullable bool - (Output) Whether or not the column can accept a null value.
- ordinal_position int - (Output) The ordinal position of the column in the table.
- precision int - (Output) Column precision.
- primary_key bool - (Output) Whether or not the column represents a primary key.
- scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String - (Output) Column encoding.
- length Number - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Number - (Output) The ordinal position of the column in the table.
- precision Number - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Number - (Output) Column scale.
StreamSourceConfigPostgresqlSourceConfig, StreamSourceConfigPostgresqlSourceConfigArgs
- Publication string - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- ReplicationSlot string - The name of the logical replication slot that's configured with the pgoutput plugin.
- ExcludeObjects StreamSourceConfigPostgresqlSourceConfigExcludeObjects - PostgreSQL objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigPostgresqlSourceConfigIncludeObjects - PostgreSQL objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- Publication string - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- ReplicationSlot string - The name of the logical replication slot that's configured with the pgoutput plugin.
- ExcludeObjects StreamSourceConfigPostgresqlSourceConfigExcludeObjects - PostgreSQL objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigPostgresqlSourceConfigIncludeObjects - PostgreSQL objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- publication String - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- replicationSlot String - The name of the logical replication slot that's configured with the pgoutput plugin.
- excludeObjects StreamSourceConfigPostgresqlSourceConfigExcludeObjects - PostgreSQL objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigPostgresqlSourceConfigIncludeObjects - PostgreSQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Integer - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- publication string - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- replicationSlot string - The name of the logical replication slot that's configured with the pgoutput plugin.
- excludeObjects StreamSourceConfigPostgresqlSourceConfigExcludeObjects - PostgreSQL objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigPostgresqlSourceConfigIncludeObjects - PostgreSQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- publication str - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- replication_slot str - The name of the logical replication slot that's configured with the pgoutput plugin.
- exclude_objects StreamSourceConfigPostgresqlSourceConfigExcludeObjects - PostgreSQL objects to exclude from the stream. Structure is documented below.
- include_objects StreamSourceConfigPostgresqlSourceConfigIncludeObjects - PostgreSQL objects to retrieve from the source. Structure is documented below.
- max_concurrent_backfill_tasks int - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
- publication String - The name of the publication that includes the set of all tables that are defined in the stream's include_objects.
- replicationSlot String - The name of the logical replication slot that's configured with the pgoutput plugin.
- excludeObjects Property Map - PostgreSQL objects to exclude from the stream. Structure is documented below.
- includeObjects Property Map - PostgreSQL objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Number - Maximum number of concurrent backfill tasks. The number should be non-negative. If not set (or set to 0), the system's default value will be used.
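Since `publication` and `replicationSlot` are the parts that differ most from the other sources, here is a hedged TypeScript sketch of just the `sourceConfig` block for a PostgreSQL-backed stream. The connection profile path, publication, slot, and table names are placeholders, and the type annotation assumes the provider's generated input types under `gcp.types.input`.

```typescript
import * as gcp from "@pulumi/gcp";

// sourceConfig for a PostgreSQL-backed stream. The publication and the logical
// replication slot (created with the pgoutput plugin) must already exist on the
// source database; the names below are placeholders.
const postgresSourceConfig: gcp.types.input.datastream.StreamSourceConfig = {
    sourceConnectionProfile:
        "projects/my-project/locations/us-central1/connectionProfiles/postgres-source",
    postgresqlSourceConfig: {
        publication: "datastream_publication",
        replicationSlot: "datastream_replication_slot",
        includeObjects: {
            postgresqlSchemas: [{
                schema: "public",
                postgresqlTables: [{ table: "orders" }, { table: "customers" }],
            }],
        },
        maxConcurrentBackfillTasks: 12,
    },
};
```

Per the `publication` description above, the publication should cover every table listed under `includeObjects`; tables outside the publication will not produce change events.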
StreamSourceConfigPostgresqlSourceConfigExcludeObjects, StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs
- PostgresqlSchemas List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- PostgresqlSchemas []StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema[] - PostgreSQL schemas on the server. Structure is documented below.
- postgresql_schemas Sequence[StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema] - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<Property Map> - PostgreSQL schemas on the server. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchema, StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs
- Schema string - Database name.
- PostgresqlTables List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- Schema string - Database name.
- PostgresqlTables []StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable - Tables in the schema. Structure is documented below.
- schema String - Database name.
- postgresqlTables List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- schema string - Database name.
- postgresqlTables StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable[] - Tables in the schema. Structure is documented below.
- schema str - Database name.
- postgresql_tables Sequence[StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable] - Tables in the schema. Structure is documented below.
- schema String - Database name.
- postgresqlTables List<Property Map> - Tables in the schema. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTable, StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs
- Table string - Table name.
- PostgresqlColumns List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- PostgresqlColumns []StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- postgresqlColumns List<StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- postgresqlColumns StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn[] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- postgresql_columns Sequence[StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- postgresqlColumns List<Property Map> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn, StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
- Column string - Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- Column string - Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Integer - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Integer - The ordinal position of the column in the table.
- precision Integer - (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Integer - (Output) Column scale.
- column string - Column name.
- dataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length number - (Output) Column length.
- nullable boolean - Whether or not the column can accept a null value.
- ordinalPosition number - The ordinal position of the column in the table.
- precision number - (Output) Column precision.
- primaryKey boolean - Whether or not the column represents a primary key.
- scale number - (Output) Column scale.
- column str - Column name.
- data_type str - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length int - (Output) Column length.
- nullable bool - Whether or not the column can accept a null value.
- ordinal_position int - The ordinal position of the column in the table.
- precision int - (Output) Column precision.
- primary_key bool - Whether or not the column represents a primary key.
- scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Number - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Number - The ordinal position of the column in the table.
- precision Number - (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Number - (Output) Column scale.
StreamSourceConfigPostgresqlSourceConfigIncludeObjects, StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs
- PostgresqlSchemas List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- PostgresqlSchemas []StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema> - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema[] - PostgreSQL schemas on the server. Structure is documented below.
- postgresql_schemas Sequence[StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema] - PostgreSQL schemas on the server. Structure is documented below.
- postgresqlSchemas List<Property Map> - PostgreSQL schemas on the server. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchema, StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs
- Schema string - Database name.
- PostgresqlTables List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- Schema string - Database name.
- PostgresqlTables []StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable - Tables in the schema. Structure is documented below.
- schema String - Database name.
- postgresqlTables List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable> - Tables in the schema. Structure is documented below.
- schema string - Database name.
- postgresqlTables StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable[] - Tables in the schema. Structure is documented below.
- schema str - Database name.
- postgresql_tables Sequence[StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable] - Tables in the schema. Structure is documented below.
- schema String - Database name.
- postgresqlTables List<Property Map> - Tables in the schema. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTable, StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs
- Table string - Table name.
- PostgresqlColumns List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- PostgresqlColumns []StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- postgresqlColumns List<StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- postgresqlColumns StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn[] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- postgresql_columns Sequence[StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn] - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- postgresqlColumns List<Property Map> - PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn, StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
- Column string - Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- Column string - Column name.
- DataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int - (Output) Column length.
- Nullable bool - Whether or not the column can accept a null value.
- OrdinalPosition int - The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Integer - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Integer - The ordinal position of the column in the table.
- precision Integer - (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Integer - (Output) Column scale.
- column string - Column name.
- dataType string - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length number - (Output) Column length.
- nullable boolean - Whether or not the column can accept a null value.
- ordinalPosition number - The ordinal position of the column in the table.
- precision number - (Output) Column precision.
- primaryKey boolean - Whether or not the column represents a primary key.
- scale number - (Output) Column scale.
- column str - Column name.
- data_type str - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length int - (Output) Column length.
- nullable bool - Whether or not the column can accept a null value.
- ordinal_position int - The ordinal position of the column in the table.
- precision int - (Output) Column precision.
- primary_key bool - Whether or not the column represents a primary key.
- scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Number - (Output) Column length.
- nullable Boolean - Whether or not the column can accept a null value.
- ordinalPosition Number - The ordinal position of the column in the table.
- precision Number - (Output) Column precision.
- primaryKey Boolean - Whether or not the column represents a primary key.
- scale Number - (Output) Column scale.
StreamSourceConfigSqlServerSourceConfig, StreamSourceConfigSqlServerSourceConfigArgs
- ExcludeObjects StreamSourceConfigSqlServerSourceConfigExcludeObjects - SQL Server objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigSqlServerSourceConfigIncludeObjects - SQL Server objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Max concurrent backfill tasks.
- MaxConcurrentCdcTasks int - Max concurrent CDC tasks.
- ExcludeObjects StreamSourceConfigSqlServerSourceConfigExcludeObjects - SQL Server objects to exclude from the stream. Structure is documented below.
- IncludeObjects StreamSourceConfigSqlServerSourceConfigIncludeObjects - SQL Server objects to retrieve from the source. Structure is documented below.
- MaxConcurrentBackfillTasks int - Max concurrent backfill tasks.
- MaxConcurrentCdcTasks int - Max concurrent CDC tasks.
- excludeObjects StreamSourceConfigSqlServerSourceConfigExcludeObjects - SQL Server objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigSqlServerSourceConfigIncludeObjects - SQL Server objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Integer - Max concurrent backfill tasks.
- maxConcurrentCdcTasks Integer - Max concurrent CDC tasks.
- excludeObjects StreamSourceConfigSqlServerSourceConfigExcludeObjects - SQL Server objects to exclude from the stream. Structure is documented below.
- includeObjects StreamSourceConfigSqlServerSourceConfigIncludeObjects - SQL Server objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks number - Max concurrent backfill tasks.
- maxConcurrentCdcTasks number - Max concurrent CDC tasks.
- exclude_objects StreamSourceConfigSqlServerSourceConfigExcludeObjects - SQL Server objects to exclude from the stream. Structure is documented below.
- include_objects StreamSourceConfigSqlServerSourceConfigIncludeObjects - SQL Server objects to retrieve from the source. Structure is documented below.
- max_concurrent_backfill_tasks int - Max concurrent backfill tasks.
- max_concurrent_cdc_tasks int - Max concurrent CDC tasks.
- excludeObjects Property Map - SQL Server objects to exclude from the stream. Structure is documented below.
- includeObjects Property Map - SQL Server objects to retrieve from the source. Structure is documented below.
- maxConcurrentBackfillTasks Number - Max concurrent backfill tasks.
- maxConcurrentCdcTasks Number - Max concurrent CDC tasks.
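For completeness, here is a hedged TypeScript sketch of the `sourceConfig` block for a SQL Server-backed stream, limited to the fields listed above. The connection profile path, schema, and table names are placeholders; depending on the provider version, additional SQL Server CDC settings beyond the fields documented here may also apply.

```typescript
import * as gcp from "@pulumi/gcp";

// sourceConfig for a SQL Server-backed stream; names are placeholders.
// Only the fields documented in this section are used here.
const sqlServerSource: gcp.types.input.datastream.StreamSourceConfig = {
    sourceConnectionProfile:
        "projects/my-project/locations/us-central1/connectionProfiles/sqlserver-source",
    sqlServerSourceConfig: {
        includeObjects: {
            schemas: [{
                schema: "dbo",
                tables: [{ table: "Orders" }],
            }],
        },
        maxConcurrentBackfillTasks: 8,
        maxConcurrentCdcTasks: 4,
    },
};
```

Unlike the MySQL, Oracle, and PostgreSQL variants, the SQL Server nested types use plain `schemas`, `tables`, and `columns` field names, as shown in the structures below.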
StreamSourceConfigSqlServerSourceConfigExcludeObjects, StreamSourceConfigSqlServerSourceConfigExcludeObjectsArgs
- Schemas List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- Schemas []StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema[] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas Sequence[StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<Property Map> - SQL Server schemas/databases in the database server. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchema, StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaArgs
- Schema string - Schema name.
- Tables List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable> - Tables in the database. Structure is documented below.
- Schema string - Schema name.
- Tables []StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable - Tables in the database. Structure is documented below.
- schema String - Schema name.
- tables List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable> - Tables in the database. Structure is documented below.
- schema string - Schema name.
- tables StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable[] - Tables in the database. Structure is documented below.
- schema str - Schema name.
- tables Sequence[StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable] - Tables in the database. Structure is documented below.
- schema String - Schema name.
- tables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTable, StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableArgs
- Table string - Table name.
- Columns List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- Columns []StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- columns List<StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- columns StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn[] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- columns Sequence[StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- columns List<Property Map> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumn, StreamSourceConfigSqlServerSourceConfigExcludeObjectsSchemaTableColumnArgs
- Column string - Column name.
- DataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- Column string - Column name.
- DataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int - (Output) Column length.
- Nullable bool - (Output) Whether or not the column can accept a null value.
- OrdinalPosition int - (Output) The ordinal position of the column in the table.
- Precision int - (Output) Column precision.
- PrimaryKey bool - (Output) Whether or not the column represents a primary key.
- Scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Integer - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Integer - (Output) The ordinal position of the column in the table.
- precision Integer - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Integer - (Output) Column scale.
- column string - Column name.
- dataType string - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length number - (Output) Column length.
- nullable boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition number - (Output) The ordinal position of the column in the table.
- precision number - (Output) Column precision.
- primaryKey boolean - (Output) Whether or not the column represents a primary key.
- scale number - (Output) Column scale.
- column str - Column name.
- data_type str - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length int - (Output) Column length.
- nullable bool - (Output) Whether or not the column can accept a null value.
- ordinal_position int - (Output) The ordinal position of the column in the table.
- precision int - (Output) Column precision.
- primary_key bool - (Output) Whether or not the column represents a primary key.
- scale int - (Output) Column scale.
- column String - Column name.
- dataType String - The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Number - (Output) Column length.
- nullable Boolean - (Output) Whether or not the column can accept a null value.
- ordinalPosition Number - (Output) The ordinal position of the column in the table.
- precision Number - (Output) Column precision.
- primaryKey Boolean - (Output) Whether or not the column represents a primary key.
- scale Number - (Output) Column scale.
StreamSourceConfigSqlServerSourceConfigIncludeObjects, StreamSourceConfigSqlServerSourceConfigIncludeObjectsArgs
- Schemas List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- Schemas []StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema> - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema[] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas Sequence[StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema] - SQL Server schemas/databases in the database server. Structure is documented below.
- schemas List<Property Map> - SQL Server schemas/databases in the database server. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchema, StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaArgs
- Schema string - Schema name.
- Tables List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable> - Tables in the database. Structure is documented below.
- Schema string - Schema name.
- Tables []StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable - Tables in the database. Structure is documented below.
- schema String - Schema name.
- tables List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable> - Tables in the database. Structure is documented below.
- schema string - Schema name.
- tables StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable[] - Tables in the database. Structure is documented below.
- schema str - Schema name.
- tables Sequence[StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable] - Tables in the database. Structure is documented below.
- schema String - Schema name.
- tables List<Property Map> - Tables in the database. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTable, StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableArgs
- Table string - Table name.
- Columns List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string - Table name.
- Columns []StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- columns List<StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string - Table name.
- columns StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn[] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str - Table name.
- columns Sequence[StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn] - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String - Table name.
- columns List<Property Map> - SQL Server columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumn, StreamSourceConfigSqlServerSourceConfigIncludeObjectsSchemaTableColumnArgs
- Column string
- Column name.
- DataType string
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- OrdinalPosition int
- (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool
- (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- Column string
- Column name.
- DataType string
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- Length int
- (Output) Column length.
- Nullable bool
- (Output) Whether or not the column can accept a null value.
- OrdinalPosition int
- (Output) The ordinal position of the column in the table.
- Precision int
- (Output) Column precision.
- PrimaryKey bool
- (Output) Whether or not the column represents a primary key.
- Scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Integer
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition Integer
- (Output) The ordinal position of the column in the table.
- precision Integer
- (Output) Column precision.
- primaryKey Boolean
- (Output) Whether or not the column represents a primary key.
- scale Integer
- (Output) Column scale.
- column string
- Column name.
- dataType string
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length number
- (Output) Column length.
- nullable boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition number
- (Output) The ordinal position of the column in the table.
- precision number
- (Output) Column precision.
- primaryKey boolean
- (Output) Whether or not the column represents a primary key.
- scale number
- (Output) Column scale.
- column str
- Column name.
- data_type str
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length int
- (Output) Column length.
- nullable bool
- (Output) Whether or not the column can accept a null value.
- ordinal_position int
- (Output) The ordinal position of the column in the table.
- precision int
- (Output) Column precision.
- primary_key bool
- (Output) Whether or not the column represents a primary key.
- scale int
- (Output) Column scale.
- column String
- Column name.
- dataType String
- The SQL Server data type. Full data types list can be found here: https://learn.microsoft.com/en-us/sql/t-sql/data-types/data-types-transact-sql?view=sql-server-ver16
- length Number
- (Output) Column length.
- nullable Boolean
- (Output) Whether or not the column can accept a null value.
- ordinalPosition Number
- (Output) The ordinal position of the column in the table.
- precision Number
- (Output) Column precision.
- primaryKey Boolean
- (Output) Whether or not the column represents a primary key.
- scale Number
- (Output) Column scale.
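As a rough sketch of how these nested include-objects types compose, the TypeScript snippet below narrows a SQL Server stream to selected schemas, tables, and columns. The project, connection profile names, schema, and table names are hypothetical, and the minimal GCS destination and backfillNone settings are assumptions for illustration rather than part of the reference above.
import * as gcp from "@pulumi/gcp";

// Hypothetical connection profiles; in a real program these would typically be
// gcp.datastream.ConnectionProfile resources rather than hard-coded names.
const sourceProfileId = "projects/my-project/locations/us-central1/connectionProfiles/sqlserver-profile";
const destinationProfileId = "projects/my-project/locations/us-central1/connectionProfiles/gcs-profile";

const sqlServerStream = new gcp.datastream.Stream("sqlserver_stream", {
    streamId: "sqlserver-stream",
    location: "us-central1",
    displayName: "SQL Server to GCS",
    sourceConfig: {
        sourceConnectionProfile: sourceProfileId,
        sqlServerSourceConfig: {
            // includeObjects limits replication to the listed schemas/tables/columns;
            // a table with no columns listed replicates all of its columns.
            includeObjects: {
                schemas: [{
                    schema: "dbo",
                    tables: [
                        {
                            table: "orders",
                            columns: [{
                                column: "order_id",
                            }],
                        },
                        {
                            table: "customers",
                        },
                    ],
                }],
            },
        },
    },
    destinationConfig: {
        destinationConnectionProfile: destinationProfileId,
        gcsDestinationConfig: {
            avroFileFormat: {},
        },
    },
    backfillNone: {},
});
The same shape applies to excludeObjects, which uses the corresponding exclude-objects types to drop the listed schemas, tables, or columns from replication instead.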
Import
Stream can be imported using any of these accepted formats:
projects/{{project}}/locations/{{location}}/streams/{{stream_id}}
{{project}}/{{location}}/{{stream_id}}
{{location}}/{{stream_id}}
When using the pulumi import command, Stream can be imported using one of the formats above. For example:
$ pulumi import gcp:datastream/stream:Stream default projects/{{project}}/locations/{{location}}/streams/{{stream_id}}
$ pulumi import gcp:datastream/stream:Stream default {{project}}/{{location}}/{{stream_id}}
$ pulumi import gcp:datastream/stream:Stream default {{location}}/{{stream_id}}
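For instance, with a hypothetical project my-project, location us-central1, and stream ID my-stream, the first format expands to:
$ pulumi import gcp:datastream/stream:Stream default projects/my-project/locations/us-central1/streams/my-stream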
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.