databricks.Connection

Databricks v1.46.1 published on Friday, Jun 28, 2024 by Pulumi

    Note: This resource can only be used with a workspace-level provider!

    Lakehouse Federation is the query federation platform for Databricks. Databricks uses Unity Catalog to manage query federation. To make a dataset available for read-only querying using Lakehouse Federation, you create the following:

    • A connection, a securable object in Unity Catalog that specifies a path and credentials for accessing an external database system.
    • A foreign catalog

    This resource manages connections in Unity Catalog.
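
    For context, here is a minimal TypeScript sketch of the second step, a foreign catalog that reads through a connection. It assumes a connection named mysql_connection (as declared in the examples below) and that the databricks.Catalog resource accepts a connectionName argument and a database option for foreign catalogs; treat those names as assumptions rather than something this page guarantees.

    import * as databricks from "@pulumi/databricks";
    
    // Foreign catalog that exposes one database of an existing connection
    // for read-only querying through Lakehouse Federation.
    const foreignCatalog = new databricks.Catalog("foreign", {
        name: "mysql_catalog",
        connectionName: "mysql_connection", // name of the Connection created below
        options: {
            database: "my_database", // database inside the external system (placeholder)
        },
    });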

    Example Usage

    Create a connection to a MySQL database

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const mysql = new databricks.Connection("mysql", {
        name: "mysql_connection",
        connectionType: "MYSQL",
        comment: "this is a connection to mysql db",
        options: {
            host: "test.mysql.database.azure.com",
            port: "3306",
            user: "user",
            password: "password",
        },
        properties: {
            purpose: "testing",
        },
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    mysql = databricks.Connection("mysql",
        name="mysql_connection",
        connection_type="MYSQL",
        comment="this is a connection to mysql db",
        options={
            "host": "test.mysql.database.azure.com",
            "port": "3306",
            "user": "user",
            "password": "password",
        },
        properties={
            "purpose": "testing",
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewConnection(ctx, "mysql", &databricks.ConnectionArgs{
    			Name:           pulumi.String("mysql_connection"),
    			ConnectionType: pulumi.String("MYSQL"),
    			Comment:        pulumi.String("this is a connection to mysql db"),
    			Options: pulumi.Map{
    				"host":     pulumi.Any("test.mysql.database.azure.com"),
    				"port":     pulumi.Any("3306"),
    				"user":     pulumi.Any("user"),
    				"password": pulumi.Any("password"),
    			},
    			Properties: pulumi.Map{
    				"purpose": pulumi.Any("testing"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var mysql = new Databricks.Connection("mysql", new()
        {
            Name = "mysql_connection",
            ConnectionType = "MYSQL",
            Comment = "this is a connection to mysql db",
            Options = 
            {
                { "host", "test.mysql.database.azure.com" },
                { "port", "3306" },
                { "user", "user" },
                { "password", "password" },
            },
            Properties = 
            {
                { "purpose", "testing" },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Connection;
    import com.pulumi.databricks.ConnectionArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var mysql = new Connection("mysql", ConnectionArgs.builder()
                .name("mysql_connection")
                .connectionType("MYSQL")
                .comment("this is a connection to mysql db")
                .options(Map.ofEntries(
                    Map.entry("host", "test.mysql.database.azure.com"),
                    Map.entry("port", "3306"),
                    Map.entry("user", "user"),
                    Map.entry("password", "password")
                ))
                .properties(Map.of("purpose", "testing"))
                .build());
    
        }
    }
    
    resources:
      mysql:
        type: databricks:Connection
        properties:
          name: mysql_connection
          connectionType: MYSQL
          comment: this is a connection to mysql db
          options:
            host: test.mysql.database.azure.com
            port: '3306'
            user: user
            password: password
          properties:
            purpose: testing
    
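
    The password above is written in plain text purely for illustration. In practice you would typically store it as a Pulumi config secret and pass the resulting output into the options map; a minimal TypeScript sketch, where mysqlPassword is an assumed config key:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const config = new pulumi.Config();
    // Set with: pulumi config set --secret mysqlPassword <value>
    const mysqlPassword = config.requireSecret("mysqlPassword");
    
    const mysql = new databricks.Connection("mysql", {
        name: "mysql_connection",
        connectionType: "MYSQL",
        options: {
            host: "test.mysql.database.azure.com",
            port: "3306",
            user: "user",
            password: mysqlPassword, // Output<string>; accepted wherever an input value is expected
        },
    });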

    Create a connection to a BigQuery database

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const bigquery = new databricks.Connection("bigquery", {
        name: "bq_connection",
        connectionType: "BIGQUERY",
        comment: "this is a connection to BQ",
        options: {
            GoogleServiceAccountKeyJson: JSON.stringify({
                type: "service_account",
                project_id: "PROJECT_ID",
                private_key_id: "KEY_ID",
                private_key: `-----BEGIN PRIVATE KEY-----
    PRIVATE_KEY
    -----END PRIVATE KEY-----
    `,
                client_email: "SERVICE_ACCOUNT_EMAIL",
                client_id: "CLIENT_ID",
                auth_uri: "https://accounts.google.com/o/oauth2/auth",
                token_uri: "https://accounts.google.com/o/oauth2/token",
                auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs",
                client_x509_cert_url: "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL",
                universe_domain: "googleapis.com",
            }),
        },
        properties: {
            purpose: "testing",
        },
    });
    
    import pulumi
    import json
    import pulumi_databricks as databricks
    
    bigquery = databricks.Connection("bigquery",
        name="bq_connection",
        connection_type="BIGQUERY",
        comment="this is a connection to BQ",
        options={
            "GoogleServiceAccountKeyJson": json.dumps({
                "type": "service_account",
                "project_id": "PROJECT_ID",
                "private_key_id": "KEY_ID",
                "private_key": """-----BEGIN PRIVATE KEY-----
    PRIVATE_KEY
    -----END PRIVATE KEY-----
    """,
                "client_email": "SERVICE_ACCOUNT_EMAIL",
                "client_id": "CLIENT_ID",
                "auth_uri": "https://accounts.google.com/o/oauth2/auth",
                "token_uri": "https://accounts.google.com/o/oauth2/token",
                "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
                "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL",
                "universe_domain": "googleapis.com",
            }),
        },
        properties={
            "purpose": "testing",
        })
    
    package main
    
    import (
    	"encoding/json"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		tmpJSON0, err := json.Marshal(map[string]interface{}{
    			"type":                        "service_account",
    			"project_id":                  "PROJECT_ID",
    			"private_key_id":              "KEY_ID",
    			"private_key":                 "-----BEGIN PRIVATE KEY-----\nPRIVATE_KEY\n-----END PRIVATE KEY-----\n",
    			"client_email":                "SERVICE_ACCOUNT_EMAIL",
    			"client_id":                   "CLIENT_ID",
    			"auth_uri":                    "https://accounts.google.com/o/oauth2/auth",
    			"token_uri":                   "https://accounts.google.com/o/oauth2/token",
    			"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    			"client_x509_cert_url":        "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL",
    			"universe_domain":             "googleapis.com",
    		})
    		if err != nil {
    			return err
    		}
    		json0 := string(tmpJSON0)
    		_, err = databricks.NewConnection(ctx, "bigquery", &databricks.ConnectionArgs{
    			Name:           pulumi.String("bq_connection"),
    			ConnectionType: pulumi.String("BIGQUERY"),
    			Comment:        pulumi.String("this is a connection to BQ"),
    			Options: pulumi.Map{
    				"GoogleServiceAccountKeyJson": pulumi.String(json0),
    			},
    			Properties: pulumi.Map{
    				"purpose": pulumi.Any("testing"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var bigquery = new Databricks.Connection("bigquery", new()
        {
            Name = "bq_connection",
            ConnectionType = "BIGQUERY",
            Comment = "this is a connection to BQ",
            Options = 
            {
                { "GoogleServiceAccountKeyJson", JsonSerializer.Serialize(new Dictionary<string, object?>
                {
                    ["type"] = "service_account",
                    ["project_id"] = "PROJECT_ID",
                    ["private_key_id"] = "KEY_ID",
                    ["private_key"] = @"-----BEGIN PRIVATE KEY-----
    PRIVATE_KEY
    -----END PRIVATE KEY-----
    ",
                    ["client_email"] = "SERVICE_ACCOUNT_EMAIL",
                    ["client_id"] = "CLIENT_ID",
                    ["auth_uri"] = "https://accounts.google.com/o/oauth2/auth",
                    ["token_uri"] = "https://accounts.google.com/o/oauth2/token",
                    ["auth_provider_x509_cert_url"] = "https://www.googleapis.com/oauth2/v1/certs",
                    ["client_x509_cert_url"] = "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL",
                    ["universe_domain"] = "googleapis.com",
                }) },
            },
            Properties = 
            {
                { "purpose", "testing" },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Connection;
    import com.pulumi.databricks.ConnectionArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bigquery = new Connection("bigquery", ConnectionArgs.builder()
                .name("bq_connection")
                .connectionType("BIGQUERY")
                .comment("this is a connection to BQ")
                .options(Map.of("GoogleServiceAccountKeyJson", serializeJson(
                    jsonObject(
                        jsonProperty("type", "service_account"),
                        jsonProperty("project_id", "PROJECT_ID"),
                        jsonProperty("private_key_id", "KEY_ID"),
                        jsonProperty("private_key", """
    -----BEGIN PRIVATE KEY-----
    PRIVATE_KEY
    -----END PRIVATE KEY-----
                        """),
                        jsonProperty("client_email", "SERVICE_ACCOUNT_EMAIL"),
                        jsonProperty("client_id", "CLIENT_ID"),
                        jsonProperty("auth_uri", "https://accounts.google.com/o/oauth2/auth"),
                        jsonProperty("token_uri", "https://accounts.google.com/o/oauth2/token"),
                        jsonProperty("auth_provider_x509_cert_url", "https://www.googleapis.com/oauth2/v1/certs"),
                        jsonProperty("client_x509_cert_url", "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL"),
                        jsonProperty("universe_domain", "googleapis.com")
                    ))))
                .properties(Map.of("purpose", "testing"))
                .build());
    
        }
    }
    
    resources:
      bigquery:
        type: databricks:Connection
        properties:
          name: bq_connection
          connectionType: BIGQUERY
          comment: this is a connection to BQ
          options:
            GoogleServiceAccountKeyJson:
              fn::toJSON:
                type: service_account
                project_id: PROJECT_ID
                private_key_id: KEY_ID
                private_key: |
                  -----BEGIN PRIVATE KEY-----
                  PRIVATE_KEY
                  -----END PRIVATE KEY-----              
                client_email: SERVICE_ACCOUNT_EMAIL
                client_id: CLIENT_ID
                auth_uri: https://accounts.google.com/o/oauth2/auth
                token_uri: https://accounts.google.com/o/oauth2/token
                auth_provider_x509_cert_url: https://www.googleapis.com/oauth2/v1/certs
                client_x509_cert_url: https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL
                universe_domain: googleapis.com
          properties:
            purpose: testing
    
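
    Rather than inlining the service-account key, you can also read it from a local file at deployment time and mark it as a secret so it is encrypted in Pulumi state; a minimal TypeScript sketch, where bq-key.json is an assumed local path:

    import * as fs from "fs";
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Read the downloaded service-account key file (path is a placeholder).
    const keyJson = fs.readFileSync("bq-key.json", "utf8");
    
    const bigquery = new databricks.Connection("bigquery", {
        name: "bq_connection",
        connectionType: "BIGQUERY",
        comment: "this is a connection to BQ",
        options: {
            // pulumi.secret wraps the value so it is stored encrypted in state.
            GoogleServiceAccountKeyJson: pulumi.secret(keyJson),
        },
        properties: {
            purpose: "testing",
        },
    });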

    Create Connection Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Connection(name: string, args: ConnectionArgs, opts?: CustomResourceOptions);
    @overload
    def Connection(resource_name: str,
                   args: ConnectionArgs,
                   opts: Optional[ResourceOptions] = None)
    
    @overload
    def Connection(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   connection_type: Optional[str] = None,
                   options: Optional[Mapping[str, Any]] = None,
                   comment: Optional[str] = None,
                   metastore_id: Optional[str] = None,
                   name: Optional[str] = None,
                   owner: Optional[str] = None,
                   properties: Optional[Mapping[str, Any]] = None,
                   read_only: Optional[bool] = None)
    func NewConnection(ctx *Context, name string, args ConnectionArgs, opts ...ResourceOption) (*Connection, error)
    public Connection(string name, ConnectionArgs args, CustomResourceOptions? opts = null)
    public Connection(String name, ConnectionArgs args)
    public Connection(String name, ConnectionArgs args, CustomResourceOptions options)
    
    type: databricks:Connection
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args ConnectionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args ConnectionArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args ConnectionArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args ConnectionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args ConnectionArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var connectionResource = new Databricks.Connection("connectionResource", new()
    {
        ConnectionType = "string",
        Options = 
        {
            { "string", "any" },
        },
        Comment = "string",
        MetastoreId = "string",
        Name = "string",
        Owner = "string",
        Properties = 
        {
            { "string", "any" },
        },
        ReadOnly = false,
    });
    
    example, err := databricks.NewConnection(ctx, "connectionResource", &databricks.ConnectionArgs{
    	ConnectionType: pulumi.String("string"),
    	Options: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	Comment:     pulumi.String("string"),
    	MetastoreId: pulumi.String("string"),
    	Name:        pulumi.String("string"),
    	Owner:       pulumi.String("string"),
    	Properties: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	ReadOnly: pulumi.Bool(false),
    })
    
    var connectionResource = new Connection("connectionResource", ConnectionArgs.builder()
        .connectionType("string")
        .options(Map.of("string", "any"))
        .comment("string")
        .metastoreId("string")
        .name("string")
        .owner("string")
        .properties(Map.of("string", "any"))
        .readOnly(false)
        .build());
    
    connection_resource = databricks.Connection("connectionResource",
        connection_type="string",
        options={
            "string": "any",
        },
        comment="string",
        metastore_id="string",
        name="string",
        owner="string",
        properties={
            "string": "any",
        },
        read_only=False)
    
    const connectionResource = new databricks.Connection("connectionResource", {
        connectionType: "string",
        options: {
            string: "any",
        },
        comment: "string",
        metastoreId: "string",
        name: "string",
        owner: "string",
        properties: {
            string: "any",
        },
        readOnly: false,
    });
    
    type: databricks:Connection
    properties:
        comment: string
        connectionType: string
        metastoreId: string
        name: string
        options:
            string: any
        owner: string
        properties:
            string: any
        readOnly: false
    

    Connection Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Connection resource accepts the following input properties:

    ConnectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    Options Dictionary<string, object>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    Comment string
    Free-form text.
    MetastoreId string
    Name string
    Name of the Connection.
    Owner string
    Name of the connection owner.
    Properties Dictionary<string, object>
    Free-form connection properties.
    ReadOnly bool
    ConnectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    Options map[string]interface{}
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    Comment string
    Free-form text.
    MetastoreId string
    Name string
    Name of the Connection.
    Owner string
    Name of the connection owner.
    Properties map[string]interface{}
    Free-form connection properties.
    ReadOnly bool
    connectionType String
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    options Map<String,Object>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    comment String
    Free-form text.
    metastoreId String
    name String
    Name of the Connection.
    owner String
    Name of the connection owner.
    properties Map<String,Object>
    Free-form connection properties.
    readOnly Boolean
    connectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    options {[key: string]: any}
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    comment string
    Free-form text.
    metastoreId string
    name string
    Name of the Connection.
    owner string
    Name of the connection owner.
    properties {[key: string]: any}
    Free-form connection properties.
    readOnly boolean
    connection_type str
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    options Mapping[str, Any]
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    comment str
    Free-form text.
    metastore_id str
    name str
    Name of the Connection.
    owner str
    Name of the connection owner.
    properties Mapping[str, Any]
    Free-form connection properties.
    read_only bool
    connectionType String
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    options Map<Any>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    comment String
    Free-form text.
    metastoreId String
    name String
    Name of the Connection.
    owner String
    Name of the connection owner.
    properties Map<Any>
    Free-form connection properties.
    readOnly Boolean

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Connection resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
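
    In TypeScript, for example, these outputs can be exported from the stack (the mysql variable refers to the connection declared in the examples above):

    // Provider-assigned ID, and an input echoed back as an output.
    export const connectionId = mysql.id;
    export const connectionName = mysql.name;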

    Look up Existing Connection Resource

    Get an existing Connection resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ConnectionState, opts?: CustomResourceOptions): Connection
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            comment: Optional[str] = None,
            connection_type: Optional[str] = None,
            metastore_id: Optional[str] = None,
            name: Optional[str] = None,
            options: Optional[Mapping[str, Any]] = None,
            owner: Optional[str] = None,
            properties: Optional[Mapping[str, Any]] = None,
            read_only: Optional[bool] = None) -> Connection
    func GetConnection(ctx *Context, name string, id IDInput, state *ConnectionState, opts ...ResourceOption) (*Connection, error)
    public static Connection Get(string name, Input<string> id, ConnectionState? state, CustomResourceOptions? opts = null)
    public static Connection get(String name, Output<String> id, ConnectionState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Comment string
    Free-form text.
    ConnectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    MetastoreId string
    Name string
    Name of the Connection.
    Options Dictionary<string, object>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    Owner string
    Name of the connection owner.
    Properties Dictionary<string, object>
    Free-form connection properties.
    ReadOnly bool
    Comment string
    Free-form text.
    ConnectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    MetastoreId string
    Name string
    Name of the Connection.
    Options map[string]interface{}
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    Owner string
    Name of the connection owner.
    Properties map[string]interface{}
    Free-form connection properties.
    ReadOnly bool
    comment String
    Free-form text.
    connectionType String
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    metastoreId String
    name String
    Name of the Connection.
    options Map<String,Object>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    owner String
    Name of the connection owner.
    properties Map<String,Object>
    Free-form connection properties.
    readOnly Boolean
    comment string
    Free-form text.
    connectionType string
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    metastoreId string
    name string
    Name of the Connection.
    options {[key: string]: any}
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    owner string
    Name of the connection owner.
    properties {[key: string]: any}
    Free-form connection properties.
    readOnly boolean
    comment str
    Free-form text.
    connection_type str
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    metastore_id str
    name str
    Name of the Connection.
    options Mapping[str, Any]
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    owner str
    Name of the connection owner.
    properties Mapping[str, Any]
    Free-form connection properties.
    read_only bool
    comment String
    Free-form text.
    connectionType String
    Connection type. BIGQUERY, MYSQL, POSTGRESQL, SNOWFLAKE, REDSHIFT, SQLDW, SQLSERVER, or DATABRICKS are supported. See the Databricks documentation for the up-to-date list of supported connection types.
    metastoreId String
    name String
    Name of the Connection.
    options Map<Any>
    Key-value options required by the connection, e.g. host, port, user, password, or GoogleServiceAccountKeyJson. Consult the documentation for the options required by each connection type.
    owner String
    Name of the connection owner.
    properties Map<Any>
    Free-form connection properties.
    readOnly Boolean
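
    As an illustration, in TypeScript an existing connection can be adopted with the static get function; the ID below is a placeholder following the <metastore_id>|<name> format described under Import:

    import * as databricks from "@pulumi/databricks";
    
    // Look up an existing connection by name and provider ID (placeholder values).
    const existing = databricks.Connection.get(
        "existing-mysql",
        "12345678-aaaa-bbbb-cccc-1234567890ab|mysql_connection",
    );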

    Import

    This resource can be imported by ID:

    $ pulumi import databricks:index/connection:Connection this '<metastore_id>|<name>'
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.