databricks.Pipeline

Databricks v1.46.1 published on Friday, Jun 28, 2024 by Pulumi

    Use databricks.Pipeline to deploy Delta Live Tables.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const dltDemo = new databricks.Notebook("dlt_demo", {});
    const dltDemoRepo = new databricks.Repo("dlt_demo", {});
    const _this = new databricks.Pipeline("this", {
        name: "Pipeline Name",
        storage: "/test/first-pipeline",
        configuration: {
            key1: "value1",
            key2: "value2",
        },
        clusters: [
            {
                label: "default",
                numWorkers: 2,
                customTags: {
                    cluster_type: "default",
                },
            },
            {
                label: "maintenance",
                numWorkers: 1,
                customTags: {
                    cluster_type: "maintenance",
                },
            },
        ],
        libraries: [
            {
                notebook: {
                    path: dltDemo.id,
                },
            },
            {
                file: {
                    path: pulumi.interpolate`${dltDemoRepo.path}/pipeline.sql`,
                },
            },
        ],
        continuous: false,
        notifications: [{
            emailRecipients: [
                "user@domain.com",
                "user1@domain.com",
            ],
            alerts: [
                "on-update-failure",
                "on-update-fatal-failure",
                "on-update-success",
                "on-flow-failure",
            ],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    dlt_demo = databricks.Notebook("dlt_demo")
    dlt_demo_repo = databricks.Repo("dlt_demo")
    this = databricks.Pipeline("this",
        name="Pipeline Name",
        storage="/test/first-pipeline",
        configuration={
            "key1": "value1",
            "key2": "value2",
        },
        clusters=[
            databricks.PipelineClusterArgs(
                label="default",
                num_workers=2,
                custom_tags={
                    "cluster_type": "default",
                },
            ),
            databricks.PipelineClusterArgs(
                label="maintenance",
                num_workers=1,
                custom_tags={
                    "cluster_type": "maintenance",
                },
            ),
        ],
        libraries=[
            databricks.PipelineLibraryArgs(
                notebook=databricks.PipelineLibraryNotebookArgs(
                    path=dlt_demo.id,
                ),
            ),
            databricks.PipelineLibraryArgs(
                file=databricks.PipelineLibraryFileArgs(
                    path=dlt_demo_repo.path.apply(lambda path: f"{path}/pipeline.sql"),
                ),
            ),
        ],
        continuous=False,
        notifications=[databricks.PipelineNotificationArgs(
            email_recipients=[
                "user@domain.com",
                "user1@domain.com",
            ],
            alerts=[
                "on-update-failure",
                "on-update-fatal-failure",
                "on-update-success",
                "on-flow-failure",
            ],
        )])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		dltDemo, err := databricks.NewNotebook(ctx, "dlt_demo", nil)
    		if err != nil {
    			return err
    		}
    		dltDemoRepo, err := databricks.NewRepo(ctx, "dlt_demo", nil)
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewPipeline(ctx, "this", &databricks.PipelineArgs{
    			Name:    pulumi.String("Pipeline Name"),
    			Storage: pulumi.String("/test/first-pipeline"),
    			Configuration: pulumi.Map{
    				"key1": pulumi.Any("value1"),
    				"key2": pulumi.Any("value2"),
    			},
    			Clusters: databricks.PipelineClusterArray{
    				&databricks.PipelineClusterArgs{
    					Label:      pulumi.String("default"),
    					NumWorkers: pulumi.Int(2),
    					CustomTags: pulumi.Map{
    						"cluster_type": pulumi.Any("default"),
    					},
    				},
    				&databricks.PipelineClusterArgs{
    					Label:      pulumi.String("maintenance"),
    					NumWorkers: pulumi.Int(1),
    					CustomTags: pulumi.Map{
    						"cluster_type": pulumi.Any("maintenance"),
    					},
    				},
    			},
    			Libraries: databricks.PipelineLibraryArray{
    				&databricks.PipelineLibraryArgs{
    					Notebook: &databricks.PipelineLibraryNotebookArgs{
    						Path: dltDemo.ID(),
    					},
    				},
    				&databricks.PipelineLibraryArgs{
    					File: &databricks.PipelineLibraryFileArgs{
    						Path: dltDemoRepo.Path.ApplyT(func(path string) (string, error) {
    							return fmt.Sprintf("%v/pipeline.sql", path), nil
    						}).(pulumi.StringOutput),
    					},
    				},
    			},
    			Continuous: pulumi.Bool(false),
    			Notifications: databricks.PipelineNotificationArray{
    				&databricks.PipelineNotificationArgs{
    					EmailRecipients: pulumi.StringArray{
    						pulumi.String("user@domain.com"),
    						pulumi.String("user1@domain.com"),
    					},
    					Alerts: pulumi.StringArray{
    						pulumi.String("on-update-failure"),
    						pulumi.String("on-update-fatal-failure"),
    						pulumi.String("on-update-success"),
    						pulumi.String("on-flow-failure"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var dltDemo = new Databricks.Notebook("dlt_demo");
    
        var dltDemoRepo = new Databricks.Repo("dlt_demo");
    
        var @this = new Databricks.Pipeline("this", new()
        {
            Name = "Pipeline Name",
            Storage = "/test/first-pipeline",
            Configuration = 
            {
                { "key1", "value1" },
                { "key2", "value2" },
            },
            Clusters = new[]
            {
                new Databricks.Inputs.PipelineClusterArgs
                {
                    Label = "default",
                    NumWorkers = 2,
                    CustomTags = 
                    {
                        { "cluster_type", "default" },
                    },
                },
                new Databricks.Inputs.PipelineClusterArgs
                {
                    Label = "maintenance",
                    NumWorkers = 1,
                    CustomTags = 
                    {
                        { "cluster_type", "maintenance" },
                    },
                },
            },
            Libraries = new[]
            {
                new Databricks.Inputs.PipelineLibraryArgs
                {
                    Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                    {
                        Path = dltDemo.Id,
                    },
                },
                new Databricks.Inputs.PipelineLibraryArgs
                {
                    File = new Databricks.Inputs.PipelineLibraryFileArgs
                    {
                        Path = dltDemoRepo.Path.Apply(path => $"{path}/pipeline.sql"),
                    },
                },
            },
            Continuous = false,
            Notifications = new[]
            {
                new Databricks.Inputs.PipelineNotificationArgs
                {
                    EmailRecipients = new[]
                    {
                        "user@domain.com",
                        "user1@domain.com",
                    },
                    Alerts = new[]
                    {
                        "on-update-failure",
                        "on-update-fatal-failure",
                        "on-update-success",
                        "on-flow-failure",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Notebook;
    import com.pulumi.databricks.Repo;
    import com.pulumi.databricks.Pipeline;
    import com.pulumi.databricks.PipelineArgs;
    import com.pulumi.databricks.inputs.PipelineClusterArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryNotebookArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryFileArgs;
    import com.pulumi.databricks.inputs.PipelineNotificationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var dltDemo = new Notebook("dltDemo");
    
            var dltDemoRepo = new Repo("dltDemoRepo");
    
            var this_ = new Pipeline("this", PipelineArgs.builder()
                .name("Pipeline Name")
                .storage("/test/first-pipeline")
                .configuration(Map.ofEntries(
                    Map.entry("key1", "value1"),
                    Map.entry("key2", "value2")
                ))
                .clusters(            
                    PipelineClusterArgs.builder()
                        .label("default")
                        .numWorkers(2)
                        .customTags(Map.of("cluster_type", "default"))
                        .build(),
                    PipelineClusterArgs.builder()
                        .label("maintenance")
                        .numWorkers(1)
                        .customTags(Map.of("cluster_type", "maintenance"))
                        .build())
                .libraries(            
                    PipelineLibraryArgs.builder()
                        .notebook(PipelineLibraryNotebookArgs.builder()
                            .path(dltDemo.id())
                            .build())
                        .build(),
                    PipelineLibraryArgs.builder()
                        .file(PipelineLibraryFileArgs.builder()
                            .path(dltDemoRepo.path().applyValue(path -> String.format("%s/pipeline.sql", path)))
                            .build())
                        .build())
                .continuous(false)
                .notifications(PipelineNotificationArgs.builder()
                    .emailRecipients(                
                        "user@domain.com",
                        "user1@domain.com")
                    .alerts(                
                        "on-update-failure",
                        "on-update-fatal-failure",
                        "on-update-success",
                        "on-flow-failure")
                    .build())
                .build());
    
        }
    }
    
    resources:
      dltDemo:
        type: databricks:Notebook
        name: dlt_demo
      dltDemoRepo:
        type: databricks:Repo
        name: dlt_demo
      this:
        type: databricks:Pipeline
        properties:
          name: Pipeline Name
          storage: /test/first-pipeline
          configuration:
            key1: value1
            key2: value2
          clusters:
            - label: default
              numWorkers: 2
              customTags:
                cluster_type: default
            - label: maintenance
              numWorkers: 1
              customTags:
                cluster_type: maintenance
          libraries:
            - notebook:
                path: ${dltDemo.id}
            - file:
                path: ${dltDemoRepo.path}/pipeline.sql
          continuous: false
          notifications:
            - emailRecipients:
                - user@domain.com
                - user1@domain.com
              alerts:
                - on-update-failure
                - on-update-fatal-failure
                - on-update-success
                - on-flow-failure
    

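    The examples above write pipeline output to a storage location. As a hedged variant, the sketch below shows the same resource configured against Unity Catalog by setting catalog and target instead of storage (the two conflict); the catalog, schema, and notebook path values are placeholders rather than values taken from this page.

    import * as databricks from "@pulumi/databricks";
    
    // Sketch only: "main", "dlt_demo", and the notebook path are placeholder values.
    const ucPipeline = new databricks.Pipeline("uc_pipeline", {
        name: "UC Pipeline Name",
        catalog: "main",    // conflicts with `storage`
        target: "dlt_demo", // database (schema) that receives the pipeline's output tables
        channel: "CURRENT",
        edition: "ADVANCED",
        continuous: false,
        libraries: [{
            notebook: {
                path: "/Shared/dlt/uc_pipeline_notebook",
            },
        }],
    });
    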

    Create Pipeline Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Pipeline(name: string, args?: PipelineArgs, opts?: CustomResourceOptions);
    @overload
    def Pipeline(resource_name: str,
                 args: Optional[PipelineArgs] = None,
                 opts: Optional[ResourceOptions] = None)
    
    @overload
    def Pipeline(resource_name: str,
                 opts: Optional[ResourceOptions] = None,
                 allow_duplicate_names: Optional[bool] = None,
                 catalog: Optional[str] = None,
                 channel: Optional[str] = None,
                 clusters: Optional[Sequence[PipelineClusterArgs]] = None,
                 configuration: Optional[Mapping[str, Any]] = None,
                 continuous: Optional[bool] = None,
                 deployment: Optional[PipelineDeploymentArgs] = None,
                 development: Optional[bool] = None,
                 edition: Optional[str] = None,
                 filters: Optional[PipelineFiltersArgs] = None,
                 libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
                 name: Optional[str] = None,
                 notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
                 photon: Optional[bool] = None,
                 serverless: Optional[bool] = None,
                 storage: Optional[str] = None,
                 target: Optional[str] = None)
    func NewPipeline(ctx *Context, name string, args *PipelineArgs, opts ...ResourceOption) (*Pipeline, error)
    public Pipeline(string name, PipelineArgs? args = null, CustomResourceOptions? opts = null)
    public Pipeline(String name, PipelineArgs args)
    public Pipeline(String name, PipelineArgs args, CustomResourceOptions options)
    
    type: databricks:Pipeline
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
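    For example, a minimal TypeScript constructor call that also supplies resource options might look like the following sketch; the dependsOn relationship and resource names are illustrative only.

    import * as databricks from "@pulumi/databricks";
    
    // Sketch only: pass the args as the second parameter and
    // CustomResourceOptions (here, dependsOn) as the third.
    const demoNotebook = new databricks.Notebook("demo_notebook", {});
    const minimal = new databricks.Pipeline("minimal", {
        name: "Minimal Pipeline",
    }, {
        dependsOn: [demoNotebook],
    });
    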

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var pipelineResource = new Databricks.Pipeline("pipelineResource", new()
    {
        AllowDuplicateNames = false,
        Catalog = "string",
        Channel = "string",
        Clusters = new[]
        {
            new Databricks.Inputs.PipelineClusterArgs
            {
                ApplyPolicyDefaultValues = false,
                Autoscale = new Databricks.Inputs.PipelineClusterAutoscaleArgs
                {
                    MaxWorkers = 0,
                    MinWorkers = 0,
                    Mode = "string",
                },
                AwsAttributes = new Databricks.Inputs.PipelineClusterAwsAttributesArgs
                {
                    Availability = "string",
                    EbsVolumeCount = 0,
                    EbsVolumeSize = 0,
                    EbsVolumeType = "string",
                    FirstOnDemand = 0,
                    InstanceProfileArn = "string",
                    SpotBidPricePercent = 0,
                    ZoneId = "string",
                },
                AzureAttributes = new Databricks.Inputs.PipelineClusterAzureAttributesArgs
                {
                    Availability = "string",
                    FirstOnDemand = 0,
                    SpotBidMaxPrice = 0,
                },
                ClusterLogConf = new Databricks.Inputs.PipelineClusterClusterLogConfArgs
                {
                    Dbfs = new Databricks.Inputs.PipelineClusterClusterLogConfDbfsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.PipelineClusterClusterLogConfS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                },
                CustomTags = 
                {
                    { "string", "any" },
                },
                DriverInstancePoolId = "string",
                DriverNodeTypeId = "string",
                EnableLocalDiskEncryption = false,
                GcpAttributes = new Databricks.Inputs.PipelineClusterGcpAttributesArgs
                {
                    Availability = "string",
                    GoogleServiceAccount = "string",
                    LocalSsdCount = 0,
                    ZoneId = "string",
                },
                InitScripts = new[]
                {
                    new Databricks.Inputs.PipelineClusterInitScriptArgs
                    {
                        Abfss = new Databricks.Inputs.PipelineClusterInitScriptAbfssArgs
                        {
                            Destination = "string",
                        },
                        File = new Databricks.Inputs.PipelineClusterInitScriptFileArgs
                        {
                            Destination = "string",
                        },
                        Gcs = new Databricks.Inputs.PipelineClusterInitScriptGcsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.PipelineClusterInitScriptS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                        Volumes = new Databricks.Inputs.PipelineClusterInitScriptVolumesArgs
                        {
                            Destination = "string",
                        },
                        Workspace = new Databricks.Inputs.PipelineClusterInitScriptWorkspaceArgs
                        {
                            Destination = "string",
                        },
                    },
                },
                InstancePoolId = "string",
                Label = "string",
                NodeTypeId = "string",
                NumWorkers = 0,
                PolicyId = "string",
                SparkConf = 
                {
                    { "string", "any" },
                },
                SparkEnvVars = 
                {
                    { "string", "any" },
                },
                SshPublicKeys = new[]
                {
                    "string",
                },
            },
        },
        Configuration = 
        {
            { "string", "any" },
        },
        Continuous = false,
        Deployment = new Databricks.Inputs.PipelineDeploymentArgs
        {
            Kind = "string",
            MetadataFilePath = "string",
        },
        Development = false,
        Edition = "string",
        Filters = new Databricks.Inputs.PipelineFiltersArgs
        {
            Excludes = new[]
            {
                "string",
            },
            Includes = new[]
            {
                "string",
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.PipelineLibraryArgs
            {
                File = new Databricks.Inputs.PipelineLibraryFileArgs
                {
                    Path = "string",
                },
                Jar = "string",
                Maven = new Databricks.Inputs.PipelineLibraryMavenArgs
                {
                    Coordinates = "string",
                    Exclusions = new[]
                    {
                        "string",
                    },
                    Repo = "string",
                },
                Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                {
                    Path = "string",
                },
                Whl = "string",
            },
        },
        Name = "string",
        Notifications = new[]
        {
            new Databricks.Inputs.PipelineNotificationArgs
            {
                Alerts = new[]
                {
                    "string",
                },
                EmailRecipients = new[]
                {
                    "string",
                },
            },
        },
        Photon = false,
        Serverless = false,
        Storage = "string",
        Target = "string",
    });
    
    example, err := databricks.NewPipeline(ctx, "pipelineResource", &databricks.PipelineArgs{
    	AllowDuplicateNames: pulumi.Bool(false),
    	Catalog:             pulumi.String("string"),
    	Channel:             pulumi.String("string"),
    	Clusters: databricks.PipelineClusterArray{
    		&databricks.PipelineClusterArgs{
    			ApplyPolicyDefaultValues: pulumi.Bool(false),
    			Autoscale: &databricks.PipelineClusterAutoscaleArgs{
    				MaxWorkers: pulumi.Int(0),
    				MinWorkers: pulumi.Int(0),
    				Mode:       pulumi.String("string"),
    			},
    			AwsAttributes: &databricks.PipelineClusterAwsAttributesArgs{
    				Availability:        pulumi.String("string"),
    				EbsVolumeCount:      pulumi.Int(0),
    				EbsVolumeSize:       pulumi.Int(0),
    				EbsVolumeType:       pulumi.String("string"),
    				FirstOnDemand:       pulumi.Int(0),
    				InstanceProfileArn:  pulumi.String("string"),
    				SpotBidPricePercent: pulumi.Int(0),
    				ZoneId:              pulumi.String("string"),
    			},
    			AzureAttributes: &databricks.PipelineClusterAzureAttributesArgs{
    				Availability:    pulumi.String("string"),
    				FirstOnDemand:   pulumi.Int(0),
    				SpotBidMaxPrice: pulumi.Float64(0),
    			},
    			ClusterLogConf: &databricks.PipelineClusterClusterLogConfArgs{
    				Dbfs: &databricks.PipelineClusterClusterLogConfDbfsArgs{
    					Destination: pulumi.String("string"),
    				},
    				S3: &databricks.PipelineClusterClusterLogConfS3Args{
    					Destination:      pulumi.String("string"),
    					CannedAcl:        pulumi.String("string"),
    					EnableEncryption: pulumi.Bool(false),
    					EncryptionType:   pulumi.String("string"),
    					Endpoint:         pulumi.String("string"),
    					KmsKey:           pulumi.String("string"),
    					Region:           pulumi.String("string"),
    				},
    			},
    			CustomTags: pulumi.Map{
    				"string": pulumi.Any("any"),
    			},
    			DriverInstancePoolId:      pulumi.String("string"),
    			DriverNodeTypeId:          pulumi.String("string"),
    			EnableLocalDiskEncryption: pulumi.Bool(false),
    			GcpAttributes: &databricks.PipelineClusterGcpAttributesArgs{
    				Availability:         pulumi.String("string"),
    				GoogleServiceAccount: pulumi.String("string"),
    				LocalSsdCount:        pulumi.Int(0),
    				ZoneId:               pulumi.String("string"),
    			},
    			InitScripts: databricks.PipelineClusterInitScriptArray{
    				&databricks.PipelineClusterInitScriptArgs{
    					Abfss: &databricks.PipelineClusterInitScriptAbfssArgs{
    						Destination: pulumi.String("string"),
    					},
    					File: &databricks.PipelineClusterInitScriptFileArgs{
    						Destination: pulumi.String("string"),
    					},
    					Gcs: &databricks.PipelineClusterInitScriptGcsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.PipelineClusterInitScriptS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    					Volumes: &databricks.PipelineClusterInitScriptVolumesArgs{
    						Destination: pulumi.String("string"),
    					},
    					Workspace: &databricks.PipelineClusterInitScriptWorkspaceArgs{
    						Destination: pulumi.String("string"),
    					},
    				},
    			},
    			InstancePoolId: pulumi.String("string"),
    			Label:          pulumi.String("string"),
    			NodeTypeId:     pulumi.String("string"),
    			NumWorkers:     pulumi.Int(0),
    			PolicyId:       pulumi.String("string"),
    			SparkConf: pulumi.Map{
    				"string": pulumi.Any("any"),
    			},
    			SparkEnvVars: pulumi.Map{
    				"string": pulumi.Any("any"),
    			},
    			SshPublicKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    		},
    	},
    	Configuration: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	Continuous: pulumi.Bool(false),
    	Deployment: &databricks.PipelineDeploymentArgs{
    		Kind:             pulumi.String("string"),
    		MetadataFilePath: pulumi.String("string"),
    	},
    	Development: pulumi.Bool(false),
    	Edition:     pulumi.String("string"),
    	Filters: &databricks.PipelineFiltersArgs{
    		Excludes: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Includes: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	Libraries: databricks.PipelineLibraryArray{
    		&databricks.PipelineLibraryArgs{
    			File: &databricks.PipelineLibraryFileArgs{
    				Path: pulumi.String("string"),
    			},
    			Jar: pulumi.String("string"),
    			Maven: &databricks.PipelineLibraryMavenArgs{
    				Coordinates: pulumi.String("string"),
    				Exclusions: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Repo: pulumi.String("string"),
    			},
    			Notebook: &databricks.PipelineLibraryNotebookArgs{
    				Path: pulumi.String("string"),
    			},
    			Whl: pulumi.String("string"),
    		},
    	},
    	Name: pulumi.String("string"),
    	Notifications: databricks.PipelineNotificationArray{
    		&databricks.PipelineNotificationArgs{
    			Alerts: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			EmailRecipients: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    		},
    	},
    	Photon:     pulumi.Bool(false),
    	Serverless: pulumi.Bool(false),
    	Storage:    pulumi.String("string"),
    	Target:     pulumi.String("string"),
    })
    
    var pipelineResource = new Pipeline("pipelineResource", PipelineArgs.builder()
        .allowDuplicateNames(false)
        .catalog("string")
        .channel("string")
        .clusters(PipelineClusterArgs.builder()
            .applyPolicyDefaultValues(false)
            .autoscale(PipelineClusterAutoscaleArgs.builder()
                .maxWorkers(0)
                .minWorkers(0)
                .mode("string")
                .build())
            .awsAttributes(PipelineClusterAwsAttributesArgs.builder()
                .availability("string")
                .ebsVolumeCount(0)
                .ebsVolumeSize(0)
                .ebsVolumeType("string")
                .firstOnDemand(0)
                .instanceProfileArn("string")
                .spotBidPricePercent(0)
                .zoneId("string")
                .build())
            .azureAttributes(PipelineClusterAzureAttributesArgs.builder()
                .availability("string")
                .firstOnDemand(0)
                .spotBidMaxPrice(0)
                .build())
            .clusterLogConf(PipelineClusterClusterLogConfArgs.builder()
                .dbfs(PipelineClusterClusterLogConfDbfsArgs.builder()
                    .destination("string")
                    .build())
                .s3(PipelineClusterClusterLogConfS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .build())
            .customTags(Map.of("string", "any"))
            .driverInstancePoolId("string")
            .driverNodeTypeId("string")
            .enableLocalDiskEncryption(false)
            .gcpAttributes(PipelineClusterGcpAttributesArgs.builder()
                .availability("string")
                .googleServiceAccount("string")
                .localSsdCount(0)
                .zoneId("string")
                .build())
            .initScripts(PipelineClusterInitScriptArgs.builder()
                .abfss(PipelineClusterInitScriptAbfssArgs.builder()
                    .destination("string")
                    .build())
                .file(PipelineClusterInitScriptFileArgs.builder()
                    .destination("string")
                    .build())
                .gcs(PipelineClusterInitScriptGcsArgs.builder()
                    .destination("string")
                    .build())
                .s3(PipelineClusterInitScriptS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .volumes(PipelineClusterInitScriptVolumesArgs.builder()
                    .destination("string")
                    .build())
                .workspace(PipelineClusterInitScriptWorkspaceArgs.builder()
                    .destination("string")
                    .build())
                .build())
            .instancePoolId("string")
            .label("string")
            .nodeTypeId("string")
            .numWorkers(0)
            .policyId("string")
            .sparkConf(Map.of("string", "any"))
            .sparkEnvVars(Map.of("string", "any"))
            .sshPublicKeys("string")
            .build())
        .configuration(Map.of("string", "any"))
        .continuous(false)
        .deployment(PipelineDeploymentArgs.builder()
            .kind("string")
            .metadataFilePath("string")
            .build())
        .development(false)
        .edition("string")
        .filters(PipelineFiltersArgs.builder()
            .excludes("string")
            .includes("string")
            .build())
        .libraries(PipelineLibraryArgs.builder()
            .file(PipelineLibraryFileArgs.builder()
                .path("string")
                .build())
            .jar("string")
            .maven(PipelineLibraryMavenArgs.builder()
                .coordinates("string")
                .exclusions("string")
                .repo("string")
                .build())
            .notebook(PipelineLibraryNotebookArgs.builder()
                .path("string")
                .build())
            .whl("string")
            .build())
        .name("string")
        .notifications(PipelineNotificationArgs.builder()
            .alerts("string")
            .emailRecipients("string")
            .build())
        .photon(false)
        .serverless(false)
        .storage("string")
        .target("string")
        .build());
    
    pipeline_resource = databricks.Pipeline("pipelineResource",
        allow_duplicate_names=False,
        catalog="string",
        channel="string",
        clusters=[databricks.PipelineClusterArgs(
            apply_policy_default_values=False,
            autoscale=databricks.PipelineClusterAutoscaleArgs(
                max_workers=0,
                min_workers=0,
                mode="string",
            ),
            aws_attributes=databricks.PipelineClusterAwsAttributesArgs(
                availability="string",
                ebs_volume_count=0,
                ebs_volume_size=0,
                ebs_volume_type="string",
                first_on_demand=0,
                instance_profile_arn="string",
                spot_bid_price_percent=0,
                zone_id="string",
            ),
            azure_attributes=databricks.PipelineClusterAzureAttributesArgs(
                availability="string",
                first_on_demand=0,
                spot_bid_max_price=0,
            ),
            cluster_log_conf=databricks.PipelineClusterClusterLogConfArgs(
                dbfs=databricks.PipelineClusterClusterLogConfDbfsArgs(
                    destination="string",
                ),
                s3=databricks.PipelineClusterClusterLogConfS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
            ),
            custom_tags={
                "string": "any",
            },
            driver_instance_pool_id="string",
            driver_node_type_id="string",
            enable_local_disk_encryption=False,
            gcp_attributes=databricks.PipelineClusterGcpAttributesArgs(
                availability="string",
                google_service_account="string",
                local_ssd_count=0,
                zone_id="string",
            ),
            init_scripts=[databricks.PipelineClusterInitScriptArgs(
                abfss=databricks.PipelineClusterInitScriptAbfssArgs(
                    destination="string",
                ),
                file=databricks.PipelineClusterInitScriptFileArgs(
                    destination="string",
                ),
                gcs=databricks.PipelineClusterInitScriptGcsArgs(
                    destination="string",
                ),
                s3=databricks.PipelineClusterInitScriptS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
                volumes=databricks.PipelineClusterInitScriptVolumesArgs(
                    destination="string",
                ),
                workspace=databricks.PipelineClusterInitScriptWorkspaceArgs(
                    destination="string",
                ),
            )],
            instance_pool_id="string",
            label="string",
            node_type_id="string",
            num_workers=0,
            policy_id="string",
            spark_conf={
                "string": "any",
            },
            spark_env_vars={
                "string": "any",
            },
            ssh_public_keys=["string"],
        )],
        configuration={
            "string": "any",
        },
        continuous=False,
        deployment=databricks.PipelineDeploymentArgs(
            kind="string",
            metadata_file_path="string",
        ),
        development=False,
        edition="string",
        filters=databricks.PipelineFiltersArgs(
            excludes=["string"],
            includes=["string"],
        ),
        libraries=[databricks.PipelineLibraryArgs(
            file=databricks.PipelineLibraryFileArgs(
                path="string",
            ),
            jar="string",
            maven=databricks.PipelineLibraryMavenArgs(
                coordinates="string",
                exclusions=["string"],
                repo="string",
            ),
            notebook=databricks.PipelineLibraryNotebookArgs(
                path="string",
            ),
            whl="string",
        )],
        name="string",
        notifications=[databricks.PipelineNotificationArgs(
            alerts=["string"],
            email_recipients=["string"],
        )],
        photon=False,
        serverless=False,
        storage="string",
        target="string")
    
    const pipelineResource = new databricks.Pipeline("pipelineResource", {
        allowDuplicateNames: false,
        catalog: "string",
        channel: "string",
        clusters: [{
            applyPolicyDefaultValues: false,
            autoscale: {
                maxWorkers: 0,
                minWorkers: 0,
                mode: "string",
            },
            awsAttributes: {
                availability: "string",
                ebsVolumeCount: 0,
                ebsVolumeSize: 0,
                ebsVolumeType: "string",
                firstOnDemand: 0,
                instanceProfileArn: "string",
                spotBidPricePercent: 0,
                zoneId: "string",
            },
            azureAttributes: {
                availability: "string",
                firstOnDemand: 0,
                spotBidMaxPrice: 0,
            },
            clusterLogConf: {
                dbfs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
            },
            customTags: {
                string: "any",
            },
            driverInstancePoolId: "string",
            driverNodeTypeId: "string",
            enableLocalDiskEncryption: false,
            gcpAttributes: {
                availability: "string",
                googleServiceAccount: "string",
                localSsdCount: 0,
                zoneId: "string",
            },
            initScripts: [{
                abfss: {
                    destination: "string",
                },
                file: {
                    destination: "string",
                },
                gcs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
                volumes: {
                    destination: "string",
                },
                workspace: {
                    destination: "string",
                },
            }],
            instancePoolId: "string",
            label: "string",
            nodeTypeId: "string",
            numWorkers: 0,
            policyId: "string",
            sparkConf: {
                string: "any",
            },
            sparkEnvVars: {
                string: "any",
            },
            sshPublicKeys: ["string"],
        }],
        configuration: {
            string: "any",
        },
        continuous: false,
        deployment: {
            kind: "string",
            metadataFilePath: "string",
        },
        development: false,
        edition: "string",
        filters: {
            excludes: ["string"],
            includes: ["string"],
        },
        libraries: [{
            file: {
                path: "string",
            },
            jar: "string",
            maven: {
                coordinates: "string",
                exclusions: ["string"],
                repo: "string",
            },
            notebook: {
                path: "string",
            },
            whl: "string",
        }],
        name: "string",
        notifications: [{
            alerts: ["string"],
            emailRecipients: ["string"],
        }],
        photon: false,
        serverless: false,
        storage: "string",
        target: "string",
    });
    
    type: databricks:Pipeline
    properties:
        allowDuplicateNames: false
        catalog: string
        channel: string
        clusters:
            - applyPolicyDefaultValues: false
              autoscale:
                maxWorkers: 0
                minWorkers: 0
                mode: string
              awsAttributes:
                availability: string
                ebsVolumeCount: 0
                ebsVolumeSize: 0
                ebsVolumeType: string
                firstOnDemand: 0
                instanceProfileArn: string
                spotBidPricePercent: 0
                zoneId: string
              azureAttributes:
                availability: string
                firstOnDemand: 0
                spotBidMaxPrice: 0
              clusterLogConf:
                dbfs:
                    destination: string
                s3:
                    cannedAcl: string
                    destination: string
                    enableEncryption: false
                    encryptionType: string
                    endpoint: string
                    kmsKey: string
                    region: string
              customTags:
                string: any
              driverInstancePoolId: string
              driverNodeTypeId: string
              enableLocalDiskEncryption: false
              gcpAttributes:
                availability: string
                googleServiceAccount: string
                localSsdCount: 0
                zoneId: string
              initScripts:
                - abfss:
                    destination: string
                  file:
                    destination: string
                  gcs:
                    destination: string
                  s3:
                    cannedAcl: string
                    destination: string
                    enableEncryption: false
                    encryptionType: string
                    endpoint: string
                    kmsKey: string
                    region: string
                  volumes:
                    destination: string
                  workspace:
                    destination: string
              instancePoolId: string
              label: string
              nodeTypeId: string
              numWorkers: 0
              policyId: string
              sparkConf:
                string: any
              sparkEnvVars:
                string: any
              sshPublicKeys:
                - string
        configuration:
            string: any
        continuous: false
        deployment:
            kind: string
            metadataFilePath: string
        development: false
        edition: string
        filters:
            excludes:
                - string
            includes:
                - string
        libraries:
            - file:
                path: string
              jar: string
              maven:
                coordinates: string
                exclusions:
                    - string
                repo: string
              notebook:
                path: string
              whl: string
        name: string
        notifications:
            - alerts:
                - string
              emailRecipients:
                - string
        photon: false
        serverless: false
        storage: string
        target: string
    

    Pipeline Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Pipeline resource accepts the following input properties:

    AllowDuplicateNames bool
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
    Clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically for the pipeline. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    Configuration Dictionary<string, object>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    Deployment PipelineDeployment
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    Edition string
    Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default).
    Filters PipelineFilters
    Libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications List<PipelineNotification>
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    Serverless bool
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    AllowDuplicateNames bool
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
    Clusters []PipelineClusterArgs
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically for the pipeline. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    Configuration map[string]interface{}
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    Deployment PipelineDeploymentArgs
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    Edition string
    Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default).
    Filters PipelineFiltersArgs
    Libraries []PipelineLibraryArgs
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications []PipelineNotificationArgs
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    Serverless bool
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    allowDuplicateNames Boolean
    catalog String
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
    clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically for the pipeline. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Map<String,Object>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeployment
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition String
    Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default).
    filters PipelineFilters
    libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<PipelineNotification>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless Boolean
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    allowDuplicateNames boolean
    catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
    clusters PipelineCluster[]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically for the pipeline. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration {[key: string]: any}
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeployment
    development boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition string
    Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default).
    filters PipelineFilters
    libraries PipelineLibrary[]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications PipelineNotification[]
    photon boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless boolean
    storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    allow_duplicate_names bool
    catalog str
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel str
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are CURRENT (default) and PREVIEW.
    clusters Sequence[PipelineClusterArgs]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically for the pipeline. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Mapping[str, Any]
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeploymentArgs
    development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition str
    Optional name of the product edition. Supported values are CORE, PRO, and ADVANCED (default).
    filters PipelineFiltersArgs
    libraries Sequence[PipelineLibraryArgs]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name str
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications Sequence[PipelineNotificationArgs]
    photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    serverless bool
    storage str
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target str
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    allowDuplicateNames Boolean
    catalog String
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusters List<Property Map>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Map<Any>
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment Property Map
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    filters Property Map
    libraries List<Property Map>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<Property Map>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless Boolean
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
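
    Because storage and catalog conflict, a pipeline backed by Unity Catalog sets catalog and target instead of storage. Below is a minimal TypeScript sketch, assuming an existing notebook resource; the catalog and schema names are illustrative.

    import * as databricks from "@pulumi/databricks";

    // Notebook holding the DLT source code (name and contents are assumptions).
    const ucDemo = new databricks.Notebook("uc_demo", {});

    // Unity Catalog-backed pipeline: `catalog` replaces `storage`, and `target`
    // names the schema that receives the pipeline output tables.
    const ucPipeline = new databricks.Pipeline("uc_pipeline", {
        name: "UC Pipeline",
        catalog: "main", // assumed catalog name
        target: "dlt_demo", // assumed schema name
        libraries: [{
            notebook: {
                path: ucDemo.id,
            },
        }],
        continuous: false,
    });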

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Pipeline resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Url string
    URL of the DLT pipeline on the given workspace.
    Id string
    The provider-assigned unique ID for this managed resource.
    Url string
    URL of the DLT pipeline on the given workspace.
    id String
    The provider-assigned unique ID for this managed resource.
    url String
    URL of the DLT pipeline on the given workspace.
    id string
    The provider-assigned unique ID for this managed resource.
    url string
    URL of the DLT pipeline on the given workspace.
    id str
    The provider-assigned unique ID for this managed resource.
    url str
    URL of the DLT pipeline on the given workspace.
    id String
    The provider-assigned unique ID for this managed resource.
    url String
    URL of the DLT pipeline on the given workspace.
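
    For example, the provider-assigned id and the workspace url can be exported as stack outputs once a pipeline is created. A minimal sketch; the storage path is illustrative.

    import * as databricks from "@pulumi/databricks";

    const example = new databricks.Pipeline("example", {
        name: "Output Demo",
        storage: "/test/output-demo", // assumed DBFS location
    });

    // Both properties are populated by the provider after creation.
    export const pipelineId = example.id;
    export const pipelineUrl = example.url;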

    Look up Existing Pipeline Resource

    Get an existing Pipeline resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: PipelineState, opts?: CustomResourceOptions): Pipeline
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            allow_duplicate_names: Optional[bool] = None,
            catalog: Optional[str] = None,
            channel: Optional[str] = None,
            clusters: Optional[Sequence[PipelineClusterArgs]] = None,
            configuration: Optional[Mapping[str, Any]] = None,
            continuous: Optional[bool] = None,
            deployment: Optional[PipelineDeploymentArgs] = None,
            development: Optional[bool] = None,
            edition: Optional[str] = None,
            filters: Optional[PipelineFiltersArgs] = None,
            libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
            name: Optional[str] = None,
            notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
            photon: Optional[bool] = None,
            serverless: Optional[bool] = None,
            storage: Optional[str] = None,
            target: Optional[str] = None,
            url: Optional[str] = None) -> Pipeline
    func GetPipeline(ctx *Context, name string, id IDInput, state *PipelineState, opts ...ResourceOption) (*Pipeline, error)
    public static Pipeline Get(string name, Input<string> id, PipelineState? state, CustomResourceOptions? opts = null)
    public static Pipeline get(String name, Output<String> id, PipelineState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AllowDuplicateNames bool
    Catalog string
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    Clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    Configuration Dictionary<string, object>
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    Deployment PipelineDeployment
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    Filters PipelineFilters
    Libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications List<PipelineNotification>
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    Serverless bool
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Url string
    URL of the DLT pipeline on the given workspace.
    AllowDuplicateNames bool
    Catalog string
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    Clusters []PipelineClusterArgs
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    Configuration map[string]interface{}
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    Deployment PipelineDeploymentArgs
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    Filters PipelineFiltersArgs
    Libraries []PipelineLibraryArgs
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications []PipelineNotificationArgs
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    Serverless bool
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Url string
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    catalog String
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Map<String,Object>
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeployment
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    filters PipelineFilters
    libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<PipelineNotification>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless Boolean
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    url String
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames boolean
    catalog string
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusters PipelineCluster[]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration {[key: string]: any}
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeployment
    development boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    filters PipelineFilters
    libraries PipelineLibrary[]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications PipelineNotification[]
    photon boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless boolean
    storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    url string
    URL of the DLT pipeline on the given workspace.
    allow_duplicate_names bool
    catalog str
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel str
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusters Sequence[PipelineClusterArgs]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Mapping[str, Any]
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment PipelineDeploymentArgs
    development bool
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition str
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    filters PipelineFiltersArgs
    libraries Sequence[PipelineLibraryArgs]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name str
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications Sequence[PipelineNotificationArgs]
    photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    serverless bool
    storage str
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target str
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    url str
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    catalog String
    The name of the catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusters List<Property Map>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected for the pipeline automatically. Note that DLT pipeline clusters support only a subset of cluster attributes, as described in the documentation. Also note that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old algorithm).
    configuration Map<Any>
    An optional map of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    deployment Property Map
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is true.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default).
    filters Property Map
    libraries List<Property Map>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of special notebook and file library types that must have the path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<Property Map>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    serverless Boolean
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    url String
    URL of the DLT pipeline on the given workspace.
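
    For instance, an already-provisioned pipeline can be looked up by its ID and its state read without re-declaring its configuration. A minimal TypeScript sketch; the pipeline ID is a placeholder.

    import * as databricks from "@pulumi/databricks";

    // Look up an existing pipeline by its provider-assigned ID (placeholder value).
    const existing = databricks.Pipeline.get("existing", "1234-567890-abcdefgh");

    // State properties such as `url` are then available on the returned resource.
    export const existingUrl = existing.url;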

    Supporting Types

    PipelineCluster, PipelineClusterArgs

    PipelineClusterAutoscale, PipelineClusterAutoscaleArgs

    maxWorkers Integer
    minWorkers Integer
    mode String
    maxWorkers number
    minWorkers number
    mode string
    maxWorkers Number
    minWorkers Number
    mode String
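
    As noted in the clusters description, the autoscale block accepts a mode parameter in addition to the worker bounds. A minimal TypeScript sketch of a pipeline cluster using enhanced autoscaling; the worker counts and storage path are illustrative.

    import * as databricks from "@pulumi/databricks";

    const autoscaled = new databricks.Pipeline("autoscaled", {
        name: "Autoscaling Pipeline",
        storage: "/test/autoscaled", // assumed DBFS location
        clusters: [{
            label: "default",
            autoscale: {
                minWorkers: 1,
                maxWorkers: 5,
                mode: "ENHANCED", // or "LEGACY" for the old algorithm
            },
        }],
    });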

    PipelineClusterAwsAttributes, PipelineClusterAwsAttributesArgs

    PipelineClusterAzureAttributes, PipelineClusterAzureAttributesArgs

    PipelineClusterClusterLogConf, PipelineClusterClusterLogConfArgs

    PipelineClusterClusterLogConfDbfs, PipelineClusterClusterLogConfDbfsArgs

    PipelineClusterClusterLogConfS3, PipelineClusterClusterLogConfS3Args

    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
    destination string
    cannedAcl string
    enableEncryption boolean
    encryptionType string
    endpoint string
    kmsKey string
    region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
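
    A cluster log configuration delivers cluster logs to a DBFS or S3 destination. A minimal TypeScript sketch using a DBFS destination; the destination path is an assumption.

    import * as databricks from "@pulumi/databricks";

    const logged = new databricks.Pipeline("logged", {
        name: "Logged Pipeline",
        storage: "/test/logged", // assumed DBFS location
        clusters: [{
            label: "default",
            numWorkers: 1,
            clusterLogConf: {
                dbfs: {
                    destination: "dbfs:/cluster-logs", // assumed log path
                },
            },
        }],
    });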

    PipelineClusterGcpAttributes, PipelineClusterGcpAttributesArgs

    PipelineClusterInitScript, PipelineClusterInitScriptArgs

    abfss Property Map
    dbfs Property Map

    Deprecated: For init scripts use 'volumes', 'workspace' or cloud storage location instead of 'dbfs'.

    file Property Map
    gcs Property Map
    s3 Property Map
    volumes Property Map
    workspace Property Map
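
    Because the dbfs destination for init scripts is deprecated, scripts are better placed in a workspace file, a Unity Catalog volume, or cloud storage. A minimal TypeScript sketch using a workspace destination; the script path is an assumption.

    import * as databricks from "@pulumi/databricks";

    const withInitScript = new databricks.Pipeline("with_init_script", {
        name: "Init Script Pipeline",
        storage: "/test/init-script", // assumed DBFS location
        clusters: [{
            label: "default",
            numWorkers: 1,
            initScripts: [{
                // `volumes` or cloud storage destinations follow the same shape.
                workspace: {
                    destination: "/Shared/init/setup.sh", // assumed script path
                },
            }],
        }],
    });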

    PipelineClusterInitScriptAbfss, PipelineClusterInitScriptAbfssArgs

    PipelineClusterInitScriptDbfs, PipelineClusterInitScriptDbfsArgs

    PipelineClusterInitScriptFile, PipelineClusterInitScriptFileArgs

    PipelineClusterInitScriptGcs, PipelineClusterInitScriptGcsArgs

    PipelineClusterInitScriptS3, PipelineClusterInitScriptS3Args

    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
    destination string
    cannedAcl string
    enableEncryption boolean
    encryptionType string
    endpoint string
    kmsKey string
    region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String

    PipelineClusterInitScriptVolumes, PipelineClusterInitScriptVolumesArgs

    PipelineClusterInitScriptWorkspace, PipelineClusterInitScriptWorkspaceArgs

    PipelineDeployment, PipelineDeploymentArgs

    Kind string
    MetadataFilePath string
    Kind string
    MetadataFilePath string
    kind String
    metadataFilePath String
    kind string
    metadataFilePath string
    kind String
    metadataFilePath String

    PipelineFilters, PipelineFiltersArgs

    Excludes List<string>
    Includes List<string>
    Excludes []string
    Includes []string
    excludes List<String>
    includes List<String>
    excludes string[]
    includes string[]
    excludes Sequence[str]
    includes Sequence[str]
    excludes List<String>
    includes List<String>

    PipelineLibrary, PipelineLibraryArgs

    PipelineLibraryFile, PipelineLibraryFileArgs

    Path string
    Path string
    path String
    path string
    path str
    path String

    PipelineLibraryMaven, PipelineLibraryMavenArgs

    Coordinates string
    Exclusions List<string>
    Repo string
    Coordinates string
    Exclusions []string
    Repo string
    coordinates String
    exclusions List<String>
    repo String
    coordinates string
    exclusions string[]
    repo string
    coordinates str
    exclusions Sequence[str]
    repo str
    coordinates String
    exclusions List<String>
    repo String

    PipelineLibraryNotebook, PipelineLibraryNotebookArgs

    Path string
    Path string
    path String
    path string
    path str
    path String

    PipelineNotification, PipelineNotificationArgs

    Alerts List<string>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    EmailRecipients List<string>
    Non-empty list of email addresses to notify.
    Alerts []string
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    EmailRecipients []string
    Non-empty list of email addresses to notify.
    alerts List<String>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients List<String>
    Non-empty list of email addresses to notify.
    alerts string[]
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients string[]
    Non-empty list of email addresses to notify.
    alerts Sequence[str]
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    email_recipients Sequence[str]
    Non-empty list of email addresses to notify.
    alerts List<String>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the up-to-date list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients List<String>
    Non-empty list of email addresses to notify.

    Import

    The pipeline resource can be imported using the ID of the pipeline:

    bash

    $ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.