We recommend using Azure Native.
azure.datafactory.DatasetSnowflake
Explore with Pulumi AI
Manages a Snowflake Dataset inside an Azure Data Factory.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
name: "example-resources",
location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
name: "example",
location: example.location,
resourceGroupName: example.name,
});
const exampleLinkedServiceSnowflake = new azure.datafactory.LinkedServiceSnowflake("example", {
name: "example",
dataFactoryId: exampleFactory.id,
connectionString: "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh",
});
const exampleDatasetSnowflake = new azure.datafactory.DatasetSnowflake("example", {
name: "example",
dataFactoryId: exampleFactory.id,
linkedServiceName: exampleLinkedServiceSnowflake.name,
schemaName: "foo_schema",
tableName: "foo_table",
});
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
name="example-resources",
location="West Europe")
example_factory = azure.datafactory.Factory("example",
name="example",
location=example.location,
resource_group_name=example.name)
example_linked_service_snowflake = azure.datafactory.LinkedServiceSnowflake("example",
name="example",
data_factory_id=example_factory.id,
connection_string="jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh")
example_dataset_snowflake = azure.datafactory.DatasetSnowflake("example",
name="example",
data_factory_id=example_factory.id,
linked_service_name=example_linked_service_snowflake.name,
schema_name="foo_schema",
table_name="foo_table")
package main
import (
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
Name: pulumi.String("example-resources"),
Location: pulumi.String("West Europe"),
})
if err != nil {
return err
}
exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
Name: pulumi.String("example"),
Location: example.Location,
ResourceGroupName: example.Name,
})
if err != nil {
return err
}
exampleLinkedServiceSnowflake, err := datafactory.NewLinkedServiceSnowflake(ctx, "example", &datafactory.LinkedServiceSnowflakeArgs{
Name: pulumi.String("example"),
DataFactoryId: exampleFactory.ID(),
ConnectionString: pulumi.String("jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh"),
})
if err != nil {
return err
}
_, err = datafactory.NewDatasetSnowflake(ctx, "example", &datafactory.DatasetSnowflakeArgs{
Name: pulumi.String("example"),
DataFactoryId: exampleFactory.ID(),
LinkedServiceName: exampleLinkedServiceSnowflake.Name,
SchemaName: pulumi.String("foo_schema"),
TableName: pulumi.String("foo_table"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var example = new Azure.Core.ResourceGroup("example", new()
{
Name = "example-resources",
Location = "West Europe",
});
var exampleFactory = new Azure.DataFactory.Factory("example", new()
{
Name = "example",
Location = example.Location,
ResourceGroupName = example.Name,
});
var exampleLinkedServiceSnowflake = new Azure.DataFactory.LinkedServiceSnowflake("example", new()
{
Name = "example",
DataFactoryId = exampleFactory.Id,
ConnectionString = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh",
});
var exampleDatasetSnowflake = new Azure.DataFactory.DatasetSnowflake("example", new()
{
Name = "example",
DataFactoryId = exampleFactory.Id,
LinkedServiceName = exampleLinkedServiceSnowflake.Name,
SchemaName = "foo_schema",
TableName = "foo_table",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceSnowflake;
import com.pulumi.azure.datafactory.LinkedServiceSnowflakeArgs;
import com.pulumi.azure.datafactory.DatasetSnowflake;
import com.pulumi.azure.datafactory.DatasetSnowflakeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new ResourceGroup("example", ResourceGroupArgs.builder()
.name("example-resources")
.location("West Europe")
.build());
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.name("example")
.location(example.location())
.resourceGroupName(example.name())
.build());
var exampleLinkedServiceSnowflake = new LinkedServiceSnowflake("exampleLinkedServiceSnowflake", LinkedServiceSnowflakeArgs.builder()
.name("example")
.dataFactoryId(exampleFactory.id())
.connectionString("jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh")
.build());
var exampleDatasetSnowflake = new DatasetSnowflake("exampleDatasetSnowflake", DatasetSnowflakeArgs.builder()
.name("example")
.dataFactoryId(exampleFactory.id())
.linkedServiceName(exampleLinkedServiceSnowflake.name())
.schemaName("foo_schema")
.tableName("foo_table")
.build());
}
}
resources:
example:
type: azure:core:ResourceGroup
properties:
name: example-resources
location: West Europe
exampleFactory:
type: azure:datafactory:Factory
name: example
properties:
name: example
location: ${example.location}
resourceGroupName: ${example.name}
exampleLinkedServiceSnowflake:
type: azure:datafactory:LinkedServiceSnowflake
name: example
properties:
name: example
dataFactoryId: ${exampleFactory.id}
connectionString: jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh
exampleDatasetSnowflake:
type: azure:datafactory:DatasetSnowflake
name: example
properties:
name: example
dataFactoryId: ${exampleFactory.id}
linkedServiceName: ${exampleLinkedServiceSnowflake.name}
schemaName: foo_schema
tableName: foo_table
Create DatasetSnowflake Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DatasetSnowflake(name: string, args: DatasetSnowflakeArgs, opts?: CustomResourceOptions);
@overload
def DatasetSnowflake(resource_name: str,
args: DatasetSnowflakeArgs,
opts: Optional[ResourceOptions] = None)
@overload
def DatasetSnowflake(resource_name: str,
opts: Optional[ResourceOptions] = None,
data_factory_id: Optional[str] = None,
linked_service_name: Optional[str] = None,
additional_properties: Optional[Mapping[str, str]] = None,
annotations: Optional[Sequence[str]] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
schema_columns: Optional[Sequence[DatasetSnowflakeSchemaColumnArgs]] = None,
schema_name: Optional[str] = None,
table_name: Optional[str] = None)
func NewDatasetSnowflake(ctx *Context, name string, args DatasetSnowflakeArgs, opts ...ResourceOption) (*DatasetSnowflake, error)
public DatasetSnowflake(string name, DatasetSnowflakeArgs args, CustomResourceOptions? opts = null)
public DatasetSnowflake(String name, DatasetSnowflakeArgs args)
public DatasetSnowflake(String name, DatasetSnowflakeArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetSnowflake
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DatasetSnowflakeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DatasetSnowflakeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DatasetSnowflakeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetSnowflakeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DatasetSnowflakeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var datasetSnowflakeResource = new Azure.DataFactory.DatasetSnowflake("datasetSnowflakeResource", new()
{
DataFactoryId = "string",
LinkedServiceName = "string",
AdditionalProperties =
{
{ "string", "string" },
},
Annotations = new[]
{
"string",
},
Description = "string",
Folder = "string",
Name = "string",
Parameters =
{
{ "string", "string" },
},
SchemaColumns = new[]
{
new Azure.DataFactory.Inputs.DatasetSnowflakeSchemaColumnArgs
{
Name = "string",
Precision = 0,
Scale = 0,
Type = "string",
},
},
SchemaName = "string",
TableName = "string",
});
example, err := datafactory.NewDatasetSnowflake(ctx, "datasetSnowflakeResource", &datafactory.DatasetSnowflakeArgs{
DataFactoryId: pulumi.String("string"),
LinkedServiceName: pulumi.String("string"),
AdditionalProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
Annotations: pulumi.StringArray{
pulumi.String("string"),
},
Description: pulumi.String("string"),
Folder: pulumi.String("string"),
Name: pulumi.String("string"),
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
SchemaColumns: datafactory.DatasetSnowflakeSchemaColumnArray{
&datafactory.DatasetSnowflakeSchemaColumnArgs{
Name: pulumi.String("string"),
Precision: pulumi.Int(0),
Scale: pulumi.Int(0),
Type: pulumi.String("string"),
},
},
SchemaName: pulumi.String("string"),
TableName: pulumi.String("string"),
})
var datasetSnowflakeResource = new DatasetSnowflake("datasetSnowflakeResource", DatasetSnowflakeArgs.builder()
.dataFactoryId("string")
.linkedServiceName("string")
.additionalProperties(Map.of("string", "string"))
.annotations("string")
.description("string")
.folder("string")
.name("string")
.parameters(Map.of("string", "string"))
.schemaColumns(DatasetSnowflakeSchemaColumnArgs.builder()
.name("string")
.precision(0)
.scale(0)
.type("string")
.build())
.schemaName("string")
.tableName("string")
.build());
dataset_snowflake_resource = azure.datafactory.DatasetSnowflake("datasetSnowflakeResource",
data_factory_id="string",
linked_service_name="string",
additional_properties={
"string": "string",
},
annotations=["string"],
description="string",
folder="string",
name="string",
parameters={
"string": "string",
},
schema_columns=[azure.datafactory.DatasetSnowflakeSchemaColumnArgs(
name="string",
precision=0,
scale=0,
type="string",
)],
schema_name="string",
table_name="string")
const datasetSnowflakeResource = new azure.datafactory.DatasetSnowflake("datasetSnowflakeResource", {
dataFactoryId: "string",
linkedServiceName: "string",
additionalProperties: {
string: "string",
},
annotations: ["string"],
description: "string",
folder: "string",
name: "string",
parameters: {
string: "string",
},
schemaColumns: [{
name: "string",
precision: 0,
scale: 0,
type: "string",
}],
schemaName: "string",
tableName: "string",
});
type: azure:datafactory:DatasetSnowflake
properties:
additionalProperties:
string: string
annotations:
- string
dataFactoryId: string
description: string
folder: string
linkedServiceName: string
name: string
parameters:
string: string
schemaColumns:
- name: string
precision: 0
scale: 0
type: string
schemaName: string
tableName: string
DatasetSnowflake Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The DatasetSnowflake resource accepts the following input properties:
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- Additional
Properties Dictionary<string, string> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- Description string
- The description for the Data Factory Dataset Snowflake.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- Name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- Schema
Columns List<DatasetSnowflake Schema Column> - A
schema_column
block as defined below. - Schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- Table
Name string - The table name of the Data Factory Dataset Snowflake.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- Additional
Properties map[string]string - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- Description string
- The description for the Data Factory Dataset Snowflake.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- Name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- Schema
Columns []DatasetSnowflake Schema Column Args - A
schema_column
block as defined below. - Schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- Table
Name string - The table name of the Data Factory Dataset Snowflake.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- linked
Service StringName - The Data Factory Linked Service name in which to associate the Dataset with.
- additional
Properties Map<String,String> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- description String
- The description for the Data Factory Dataset Snowflake.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- name String
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns List<DatasetSnowflake Schema Column> - A
schema_column
block as defined below. - schema
Name String - The schema name of the Data Factory Dataset Snowflake.
- table
Name String - The table name of the Data Factory Dataset Snowflake.
- data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- additional
Properties {[key: string]: string} - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- description string
- The description for the Data Factory Dataset Snowflake.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns DatasetSnowflake Schema Column[] - A
schema_column
block as defined below. - schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- table
Name string - The table name of the Data Factory Dataset Snowflake.
- data_
factory_ strid - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- linked_
service_ strname - The Data Factory Linked Service name in which to associate the Dataset with.
- additional_
properties Mapping[str, str] - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- description str
- The description for the Data Factory Dataset Snowflake.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- name str
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema_
columns Sequence[DatasetSnowflake Schema Column Args] - A
schema_column
block as defined below. - schema_
name str - The schema name of the Data Factory Dataset Snowflake.
- table_
name str - The table name of the Data Factory Dataset Snowflake.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- linked
Service StringName - The Data Factory Linked Service name in which to associate the Dataset with.
- additional
Properties Map<String> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- description String
- The description for the Data Factory Dataset Snowflake.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- name String
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns List<Property Map> - A
schema_column
block as defined below. - schema
Name String - The schema name of the Data Factory Dataset Snowflake.
- table
Name String - The table name of the Data Factory Dataset Snowflake.
Outputs
All input properties are implicitly available as output properties. Additionally, the DatasetSnowflake resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing DatasetSnowflake Resource
Get an existing DatasetSnowflake resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DatasetSnowflakeState, opts?: CustomResourceOptions): DatasetSnowflake
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
additional_properties: Optional[Mapping[str, str]] = None,
annotations: Optional[Sequence[str]] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
linked_service_name: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
schema_columns: Optional[Sequence[DatasetSnowflakeSchemaColumnArgs]] = None,
schema_name: Optional[str] = None,
table_name: Optional[str] = None) -> DatasetSnowflake
func GetDatasetSnowflake(ctx *Context, name string, id IDInput, state *DatasetSnowflakeState, opts ...ResourceOption) (*DatasetSnowflake, error)
public static DatasetSnowflake Get(string name, Input<string> id, DatasetSnowflakeState? state, CustomResourceOptions? opts = null)
public static DatasetSnowflake get(String name, Output<String> id, DatasetSnowflakeState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Additional
Properties Dictionary<string, string> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Description string
- The description for the Data Factory Dataset Snowflake.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- Linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- Name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- Schema
Columns List<DatasetSnowflake Schema Column> - A
schema_column
block as defined below. - Schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- Table
Name string - The table name of the Data Factory Dataset Snowflake.
- Additional
Properties map[string]string - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Description string
- The description for the Data Factory Dataset Snowflake.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- Linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- Name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- Schema
Columns []DatasetSnowflake Schema Column Args - A
schema_column
block as defined below. - Schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- Table
Name string - The table name of the Data Factory Dataset Snowflake.
- additional
Properties Map<String,String> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description String
- The description for the Data Factory Dataset Snowflake.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- linked
Service StringName - The Data Factory Linked Service name in which to associate the Dataset with.
- name String
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns List<DatasetSnowflake Schema Column> - A
schema_column
block as defined below. - schema
Name String - The schema name of the Data Factory Dataset Snowflake.
- table
Name String - The table name of the Data Factory Dataset Snowflake.
- additional
Properties {[key: string]: string} - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description string
- The description for the Data Factory Dataset Snowflake.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- linked
Service stringName - The Data Factory Linked Service name in which to associate the Dataset with.
- name string
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns DatasetSnowflake Schema Column[] - A
schema_column
block as defined below. - schema
Name string - The schema name of the Data Factory Dataset Snowflake.
- table
Name string - The table name of the Data Factory Dataset Snowflake.
- additional_
properties Mapping[str, str] - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- data_
factory_ strid - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description str
- The description for the Data Factory Dataset Snowflake.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- linked_
service_ strname - The Data Factory Linked Service name in which to associate the Dataset with.
- name str
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema_
columns Sequence[DatasetSnowflake Schema Column Args] - A
schema_column
block as defined below. - schema_
name str - The schema name of the Data Factory Dataset Snowflake.
- table_
name str - The table name of the Data Factory Dataset Snowflake.
- additional
Properties Map<String> - A map of additional properties to associate with the Data Factory Dataset Snowflake.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset Snowflake.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description String
- The description for the Data Factory Dataset Snowflake.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- linked
Service StringName - The Data Factory Linked Service name in which to associate the Dataset with.
- name String
- Specifies the name of the Data Factory Dataset Snowflake. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset Snowflake.
- schema
Columns List<Property Map> - A
schema_column
block as defined below. - schema
Name String - The schema name of the Data Factory Dataset Snowflake.
- table
Name String - The table name of the Data Factory Dataset Snowflake.
Supporting Types
DatasetSnowflakeSchemaColumn, DatasetSnowflakeSchemaColumnArgs
- Name string
- The name of the column.
- Precision int
- The total number of digits allowed.
- Scale int
- The number of digits allowed to the right of the decimal point.
- Type string
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
- Name string
- The name of the column.
- Precision int
- The total number of digits allowed.
- Scale int
- The number of digits allowed to the right of the decimal point.
- Type string
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
- name String
- The name of the column.
- precision Integer
- The total number of digits allowed.
- scale Integer
- The number of digits allowed to the right of the decimal point.
- type String
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
- name string
- The name of the column.
- precision number
- The total number of digits allowed.
- scale number
- The number of digits allowed to the right of the decimal point.
- type string
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
- name str
- The name of the column.
- precision int
- The total number of digits allowed.
- scale int
- The number of digits allowed to the right of the decimal point.
- type str
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
- name String
- The name of the column.
- precision Number
- The total number of digits allowed.
- scale Number
- The number of digits allowed to the right of the decimal point.
- type String
- Type of the column. Valid values are
NUMBER
,DECIMAL
,NUMERIC
,INT
,INTEGER
,BIGINT
,SMALLINT
,FLOAT
,FLOAT4
,FLOAT8
,DOUBLE
,DOUBLE PRECISION
,REAL
,VARCHAR
,CHAR
,CHARACTER
,STRING
,TEXT
,BINARY
,VARBINARY
,BOOLEAN
,DATE
,DATETIME
,TIME
,TIMESTAMP
,TIMESTAMP_LTZ
,TIMESTAMP_NTZ
,TIMESTAMP_TZ
,VARIANT
,OBJECT
,ARRAY
,GEOGRAPHY
. Please note these values are case sensitive.
Import
Data Factory Snowflake Datasets can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/datasetSnowflake:DatasetSnowflake example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
azurerm
Terraform Provider.